def _create_regional_ttt_store(self, typ):
    if typ == 'a':
        conf = gf.ConfigTypeA(
            id='empty_regional',
            source_depth_min=0.,
            source_depth_max=20*km,
            source_depth_delta=10*km,
            distance_min=10*km,
            distance_max=2000*km,
            distance_delta=10*km,
            sample_rate=2.0,
            ncomponents=10,
            earthmodel_1d=cake.load_model(),
            tabulated_phases=[
                gf.TPDef(id=id, definition=defi)
                for (id, defi) in [
                    ('depthp', 'p'),
                    ('pS', 'pS'),
                    ('P', 'P'),
                    ('S', 'S')]])

    elif typ == 'b':
        conf = gf.ConfigTypeB(
            id='empty_regional_b',
            receiver_depth_min=0.,
            receiver_depth_max=5*km,
            receiver_depth_delta=5*km,
            source_depth_min=0.,
            source_depth_max=20*km,
            source_depth_delta=10*km,
            distance_min=10*km,
            distance_max=2000*km,
            distance_delta=10*km,
            sample_rate=2.0,
            ncomponents=10,
            earthmodel_1d=cake.load_model(),
            tabulated_phases=[
                gf.TPDef(id=id, definition=defi)
                for (id, defi) in [
                    ('depthp', 'p'),
                    ('pS', 'pS'),
                    ('P', 'P'),
                    ('S', 'S')]])

    store_dir = mkdtemp(prefix='gfstore')
    self.tempdirs.append(store_dir)
    gf.Store.create(store_dir, config=conf)
    store = gf.Store(store_dir)
    store.make_ttt()
    store.close()
    return store_dir
def update_model(self):
    if not self._model or self._model[0] != self.chosen_model:
        if self.chosen_model.startswith('Cake builtin: '):
            load_model = cake.load_model(
                self.chosen_model.split(': ', 1)[1])
        elif self.chosen_model.startswith('GF Store: '):
            store_id = self.chosen_model.split(': ', 1)[1]
            load_model = self._engine.get_store(store_id)\
                .config.earthmodel_1d
        else:
            load_model = cake.load_model(self.chosen_model)

        self._model = (self.chosen_model, load_model)
def test_path(self):
    mod = cake.load_model()
    phase = cake.PhaseDef('P')
    ray = mod.arrivals(phases=[phase], distances=[70.], zstart=100.)

    z, x, t = ray[0].zxt_path_subdivided()
    assert z[0].size == 681
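# The z-x-t path returned by zxt_path_subdivided() is handy for drawing
# ray geometry. A sketch, not part of the test suite; it assumes
# matplotlib is available and that z and x hold one array per ray
# segment, as the indexing in the assertion above suggests:
import matplotlib.pyplot as plt
from pyrocko import cake

mod = cake.load_model()
ray = mod.arrivals(
    phases=[cake.PhaseDef('P')], distances=[70.], zstart=100.)[0]

z, x, t = ray.zxt_path_subdivided()
for xs, zs in zip(x, z):
    plt.plot(xs, zs, color='black')

plt.gca().invert_yaxis()
plt.xlabel('Distance')
plt.ylabel('Depth [m]')
plt.show()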
def test_random_model_extract(self):
    nz = 100
    mod = cake.load_model()
    layers = list(mod.elements())
    zmin = layers[0].ztop
    zmax = layers[mod.nlayers-1].zbot
    zmins = num.random.uniform(zmin, zmax, nz)
    zmaxs = num.random.uniform(zmin, zmax, nz)
    for i in range(nz):
        zmin = min(zmins[i], zmaxs[i])
        zmax = max(zmins[i], zmaxs[i])
        new_mod = mod.extract(zmin, zmax)

        elements = list(new_mod.elements())
        n_layers = len([e for e in elements if isinstance(e, cake.Layer)])

        interface_material_top = mod.material(zmin)
        interface_material_bot = mod.material(zmax)

        if not isinstance(elements[0], cake.Surface):
            self.assertEqual(elements[0].mtop, interface_material_top)

        if isinstance(elements[-1], cake.Layer):
            self.assertEqual(elements[-1].ilayer, n_layers-1)
            for k, v in elements[-1].mbot.__dict__.items():
                self.assertAlmostEqual(
                    v, interface_material_bot.__dict__[k], 6)

        self.assertEqual(elements[0].ztop, zmin)
        self.assertEqual(elements[-1].zbot, zmax)
def test_angles(self):
    mod = cake.load_model()
    data = [
        [1.0*km, 1.0*km, 1.0*km, 90., 90., 'P'],
        [1.0*km, 2.0*km, 1.0*km, 45., 135., 'P\\'],
        [2.0*km, 1.0*km, 1.0*km, 135., 45., 'p'],
        [1.0*km, 2.0*km, math.sqrt(3.)*km, 60., 120., 'P\\'],
        [2.0*km, 1.0*km, math.sqrt(3.)*km, 120., 60., 'p']]

    for (zstart, zstop, dist, takeoff_want, incidence_want, pdef_want) \
            in data:

        rays = mod.arrivals(
            zstart=zstart,
            zstop=zstop,
            phases=[
                cake.PhaseDef(sphase)
                for sphase in 'P,p,P\\,p\\'.split(',')],
            distances=[dist*cake.m2d])

        for ray in rays:
            takeoff = round(ray.takeoff_angle())
            incidence = round(ray.incidence_angle())
            pdef = ray.used_phase().definition()

            assert takeoff == takeoff_want
            assert incidence == incidence_want
            assert pdef == pdef_want
def test_classic(self):
    phase = cake.PhaseDef.classic('PP')[0]
    assert str(phase) == '''
Phase definition "P<(cmb)(moho)pP<(cmb)(moho)p":
 - P mode propagation, departing downward \
(may not propagate deeper than interface cmb)
 - passing through moho on upgoing path
 - P mode propagation, departing upward
 - surface reflection
 - P mode propagation, departing downward \
(may not propagate deeper than interface cmb)
 - passing through moho on upgoing path
 - P mode propagation, departing upward
 - arriving at target from below'''.strip()

    mod = cake.load_model()
    rays = mod.arrivals(
        phases=[phase], distances=[5000*km*cake.m2d], zstart=500.)

    assert str(rays[0]).split() == '''10.669 s/deg 5000 km 601.9 s \
33.8 33.8 17% 12% P<(cmb)(moho)pP<(cmb)(moho)p (P^0P) \
0_1_2_3_(4-5)_(6-7)_8_(7-6)_(5-4)_3_2_1_0|\
0_1_2_3_(4-5)_(6-7)_8_(7-6)_(5-4)_3_2_1_0'''.split()

    assert abs(rays[0].t - 601.9) < 0.2
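# PhaseDef.classic() expands a traditional phase name into one or more
# definitions in cake's own phase syntax. A small interactive sketch
# using names that appear elsewhere in this test suite:
from pyrocko import cake

for name in ['P', 'PP', 'Pdiff']:
    for phase in cake.PhaseDef.classic(name):
        print('%s -> %s' % (name, phase.definition()))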
def traveltimes(self, phase, traces):
    Logfile.red('Enter AUTOMATIC CROSSCORRELATION ')
    Logfile.red('\n\n+++++++++++++++++++++++++++++++++++++++++++++++\n ')

    T = []
    Wdict = OrderedDict()
    SNR = OrderedDict()
    Config = self.Config
    cfg = ConfigObj(dict=Config)

    for i in self.StationMeta:
        Logfile.red('read in %s ' % (i))
        de = loc2degrees(self.Origin, i)
        Phase = cake.PhaseDef(phase)
        traveltime_model = cfg.Str('traveltime_model')
        path = palantiri.__path__
        model = cake.load_model(path[0] + '/data/' + traveltime_model)

        if cfg.colesseo_input() is True:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth, zstop=0.)
        else:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth*km, zstop=0.)
        try:
            ptime = arrivals[0].t
        except Exception:
            try:
                arrivals = model.arrivals(
                    [de, de], phases=Phase,
                    zstart=self.Origin.depth*km-2.1)
                ptime = arrivals[0].t
            except Exception:
                ptime = 0

        T.append(ptime)
        if ptime == 0:
            Logfile.red('Available phases for station %s in range %f degree'
                        % (i, de))
            Logfile.red('you tried phase %s' % (phase))
            raise Exception("ILLEGAL: phase definition")
        else:
            tw = self.calculateTimeWindows(ptime)
            if cfg.pyrocko_download() is True:
                w, snr, found = self.readWaveformsCross_pyrocko(
                    i, tw, ptime, traces)
            elif cfg.colesseo_input() is True:
                w, snr = self.readWaveformsCross_colesseo(i, tw, ptime)
            else:
                w, snr = self.readWaveformsCross(i, tw, ptime)

            Wdict[i.getName()] = w
            SNR[i.getName()] = snr

    Logfile.red('\n\n+++++++++++++++++++++++++++++++++++++++++++++++ ')
    Logfile.red('Exit AUTOMATIC FILTER ')
    return Wdict, SNR
def init(store_dir, variant):
    if variant is None:
        variant = '2006'

    if variant not in ('2006', '2006a'):
        raise gf.store.StoreError('unsupported variant: %s' % variant)

    modelling_code_id = 'qseis.%s' % variant

    qseis = QSeisConfig(qseis_version=variant)
    qseis.time_region = (
        gf.meta.Timing('begin-50'),
        gf.meta.Timing('end+100'))

    qseis.cut = (
        gf.meta.Timing('begin-50'),
        gf.meta.Timing('end+100'))

    qseis.wavelet_duration_samples = 0.001
    qseis.sw_flat_earth_transform = 1

    store_id = os.path.basename(os.path.realpath(store_dir))

    config = gf.meta.ConfigTypeA(
        id=store_id,
        ncomponents=10,
        sample_rate=0.2,
        receiver_depth=0*km,
        source_depth_min=10*km,
        source_depth_max=20*km,
        source_depth_delta=10*km,
        distance_min=100*km,
        distance_max=1000*km,
        distance_delta=10*km,
        earthmodel_1d=cake.load_model().extract(depth_max='cmb'),
        modelling_code_id=modelling_code_id,
        tabulated_phases=[
            gf.meta.TPDef(
                id='begin',
                definition='p,P,p\\,P\\,Pv_(cmb)p'),
            gf.meta.TPDef(
                id='end',
                definition='2.5'),
            gf.meta.TPDef(
                id='P',
                definition='!P'),
            gf.meta.TPDef(
                id='S',
                definition='!S'),
            gf.meta.TPDef(
                id='p',
                definition='!p'),
            gf.meta.TPDef(
                id='s',
                definition='!s')])

    config.validate()
    return gf.store.Store.create_editables(
        store_dir, config=config, extra={'qseis': qseis})
def test_interface_model_extract(self):
    nz = 100
    mod = cake.load_model()
    layers = list(mod.elements())
    for i in range(nz):
        i = num.random.randint(0, len(layers)-3)
        i2 = num.random.randint(i+2, len(layers)-1)
        z1 = layers[i].zbot
        z2 = layers[i2].zbot
        zmin = min(z1, z2)
        zmax = max(z1, z2)
        new_mod = mod.extract(zmin, zmax)

        interface_material_top = mod.material(zmin)
        interface_material_bot = mod.material(zmax)

        elements = list(new_mod.elements())
        n_layers = len([e for e in elements if isinstance(e, cake.Layer)])
        if isinstance(elements[0], cake.Layer):
            self.assertEqual(elements[0].mtop, interface_material_top)
            self.assertEqual(elements[0].ilayer, 0)

        if isinstance(elements[-1], cake.Layer):
            self.assertEqual(elements[-1].ilayer, n_layers-1)
            self.assertEqual(elements[-1].mbot, interface_material_bot)

        self.assertEqual(elements[0].ztop, zmin)
        self.assertEqual(elements[-1].zbot, zmax)
        if zmin == 0.:
            assert isinstance(elements[0], cake.Surface)
def update_model(self):
    if not self._model or self._model[0] != self.chosen_model:
        if self.chosen_model in cake.builtin_models():
            load_model = cake.load_model(self.chosen_model)
        else:
            load_model = self._engine.get_store(
                self.chosen_model).config.earthmodel_1d

        self._model = (self.chosen_model, load_model)
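# The membership test against cake.builtin_models() above works because
# builtin models are addressed by their plain file names. A quick sketch
# to see what the installed pyrocko version ships:
from pyrocko import cake

for name in cake.builtin_models():
    print(name)

print(cake.load_model(cake.builtin_models()[0]))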
def runParallel(inmodel):
    print('does not work properly. EXIT')
    sys.exit(0)

    ProBar = progressbar.ProgressBar(maxval=iterations).start()
    misfits = {}
    misfits['pMF'] = []
    misfits['sMF'] = []
    misfits['ScsMF'] = []
    misfits['ScssMF'] = []

    loadmod = cake.load_model(inmodel)

    iteration = 0
    for latindx, lat in enumerate(_lats):
        for lonindx, lon in enumerate(_lons):
            for zindex, z in enumerate(_depths):
                iteration += 1
                # Start process with one event (depth) and one model:
                eve = model.Event(
                    lat, lon, str_to_time("2010-04-11 22:08:15.500"),
                    "Spain_Durcal", z, 6.3)
                [ttpdiff, ttsdiff, ttScsdiff, ttScssdiff] = \
                    depthfinder.startup(loadmod, eve, maxdist)

                pMF, sMF, ScsMF, ScssMF = map(
                    lambda x: calculateMisfit(x, maxdist),
                    [ttpdiff, ttsdiff, ttScsdiff, ttScssdiff])
                resultArray[latindx][lonindx][zindex] = \
                    [pMF, sMF, ScsMF, ScssMF]

                # update progressbar
                ProBar.update(iteration)

                identifierstring = inmodel + '.%s.%s.%s' % (lat, lon, z)
                results[identifierstring] = misfits

    try:
        output = open('results.p', 'wb')
        pickle.dump(results, output)
    finally:
        output.close()
def setTshiftFile(self, reference_event, latindx, lonindx, results,
                  modelstr, phase, depths, maxdist=None):
    '''
    Creates a time shift file relative to a given reference_event.

    :param reference_event: time shift will be analyzed relative to this
        event
    :param latindx: index of latitude of the reference_event's origin
        within the results array
    :param lonindx: index of longitude of the reference_event's origin
        within the results array
    :param results: results as numpy nd array
    :param modelstr: string modelname
    :param phase: string phasename
    :param depths: list of depths which were used in the processing
    :param maxdist: maximum epicentral distance up to which the time
        shift is to be analyzed
    '''
    if isinstance(modelstr, str):
        modelload = cake.load_model(modelstr)
        modelname = (modelstr.split('/')[1]).split('.')[0]
    if isinstance(phase, str):
        phaseload = cake.PhaseDef(phase)

    # get best fit index:
    minimum, minimumIndex = decomposer.getMinimumMisfitForLatindxLonindx(
        results, modelstr, phase, latindx, lonindx)

    # set depth of reference event to depth of best fit:
    reference_event.depth = depths[minimumIndex]

    # calculate traveltimes for best fit:
    for card in self._myStationBox.getStationCards:
        theophases = self.calculateTheoreticalArrivals(
            modelload, reference_event, card)
        if theophases[phase] and card._Phases[phase]:
            # and card.getDistance2Event(reference_event) <= maxdist
            tshift = card._Phases[phase] - theophases[phase].t
            self._myStationBox.setUsedStations(card, tshift)
        else:
            pass

    # create input file for shift plot
    self._myStationBox.writeUsedStationsFile(
        val='{0}{1}'.format(phase, modelname))
def _create_regional_ttt_store(self):
    conf = gf.ConfigTypeA(
        id='empty_regional',
        source_depth_min=0.,
        source_depth_max=20*km,
        source_depth_delta=10*km,
        distance_min=1000*km,
        distance_max=2000*km,
        distance_delta=10*km,
        sample_rate=2.0,
        ncomponents=10,
        earthmodel_1d=cake.load_model(),
        tabulated_phases=[
            gf.TPDef(id=id, definition=defi)
            for (id, defi) in [
                ('depthp', 'p'),
                ('pS', 'pS'),
                ('P', 'P'),
                ('S', 'S')]])

    store_dir = mkdtemp(prefix='gfstore')
    self.tempdirs.append(store_dir)
    gf.Store.create(store_dir, config=conf)
    store = gf.Store(store_dir)
    store.make_ttt()
    store.close()
    return store_dir
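# After make_ttt(), the tabulated phases defined in the config can be
# queried straight from the store. A sketch, assuming the store_dir
# returned above; the (source depth, distance) pair must lie inside the
# configured grid, and I take Store.t() to accept a tabulated phase id:
from pyrocko import gf

km = 1000.
store = gf.Store(store_dir)  # store_dir as returned above
t_p = store.t('P', (10*km, 1500*km))
print('stored P travel time: %.1f s' % t_p)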
def test_simplify(self):
    mod1 = cake.load_model()
    mod2 = mod1.simplify()
    phases = cake.PhaseDef.classic('Pdiff')
    rays1 = mod1.arrivals(phases=phases, distances=[120.], zstart=500.)
    rays2 = mod2.arrivals(phases=phases, distances=[120.], zstart=500.)
    assert abs(rays1[0].t - 915.9) < 0.1
    assert abs(rays2[0].t - 915.9) < 0.1
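# simplify() resamples the layered model into fewer layers while keeping
# travel times nearly unchanged, which is what the two asserts above
# verify against the same reference time. A sketch to see the size
# reduction (exact counts depend on the installed default model):
from pyrocko import cake

mod1 = cake.load_model()
mod2 = mod1.simplify()
print('layers before: %i, after: %i' % (mod1.nlayers, mod2.nlayers))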
def example():
    conf = QSeisSConfigFull()
    conf.source_depth = 15.
    conf.receiver_basement_depth = 35.
    conf.receiver_max_distance = 2000.
    conf.earthmodel_1d = cake.load_model().extract(depth_max='cmb')
    conf.sw_flat_earth_transform = 1
    return conf
def calctakeoff(Station, Event, Config):
    de = loc2degrees(Event, Station)
    Phase = cake.PhaseDef('P')
    model = cake.load_model()
    arrivals = model.arrivals([de, de], phases=Phase,
                              zstart=Event.depth*km)
    return arrivals[0].takeoff_angle()
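# The same computation without the surrounding Station/Event/Config
# wrappers; distance is in degrees, source depth in meters. The values
# here are made up for illustration:
from pyrocko import cake

km = 1000.
mod = cake.load_model()
arrivals = mod.arrivals([55., 55.], phases=cake.PhaseDef('P'),
                        zstart=10*km)
if arrivals:
    print('takeoff angle: %.1f deg' % arrivals[0].takeoff_angle())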
def example():
    conf = QSeisRConfigFull()
    conf.source = QSeis2dSource(lat=-80.5, lon=90.1)
    conf.receiver_location = QSeisRReceiver(lat=13.4, lon=240.5, depth=0.0)
    conf.time_reduction = 10.0
    conf.earthmodel_receiver_1d = cake.load_model().extract(
        depth_max='moho')
    return conf
def runSerial(models=None):
    '''
    Execute serial processing (1 CPU).

    :param models: list of models to investigate. If no models are
        given, all models will be investigated.
    '''
    if models is None:
        models2use = _models
    else:
        models2use = [models]

    iteration = 0
    iterations = len(models2use)*len(_depths)*len(_lats)*len(_lons)
    sys.stdout.write('calculating misfits... ')
    ProBar = progressbar.ProgressBar(maxval=iterations).start()

    # instantiate result array as numpy nd array:
    resultArray = np.ndarray(
        shape=(len(_lats), len(_lons), len(_depths), 4))

    for mod in models2use:
        misfits = {}
        misfits['pMF'] = []
        misfits['sMF'] = []
        misfits['ScsMF'] = []
        misfits['ScssMF'] = []

        loadmod = cake.load_model(mod)

        for latindx, lat in enumerate(_lats):
            for lonindx, lon in enumerate(_lons):
                for zindex, z in enumerate(_depths):
                    iteration += 1
                    eve = model.Event(
                        lat, lon, str_to_time("2010-04-11 22:08:15.500"),
                        "Spain_Durcal", z, 6.3)
                    [ttpdiff, ttsdiff, ttScsdiff, ttScssdiff] = \
                        depthfinder.startup(loadmod, eve, maxdist)
                    [pMF, sMF, ScsMF, ScssMF] = map(
                        lambda x: calculateMisfit(x, maxdist),
                        [ttpdiff, ttsdiff, ttScsdiff, ttScssdiff])

                    # update progressbar
                    ProBar.update(iteration)

                    # write data to numpy array:
                    resultArray[latindx][lonindx][zindex] = \
                        [pMF, sMF, ScsMF, ScssMF]

        results[mod] = resultArray
        depthfinder.storeStationBox()

    # finish progressbar:
    ProBar.finish()

    # write dict to pickled data:
    try:
        output = open('numpy_results.p', 'wb')
        pickle.dump(results, output)
    finally:
        output.close()

    # write used stations file:
    depthfinder._myStationBox.writeUsedStationsFile()
def update_model(self):
    if not self._model or self._model[0] != self.chosen_model:
        if self.chosen_model in cake.builtin_models() \
                or os.path.exists(self.chosen_model):
            load_model = cake.load_model(self.chosen_model)
        else:
            load_model = self._engine.get_store(self.chosen_model)\
                .config.earthmodel_1d

        self._model = (self.chosen_model, load_model)
def example():
    conf = QSeisConfigFull()
    conf.receiver_distances = [2000.]
    conf.receiver_azimuths = [0.]
    conf.time_start = -10.0
    conf.time_reduction_velocity = 15.0
    conf.earthmodel_1d = cake.load_model().extract(depth_max='cmb')
    conf.sw_flat_earth_transform = 1
    return conf
def init(store_dir, variant):
    if variant is None:
        variant = '2010beta'

    if ('qssp.' + variant) not in program_bins:
        raise gf.store.StoreError('unsupported qssp variant: %s' % variant)

    qssp = QSSPConfig(qssp_version=variant)
    qssp.time_region = (
        gf.Timing('begin-50'),
        gf.Timing('end+100'))

    qssp.cut = (
        gf.Timing('begin-50'),
        gf.Timing('end+100'))

    store_id = os.path.basename(os.path.realpath(store_dir))

    config = gf.meta.ConfigTypeA(
        id=store_id,
        ncomponents=10,
        sample_rate=0.2,
        receiver_depth=0*km,
        source_depth_min=10*km,
        source_depth_max=20*km,
        source_depth_delta=10*km,
        distance_min=100*km,
        distance_max=1000*km,
        distance_delta=10*km,
        earthmodel_1d=cake.load_model(),
        modelling_code_id='qssp',
        tabulated_phases=[
            gf.meta.TPDef(
                id='begin',
                definition='p,P,p\\,P\\,Pv_(cmb)p'),
            gf.meta.TPDef(
                id='end',
                definition='2.5'),
            gf.meta.TPDef(
                id='P',
                definition='!P'),
            gf.meta.TPDef(
                id='S',
                definition='!S'),
            gf.meta.TPDef(
                id='p',
                definition='!p'),
            gf.meta.TPDef(
                id='s',
                definition='!s')])

    config.validate()
    return gf.store.Store.create_editables(
        store_dir, config=config, extra={'qssp': qssp})
def example():
    conf = QSeisConfigFull()
    conf.receiver_distances = [2000.]
    conf.receiver_azimuths = [0.]
    conf.time_start = -10.0
    conf.time_reduction_velocity = 15.0
    conf.earthmodel_1d = cake.load_model().extract(depth_max='cmb')
    conf.earthmodel_receiver_1d = None
    conf.sw_flat_earth_transform = 1
    return conf
def test_single_layer_extract(self):
    mod = cake.load_model()
    zmin = 100.
    zmax = 200.
    new_mod = mod.extract(zmin, zmax)
    elements = list(new_mod.elements())

    interface_material_top = mod.material(zmin)
    interface_material_bot = mod.material(zmax)

    if not isinstance(elements[0], cake.Surface):
        self.assertEqual(elements[0].mtop, interface_material_top)

    self.assertEqual(elements[-1].mbot, interface_material_bot)
    self.assertEqual(elements[0].ztop, zmin)
    self.assertEqual(elements[-1].zbot, zmax)
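# extract() crops the layered model to a depth interval; the assertions
# above check that the cropped model inherits the parent model's
# materials at the cut depths. A standalone sketch (the depth_max
# keyword appears elsewhere in this corpus; depth_min is assumed to
# work analogously, and 35 km is an arbitrary cut):
from pyrocko import cake

km = 1000.
mod = cake.load_model()
crust = mod.extract(depth_min=0., depth_max=35*km)
print(crust)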
def test_earthmodel(self):
    from pyrocko import cake
    mod = cake.load_model()

    nx, ny = 500, 500
    delta = (cake.earthradius * 2.0) / (nx-1)

    x = num.arange(nx) * delta - cake.earthradius
    y = num.arange(ny) * delta - cake.earthradius

    x2 = x[num.newaxis, :]
    y2 = y[:, num.newaxis]

    z = cake.earthradius - num.sqrt(x2**2 + y2**2)

    vp_pro = mod.profile('vp')
    z_pro = mod.profile('z')

    vp = num.interp(z, z_pro, vp_pro)

    inside = z > 0.0

    speeds = num.ones((ny, nx))
    speeds[:, :] = 300.
    speeds[inside] = vp[inside]

    times = num.zeros((ny, nx)) - 1.0

    iy = ny - int(round(600*km / delta))
    ix = nx//2

    times[iy, ix] = 0.0

    @benchmark.labeled('test_earthmodel')
    def run():
        eikonal_ext.eikonal_solver_fmm_cartesian(speeds, times, delta)

    run()

    times[num.logical_not(inside)] = num.nan

    self.compare_with_reference(times, 'test_earthmodel.npy')

    if show_plot:
        from matplotlib import pyplot as plt
        plt.gcf().add_subplot(1, 1, 1, aspect=1.0)
        plt.pcolormesh(x, y, speeds, cmap='gray', edgecolor='none')
        plt.contour(x, y, times, levels=num.linspace(0., 1200, 20))
        plt.gca().axis('off')
        plt.show()
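# mod.profile() flattens the layered model into sampled depth profiles;
# that is what makes the interpolation onto the cartesian grid above
# work. The same pattern in isolation:
from pyrocko import cake
import numpy as num

mod = cake.load_model()
z_pro = mod.profile('z')    # depths [m]
vp_pro = mod.profile('vp')  # P-wave speeds [m/s]

# vp at a few arbitrary depths, by linear interpolation:
print(num.interp([10e3, 100e3, 2000e3], z_pro, vp_pro))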
def setup(self, config):
    Shifter.setup(self, config)
    self._earthmodels = config.earthmodels
    self._earthmodels.extend([
        CakeEarthmodel(
            id=fn,
            earthmodel_1d=cake.load_model(
                cake.builtin_model_filename(fn)))
        for fn in cake.builtin_models()])

    self._tabulated_phases = config.tabulated_phases
    if not self._tabulated_phases:
        raise LassieError('missing tabulated phases in config')

    self._cache_path = config.expand_path(config.cache_path)
def init(store_dir):
    qssp = QSSPConfig()
    qssp.time_region = (
        gf.meta.Timing('begin-50'),
        gf.meta.Timing('end+100'))

    qssp.cut = (
        gf.meta.Timing('begin-50'),
        gf.meta.Timing('end+100'))

    store_id = os.path.basename(os.path.realpath(store_dir))

    config = gf.meta.ConfigTypeA(
        id=store_id,
        ncomponents=10,
        sample_rate=0.2,
        receiver_depth=0*km,
        source_depth_min=10*km,
        source_depth_max=20*km,
        source_depth_delta=10*km,
        distance_min=100*km,
        distance_max=1000*km,
        distance_delta=10*km,
        earthmodel_1d=cake.load_model(),
        modelling_code_id='qssp',
        tabulated_phases=[
            gf.meta.TPDef(
                id='begin',
                definition='p,P,p\\,P\\,Pv_(cmb)p'),
            gf.meta.TPDef(
                id='end',
                definition='2.5'),
            gf.meta.TPDef(
                id='P',
                definition='!P'),
            gf.meta.TPDef(
                id='S',
                definition='!S'),
            gf.meta.TPDef(
                id='p',
                definition='!p'),
            gf.meta.TPDef(
                id='s',
                definition='!s')])

    config.validate()
    return gf.store.Store.create_editables(
        store_dir, config=config, extra={'qssp': qssp})
def call(self):
    '''Main work routine of the snuffling.'''
    self.cleanup()
    self.wanted = []
    for iphase, phase in enumerate(self._phases):
        if getattr(self, 'wantphase_%s' % iphase):
            self.wanted.append(cake.PhaseDef(phase))

    if not self.wanted:
        return

    viewer = self.get_viewer()
    pile = self.get_pile()

    self.event = viewer.get_active_event()
    if self.event is None:
        self.fail('No active event is marked.')

    self.stations = [
        s for s in viewer.stations.values()
        if s.station in pile.stations]

    if not self.stations:
        self.fail('No station information available.')

    self.model = cake.load_model(self.earth_model)

    self.phases_string = []
    for s in self.stations:
        rays = self.model.arrivals(
            [s.dist_deg], phases=self.wanted, zstart=self.event.depth)

        for ray in rays:
            incidence_angle = ray.incidence_angle()
            takeoff_angle = ray.takeoff_angle()
            m = PhaseMarker(
                nslc_ids=[(s.network, s.station, '*', '*')],
                tmin=self.event.time + ray.t,
                tmax=self.event.time + ray.t,
                kind=1,
                event=self.event,
                incidence_angle=incidence_angle,
                takeoff_angle=takeoff_angle,
                phasename=ray.given_phase().definition())

            self.add_marker(m)
            self.phases_string.append(str(ray) + '\n')

    self.called = True
def setup(self):
    '''Customization of the snuffling.'''
    self.set_name('Cake')
    self._phases = 'p P pP pP\ pPv3pP\ pPv3pPv3pP\ P(moho)p S s'.split()
    for iphase, phase in enumerate(self._phases):
        self.add_parameter(
            Switch(phase, 'wantphase_%s' % iphase, iphase == 0))

    self.choice = Choice(
        'Model', 'earth_model', cake.builtin_models()[0],
        cake.builtin_models())
    self.add_parameter(self.choice)
    self.add_trigger('Plot Model', self.plot_model)
    self.add_trigger('Plot Rays', self.plot_rays)
    self.add_trigger('Add Model', self.add_model_to_choice)
    self.add_trigger('Add Phase', self.add_phase_definition)
    self.add_trigger('Print Arrivals', self.print_arrivals)
    self.set_live_update(False)
    self.model = cake.load_model(self.earth_model)
    self.called = False
    self.stations = None
def GetPSArrivalRayTracing(sta_coords=np.array([0, 0, 0.0]),
                           eq_coords=np.array([0, 0, 3900]),
                           model_name='VpVs.nd'):
    # play with the pyrocko modules
    from pyrocko import cake
    import matplotlib
    matplotlib.style.use('ggplot')
    from LocationsOnGrid import LocationsOnGridSmall

    eq_depth = eq_coords[2]
    so_offset = np.linalg.norm(sta_coords[:2] - eq_coords[:2])
    model = cake.load_model(model_name)
    # Get the receiver locations
    _, _, _, stCoords = LocationsOnGridSmall(
        receiver_name='receiver.dat', NX=1, NY=1, NZ=1)
    Distance = so_offset*cake.m2d
    p_transmission_paths = model.arrivals(
        distances=[Distance], phases=[cake.PhaseDef('p')], zstart=eq_depth)
    s_transmission_paths = model.arrivals(
        distances=[Distance], phases=[cake.PhaseDef('s')], zstart=eq_depth)
    for rayP, rayS in zip(p_transmission_paths, s_transmission_paths):
        p_arrival = rayP.t
        print(p_arrival)
        s_arrival = rayS.t
        print(s_arrival)

    return p_arrival, s_arrival, so_offset, model
def dummy_store(self):
    if self._dummy_store is None:
        conf = gf.ConfigTypeA(
            id='empty_regional',
            source_depth_min=0.,
            source_depth_max=20*km,
            source_depth_delta=10*km,
            distance_min=1000*km,
            distance_max=2000*km,
            distance_delta=10*km,
            sample_rate=2.0,
            ncomponents=10,
            earthmodel_1d=cake.load_model(crust2_profile=(50., 10.)))

        store_dir = mkdtemp(prefix='gfstore')
        self.tempdirs.append(store_dir)
        gf.Store.create(store_dir, config=conf)
        self._dummy_store = gf.Store(store_dir)

    return self._dummy_store
def download(self, event, directory='array_data', timing=None, length=None,
             want='all', force=False, prefix=False, dump_config=False,
             get_responses=False):
    """:param want: either 'all' or ID as string or list of IDs as strings
    """
    use = []
    unit = 'M'
    if timing is None and length is None:
        raise Exception('Define one of "timing" and "length"')

    prefix = prefix or ''
    directory = pjoin(prefix, directory)
    if not os.path.isdir(directory):
        os.mkdir(directory)

    pzresponses = {}
    logger.info('download data: %s at %sN %sE' % (
        event.name, event.lat, event.lon))

    for site, array_data_provider in self.providers.items():
        logger.info('requesting data from site %s' % site)
        for array_id, codes in array_data_provider.items():
            if array_id not in want and want != ['all']:
                continue

            sub_directory = pjoin(directory, array_id)
            logger.info("%s" % array_id)
            codes = array_data_provider[array_id]
            if not isinstance(codes, list):
                codes = [codes]

            selection = [
                c + tuple((event.time, event.time+1000.)) for c in codes]
            logger.debug('selection: %s' % selection)

            try:
                # if site == 'bgr':
                #     st = ws.station(url='http://eida.bgr.de/',
                #                     selection=selection)
                # else:
                #     st = ws.station(site=site, selection=selection)
                st = ws.station(site=site, selection=selection)
            except ws.EmptyResult as e:
                logging.error('No results: %s %s. skip' % (e, array_id))
                continue
            except ValueError as e:
                logger.error(e)
                logger.error('...skipping...')
                continue

            stations = st.get_pyrocko_stations()

            min_dist = min(
                [ortho.distance_accurate50m(s, event) for s in stations])
            max_dist = max(
                [ortho.distance_accurate50m(s, event) for s in stations])

            mod = cake.load_model(crust2_profile=(event.lat, event.lon))
            if length:
                tstart = 0.
                tend = length
            elif timing:
                tstart = timing[0].t(mod, (event.depth, min_dist))
                tend = timing[1].t(mod, (event.depth, max_dist))

            selection = [
                c + tuple((event.time + tstart, event.time + tend))
                for c in codes]

            try:
                d = ws.dataselect(site=site, selection=selection)
                store.remake_dir(sub_directory, force)
                store.remake_dir(pjoin(sub_directory, 'responses'), force)
                fn = pjoin(sub_directory, 'traces.mseed')
                with open(fn, 'wb') as f:
                    f.write(d.read())

                if get_responses:
                    trs = io.load(fn, getdata=False)
                    logger.info('Request responses from %s' % site)
                    if progressbar:
                        pb = progressbar.ProgressBar(
                            maxval=len(trs)).start()

                    for i_tr, tr in enumerate(trs):
                        try:
                            st = ws.station(
                                site=site, selection=selection,
                                level='response')
                            pzresponse = st.get_pyrocko_response(
                                nslc=tr.nslc_id,
                                timespan=(tr.tmin, tr.tmax),
                                fake_input_units=unit)
                            pzresponse.regularize()
                        except fdsnstation.NoResponseInformation as e:
                            logger.warn(
                                "no response information: %s" % e)
                            pzresponse = None
                        except fdsnstation.MultipleResponseInformation as e:
                            logger.warn(
                                "MultipleResponseInformation: %s" % e)
                            pzresponse = None

                        pzresponses[tr.nslc_id] = pzresponse
                        if pzresponse is not None:
                            pzresponses[tr.nslc_id].dump(filename=pjoin(
                                sub_directory, 'responses',
                                'resp_%s.yaml' % '.'.join(tr.nslc_id)))

                        if progressbar:
                            pb.update(i_tr)

                    if progressbar:
                        pb.finish()

                model.dump_stations(
                    stations, pjoin(sub_directory, 'stations.pf'))

                if timing:
                    t = Timings(list(timing))
                    self.timings[array_id] = t

                if array_id not in use and array_id not in self.use:
                    use.append(array_id)
            except ws.EmptyResult as e:
                logging.error('%s on %s' % (e, array_id))

    self.use.extend(use)
from pyrocko import cake
import collections
import numpy as num
import matplotlib.pyplot as plt

"""
Should make clear why I use 4 seconds as static length.
"""

km = 1000
ttt = collections.defaultdict(dict)
m = cake.load_model("earthmodel_castor.txt")
phases = ["Pv12.5p", "P", "p", "pP"]
phases = [cake.PhaseDef(p) for p in phases]
distances = num.linspace(10, 200, 40) * km * cake.m2d
print(distances)
toplot = []
for d in distances:
    allts = []
    print(d)
    for p in phases:
        for arrival in m.arrivals([d], phases=p, zstart=2000):
            print(arrival)
            allts.append(arrival.t)

    toplot.append([d, max(allts) - min(allts)])
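# The script above collects the spread between earliest and latest
# arrival per distance in `toplot`, but never draws it although
# matplotlib is imported. A plausible completion (assumption: spread
# vs. distance is what was meant to be shown):
toplot = num.array(toplot)
plt.plot(toplot[:, 0], toplot[:, 1])
plt.xlabel('Distance [deg]')
plt.ylabel('max(t) - min(t) [s]')
plt.show()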
def optparse(
        required=(),
        optional=(),
        args=sys.argv,
        usage='%prog [options]',
        descr=None):

    want = required + optional

    parser = OptionParser(
        prog='cake',
        usage=usage,
        description=descr.capitalize() + '.',
        add_help_option=False,
        formatter=util.BetterHelpFormatter())

    parser.add_option(
        '-h', '--help', action='help', help='Show help message and exit.')

    if 'phases' in want:
        group = OptionGroup(parser, 'Phases', '''
Seismic phase arrivals may be either specified as traditional phase names
(e.g. P, S, PP, PcP, ...) or in Cake's own syntax which is more powerful.
Use the --classic option, for traditional phase names. Use the --phase
option if you want to define phases in Cake's syntax.
''')
        group.add_option(
            '--phase', '--phases', dest='phases', action="append",
            default=[], metavar='PHASE1,PHASE2,...',
            help='''Comma separated list of seismic phases in Cake's syntax.

The definition of a seismic propagation path in Cake's phase syntax is a
string consisting of an alternating sequence of "legs" and "knees".

A "leg" represents seismic wave propagation without any conversions,
encountering only super-critical reflections. Legs are denoted by "P",
"p", "S", or "s". The capital letters are used when the take-off of the
"leg" is in downward direction, while the lower case letters indicate a
take-off in upward direction.

A "knee" is an interaction with an interface. It can be a mode
conversion, a reflection, or propagation as a headwave or diffracted
wave.

   * conversion is simply denoted as: "(INTERFACE)" or "DEPTH"
   * upperside reflection: "v(INTERFACE)" or "vDEPTH"
   * underside reflection: "^(INTERFACE)" or "^DEPTH"
   * normal kind headwave or diffracted wave: "v_(INTERFACE)" or "v_DEPTH"

The interface may be given by name or by depth: INTERFACE is the name of
an interface defined in the model, DEPTH is the depth of an interface in
[km] (the interface closest to that depth is chosen). If two legs appear
consecutively without an explicit "knee", surface interaction is assumed.

The preferred standard interface names in cake are "conrad", "moho",
"cmb" (core-mantle boundary), and "cb" (inner core boundary).

The phase definition may end with a backslash "\\", to indicate that the
ray should arrive at the receiver from above instead of from below. It is
possible to restrict the maximum and minimum depth of a "leg" by
appending "<(INTERFACE)" or "<DEPTH" or ">(INTERFACE)" or ">DEPTH" after
the leg character, respectively.

When plotting rays or travel-time curves, the color can be set by
appending "{COLOR}" to the phase definition, where COLOR is the name of a
color or an RGB or RGBA color tuple in the format "R/G/B" or "R/G/B/A",
respectively. The values can be normalized to the range [0, 1] or to
[0, 255]. The latter is only assumed when any of the values given exceeds
1.0.
''')

        group.add_option(
            '--classic', dest='classic_phases', action='append',
            default=[], metavar='PHASE1,PHASE2,...',
            help='''Comma separated list of seismic phases in classic
nomenclature. Run "cake list-phase-map" for a list of available phase
names. When plotting, color can be specified in the same way as in
--phases.''')

        parser.add_option_group(group)

    if 'model' in want:
        group = OptionGroup(parser, 'Model')
        group.add_option(
            '--model', dest='model_filename', metavar='(NAME or FILENAME)',
            help='Use builtin model named NAME or user model from file '
                 'FILENAME. By default, the "ak135-f-continental.m" model '
                 'is used. Run "cake list-models" for a list of builtin '
                 'models.')
        group.add_option(
            '--format', dest='model_format', metavar='FORMAT',
            choices=['nd', 'hyposat'], default='nd',
            help='Set model file format (available: nd, hyposat; default: '
                 'nd).')
        group.add_option(
            '--crust2loc', dest='crust2loc', metavar='LAT,LON',
            help='Set model from CRUST2.0 profile at location (LAT,LON).')
        group.add_option(
            '--crust2profile', dest='crust2profile', metavar='KEY',
            help='Set model from CRUST2.0 profile with given KEY.')

        parser.add_option_group(group)

    if any(x in want for x in (
            'zstart', 'zstop', 'distances', 'sloc', 'rloc')):

        group = OptionGroup(parser, 'Source-receiver geometry')
        if 'zstart' in want:
            group.add_option(
                '--sdepth', dest='sdepth', type='float', default=0.0,
                metavar='FLOAT',
                help='Source depth [km] (default: 0)')
        if 'zstop' in want:
            group.add_option(
                '--rdepth', dest='rdepth', type='float', default=0.0,
                metavar='FLOAT',
                help='Receiver depth [km] (default: 0)')
        if 'distances' in want:
            group.add_option(
                '--distances', dest='sdist', metavar='DISTANCES',
                help='Surface distances as "start:stop:n" or '
                     '"dist1,dist2,..." [km]')
            group.add_option(
                '--sloc', dest='sloc', metavar='LAT,LON',
                help='Source location (LAT,LON).')
            group.add_option(
                '--rloc', dest='rloc', metavar='LAT,LON',
                help='Receiver location (LAT,LON).')
        parser.add_option_group(group)

    if 'material' in want:
        group = OptionGroup(
            parser, 'Material',
            'An isotropic elastic material may be specified by giving '
            'a combination of some of the following options. ')
        group.add_option(
            '--vp', dest='vp', default=None, type='float',
            metavar='FLOAT', help='P-wave velocity [km/s]')
        group.add_option(
            '--vs', dest='vs', default=None, type='float',
            metavar='FLOAT', help='S-wave velocity [km/s]')
        group.add_option(
            '--rho', dest='rho', default=None, type='float',
            metavar='FLOAT', help='density [g/cm**3]')
        group.add_option(
            '--qp', dest='qp', default=None, type='float',
            metavar='FLOAT', help='P-wave attenuation Qp (default: 1456)')
        group.add_option(
            '--qs', dest='qs', default=None, type='float',
            metavar='FLOAT', help='S-wave attenuation Qs (default: 600)')
        group.add_option(
            '--poisson', dest='poisson', default=None, type='float',
            metavar='FLOAT', help='Poisson ratio')
        group.add_option(
            '--lambda', dest='lame_lambda', default=None, type='float',
            metavar='FLOAT', help='Lame parameter lambda [GPa]')
        group.add_option(
            '--mu', dest='lame_mu', default=None, type='float',
            metavar='FLOAT', help='Shear modulus [GPa]')
        group.add_option(
            '--qk', dest='qk', default=None, type='float',
            metavar='FLOAT', help='Bulk attenuation Qk')
        group.add_option(
            '--qmu', dest='qmu', default=None, type='float',
            metavar='FLOAT', help='Shear attenuation Qmu')
        parser.add_option_group(group)

    if any(x in want for x in (
            'vred', 'as_degrees', 'accuracy', 'slowness', 'interface',
            'aspect', 'shade_model')):

        group = OptionGroup(parser, 'General')
        if 'vred' in want:
            group.add_option(
                '--vred', dest='vred', type='float', metavar='FLOAT',
                help='Velocity for time reduction in plot [km/s]')

        if 'as_degrees' in want:
            group.add_option(
                '--degrees', dest='as_degrees', action='store_true',
                default=False,
                help='Distances are in [deg] instead of [km], velocities '
                     'in [deg/s] instead of [km/s], slownesses in [s/deg] '
                     'instead of [s/km].')

        if 'accuracy' in want:
            group.add_option(
                '--accuracy', dest='accuracy', type='float',
                metavar='MAXIMUM_RELATIVE_RMS', default=0.002,
                help='Set accuracy for model simplification.')

        if 'slowness' in want:
            group.add_option(
                '--slowness', dest='slowness', type='float',
                metavar='FLOAT', default=0.0,
                help='Select surface slowness [s/km] (default: 0)')

        if 'interface' in want:
            group.add_option(
                '--interface', dest='interface',
                metavar='(NAME or DEPTH)',
                help='Name or depth [km] of interface to select')

        if 'aspect' in want:
            group.add_option(
                '--aspect', dest='aspect', type='float', metavar='FLOAT',
                help='Aspect ratio for plot')

        if 'shade_model' in want:
            group.add_option(
                '--no-shade-model', dest='shade_model',
                action='store_false', default=True,
                help='Suppress shading of earth model layers')

        parser.add_option_group(group)

    if any(x in want for x in ('output_format',)):
        group = OptionGroup(parser, 'Output')
        if 'output_format' in want:
            group.add_option(
                '--output-format', dest='output_format', metavar='FORMAT',
                default='textual', choices=('textual', 'nd'),
                help='Set model output format (available: textual, nd, '
                     'default: textual)')

        parser.add_option_group(group)

    if usage == 'cake help-options':
        parser.print_help()

    (options, args) = parser.parse_args(args)

    if len(args) != 2:
        parser.error(
            'Cake arguments should look like "--option" or "--option=...".')

    d = {}
    as_degrees = False
    if 'as_degrees' in want:
        as_degrees = options.as_degrees
        d['as_degrees'] = as_degrees

    if 'accuracy' in want:
        d['accuracy'] = options.accuracy

    if 'output_format' in want:
        d['output_format'] = options.output_format

    if 'aspect' in want:
        d['aspect'] = options.aspect

    if 'shade_model' in want:
        d['shade_model'] = options.shade_model

    if 'phases' in want:
        phases = []
        phase_colors = {}
        try:
            for ss in options.phases:
                for s in ss.split(','):
                    s = process_color(s, phase_colors)
                    phases.append(cake.PhaseDef(s))

            for pp in options.classic_phases:
                for p in pp.split(','):
                    p = process_color(p, phase_colors)
                    phases.extend(cake.PhaseDef.classic(p))

        except (cake.PhaseDefParseError, cake.UnknownClassicPhase) as e:
            parser.error(e)

        if not phases and 'phases' in required:
            s = process_color('P', phase_colors)
            phases.append(cake.PhaseDef(s))

        if phases:
            d['phase_colors'] = phase_colors
            d['phases'] = phases

    if 'model' in want:
        if options.model_filename:
            d['model'] = cake.load_model(
                options.model_filename, options.model_format)

        if options.crust2loc or options.crust2profile:
            if options.crust2loc:
                try:
                    args = tuple(
                        [float(x) for x in options.crust2loc.split(',')])
                except Exception:
                    parser.error(
                        'format for --crust2loc option is '
                        '"LATITUDE,LONGITUDE"')
            elif options.crust2profile:
                args = (options.crust2profile.upper(),)
            else:
                assert False

            if 'model' in d:
                d['model'] = d['model'].replaced_crust(args)
            else:
                from pyrocko import crust2x2
                profile = crust2x2.get_profile(*args)
                d['model'] = cake.LayeredModel.from_scanlines(
                    cake.from_crust2x2_profile(profile))

    if 'vred' in want:
        d['vred'] = options.vred
        if d['vred'] is not None:
            if not as_degrees:
                d['vred'] *= r2d * cake.km / cake.earthradius

    if 'distances' in want:
        distances = None
        if options.sdist:
            if options.sdist.find(':') != -1:
                ssn = options.sdist.split(':')
                if len(ssn) != 3:
                    parser.error(
                        'format for distances is '
                        '"min_distance:max_distance:n_distances"')

                distances = num.linspace(
                    float(ssn[0]), float(ssn[1]), int(ssn[2]))
            else:
                distances = num.array(
                    list(map(float, options.sdist.split(','))),
                    dtype=float)

            if not as_degrees:
                distances *= r2d * cake.km / cake.earthradius

        if options.sloc and options.rloc:
            try:
                slat, slon = tuple(
                    [float(x) for x in options.sloc.split(',')])
                rlat, rlon = tuple(
                    [float(x) for x in options.rloc.split(',')])
            except Exception:
                parser.error(
                    'format for --sloc and --rloc options is '
                    '"LATITUDE,LONGITUDE"')

            distance_sr = orthodrome.distance_accurate50m_numpy(
                slat, slon, rlat, rlon)
            distance_sr *= r2d / cake.earthradius
            if distances is not None:
                distances = num.concatenate((distances, [distance_sr]))
            else:
                distances = num.array([distance_sr], dtype=float)

        if distances is not None:
            d['distances'] = distances
        else:
            if 'distances' not in required:
                d['distances'] = None

    if 'slowness' in want:
        d['slowness'] = options.slowness / cake.d2r
        if not as_degrees:
            d['slowness'] /= cake.km * cake.m2d

    if 'interface' in want:
        if options.interface:
            try:
                d['interface'] = float(options.interface) * cake.km
            except ValueError:
                d['interface'] = options.interface
        else:
            d['interface'] = None

    if 'zstart' in want:
        d['zstart'] = options.sdepth * cake.km

    if 'zstop' in want:
        d['zstop'] = options.rdepth * cake.km

    if 'material' in want:
        md = {}
        userfactor = dict(
            vp=1000., vs=1000., rho=1000., qp=1., qs=1., qmu=1., qk=1.,
            lame_lambda=1.0e9, lame_mu=1.0e9, poisson=1.)

        for k in userfactor.keys():
            if getattr(options, k) is not None:
                md[k] = getattr(options, k) * userfactor[k]

        if not (bool('lame_lambda' in md) == bool('lame_mu' in md)):
            parser.error('lambda and mu must be specified both.')

        if 'lame_lambda' in md and 'lame_mu' in md:
            md['lame'] = md.pop('lame_lambda'), md.pop('lame_mu')

        if md:
            try:
                d['material'] = cake.Material(**md)
            except cake.InvalidArguments as e:
                parser.error(str(e))

    for k in list(d.keys()):
        if k not in want:
            del d[k]

    for k in required:
        if k not in d:
            if k == 'model':
                d['model'] = cake.load_model('ak135-f-continental.m')
            elif k == 'distances':
                d['distances'] = num.linspace(10*cake.km, 100*cake.km, 10) \
                    / cake.earthradius * r2d
            elif k == 'phases':
                d['phases'] = list(map(cake.PhaseDef, 'Pp'))
            else:
                parser.error('missing %s' % k)

    return Anon(d)
import os.path as op
import logging

import numpy as num

from pyrocko import spit, cake

logging.basicConfig(level=logging.INFO)

km = 1000.

model = cake.load_model('ak135-f-average.f')
phases = [
    cake.PhaseDef('P'),
    cake.PhaseDef('p'),
    cake.PhaseDef('Pv_(cmb)p')]


def f(x):
    source_depth, distance = x
    rays = model.arrivals(
        phases=phases, distances=[distance], zstart=source_depth)

    if rays:
        return rays[0].takeoff_angle()

    return None


fn = 'takeoffangles.sptree'
if not op.exists(fn):
    tree = spit.SPTree(f, 0.01, [[0., 750*km], [0., 120.]], [1.*km, 0.1])
    tree.dump(fn)
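# Counterpart to the dump above: reload the tree and evaluate it without
# re-running cake. Treat the exact spit API used here (the filename
# constructor argument and interpolate()) as an assumption:
tree = spit.SPTree(filename=fn)
angle = tree.interpolate(num.array([33*km, 45.]))  # (depth [m], dist [deg])
print('interpolated takeoff angle:', angle)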
def example():
    conf = PsGrnConfigFull()
    conf.earthmodel_1d = cake.load_model().extract(depth_max=100*km)
    conf.psgrn_outdir = 'TEST_psgrn_functions/'
    return conf
def main(): parser = OptionParser(usage=usage, description=description) parser.add_option('--force', dest='force', action='store_true', default=False, help='allow recreation of output <directory>') parser.add_option('--debug', dest='debug', action='store_true', default=False, help='print debugging information to stderr') parser.add_option('--dry-run', dest='dry_run', action='store_true', default=False, help='show available stations/channels and exit ' '(do not download waveforms)') parser.add_option('--continue', dest='continue_', action='store_true', default=False, help='continue download after a accident') parser.add_option('--local-data', dest='local_data', action='append', help='add file/directory with local data') parser.add_option('--local-stations', dest='local_stations', action='append', help='add local stations file') parser.add_option('--selection', dest='selection_file', action='append', help='add local stations file') parser.add_option( '--local-responses-resp', dest='local_responses_resp', action='append', help='add file/directory with local responses in RESP format') parser.add_option('--local-responses-pz', dest='local_responses_pz', action='append', help='add file/directory with local pole-zero responses') parser.add_option( '--local-responses-stationxml', dest='local_responses_stationxml', help='add file with local response information in StationXML format') parser.add_option( '--window', dest='window', default='full', help='set time window to choose [full, p, "<time-start>,<time-end>"' '] (time format is YYYY-MM-DD HH:MM:SS)') parser.add_option( '--out-components', choices=['enu', 'rtu'], dest='out_components', default='rtu', help='set output component orientations to radial-transverse-up [rtu] ' '(default) or east-north-up [enu]') parser.add_option('--out-units', choices=['M', 'M/S', 'M/S**2'], dest='output_units', default='M', help='set output units to displacement "M" (default),' ' velocity "M/S" or acceleration "M/S**2"') parser.add_option( '--padding-factor', type=float, default=3.0, dest='padding_factor', help='extend time window on either side, in multiples of 1/<fmin_hz> ' '(default: 5)') parser.add_option( '--zero-padding', dest='zero_pad', action='store_true', default=False, help='Extend traces by zero-padding if clean restitution requires' 'longer windows') parser.add_option( '--credentials', dest='user_credentials', action='append', default=[], metavar='SITE,USER,PASSWD', help='user credentials for specific site to access restricted data ' '(this option can be repeated)') parser.add_option( '--token', dest='auth_tokens', metavar='SITE,FILENAME', action='append', default=[], help='user authentication token for specific site to access ' 'restricted data (this option can be repeated)') parser.add_option( '--sites', dest='sites', metavar='SITE1,SITE2,...', default='geofon,iris,orfeus', help='sites to query (available: %s, default: "%%default"' % ', '.join(g_sites_available)) parser.add_option( '--band-codes', dest='priority_band_code', metavar='V,L,M,B,H,S,E,...', default='B,H', help='select and prioritize band codes (default: %default)') parser.add_option( '--instrument-codes', dest='priority_instrument_code', metavar='H,L,G,...', default='H,L', help='select and prioritize instrument codes (default: %default)') parser.add_option('--radius-min', dest='radius_min', metavar='VALUE', default=0.0, type=float, help='minimum radius [km]') parser.add_option('--nstations-wanted', dest='nstations_wanted', metavar='N', type=int, help='number of stations to select 
initially') (options, args) = parser.parse_args(sys.argv[1:]) print('Parsed arguments:', args) if len(args) not in (10, 7, 6): parser.print_help() sys.exit(1) if options.debug: util.setup_logging(program_name, 'debug') else: util.setup_logging(program_name, 'info') if options.local_responses_pz and options.local_responses_resp: logger.critical('cannot use local responses in PZ and RESP ' 'format at the same time') sys.exit(1) n_resp_opt = 0 for resp_opt in (options.local_responses_pz, options.local_responses_resp, options.local_responses_stationxml): if resp_opt: n_resp_opt += 1 if n_resp_opt > 1: logger.critical('can only handle local responses from either PZ or ' 'RESP or StationXML. Cannot yet merge different ' 'response formats.') sys.exit(1) if options.local_responses_resp and not options.local_stations: logger.critical('--local-responses-resp can only be used ' 'when --stations is also given.') sys.exit(1) try: ename = '' magnitude = None mt = None if len(args) == 10: time = util.str_to_time(args[1] + ' ' + args[2]) lat = float(args[3]) lon = float(args[4]) depth = float(args[5]) * km iarg = 6 elif len(args) == 7: if args[2].find(':') == -1: sname_or_date = None lat = float(args[1]) lon = float(args[2]) event = None time = None else: sname_or_date = args[1] + ' ' + args[2] iarg = 3 elif len(args) == 6: sname_or_date = args[1] iarg = 2 if len(args) in (7, 6) and sname_or_date is not None: events = get_events_by_name_or_date([sname_or_date], catalog=geofon) if len(events) == 0: logger.critical('no event found') sys.exit(1) elif len(events) > 1: logger.critical('more than one event found') sys.exit(1) event = events[0] time = event.time lat = event.lat lon = event.lon depth = event.depth ename = event.name magnitude = event.magnitude mt = event.moment_tensor radius = float(args[iarg]) * km fmin = float(args[iarg + 1]) sample_rate = float(args[iarg + 2]) eventname = args[iarg + 3] cwd = str(sys.argv[1]) event_dir = op.join(cwd, 'data', 'events', eventname) output_dir = op.join(event_dir, 'waveforms') except: raise parser.print_help() sys.exit(1) if options.force and op.isdir(event_dir): if not options.continue_: shutil.rmtree(event_dir) if op.exists(event_dir) and not options.continue_: logger.critical( 'directory "%s" exists. Delete it first or use the --force option' % event_dir) sys.exit(1) util.ensuredir(output_dir) if time is not None: event = model.Event(time=time, lat=lat, lon=lon, depth=depth, name=ename, magnitude=magnitude, moment_tensor=mt) if options.window == 'full': if event is None: logger.critical('need event for --window=full') sys.exit(1) low_velocity = 1500. 
timewindow = VelocityWindow(low_velocity, tpad=options.padding_factor / fmin) tmin, tmax = timewindow(time, radius, depth) elif options.window == 'p': if event is None: logger.critical('need event for --window=p') sys.exit(1) phases = list(map(cake.PhaseDef, 'P p'.split())) emod = cake.load_model() tpad = options.padding_factor / fmin timewindow = PhaseWindow(emod, phases, -tpad, tpad) arrivaltimes = [] for dist in num.linspace(0, radius, 20): try: arrivaltimes.extend(timewindow(time, dist, depth)) except NoArrival: pass if not arrivaltimes: logger.error('required phase arrival not found') sys.exit(1) tmin = min(arrivaltimes) tmax = max(arrivaltimes) else: try: stmin, stmax = options.window.split(',') tmin = util.str_to_time(stmin.strip()) tmax = util.str_to_time(stmax.strip()) timewindow = FixedWindow(tmin, tmax) except ValueError: logger.critical('invalid argument to --window: "%s"' % options.window) sys.exit(1) if event is not None: event.name = eventname tfade = tfade_factor / fmin tpad = tfade tmin -= tpad tmax += tpad tinc = None priority_band_code = options.priority_band_code.split(',') for s in priority_band_code: if len(s) != 1: logger.critical('invalid band code: %s' % s) priority_instrument_code = options.priority_instrument_code.split(',') for s in priority_instrument_code: if len(s) != 1: logger.critical('invalid instrument code: %s' % s) station_query_conf = dict(latitude=lat, longitude=lon, minradius=options.radius_min * km * cake.m2d, maxradius=radius * cake.m2d, channel=','.join('%s??' % s for s in priority_band_code)) target_sample_rate = sample_rate fmax = target_sample_rate # target_sample_rate = None # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S'] priority_units = ['M/S', 'M', 'M/S**2'] # output_units = 'M' sites = [x.strip() for x in options.sites.split(',') if x.strip()] for site in sites: if site not in g_sites_available: logger.critical('unknown FDSN site: %s' % site) sys.exit(1) for s in options.user_credentials: try: site, user, passwd = s.split(',') g_user_credentials[site] = user, passwd except ValueError: logger.critical('invalid format for user credentials: "%s"' % s) sys.exit(1) for s in options.auth_tokens: try: site, token_filename = s.split(',') with open(token_filename, 'r') as f: g_auth_tokens[site] = f.read() except (ValueError, OSError, IOError): logger.critical('cannot get token from file: %s' % token_filename) sys.exit(1) fn_template0 = \ 'data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed' fn_template_raw = op.join(output_dir, 'raw', fn_template0) fn_stations_raw = op.join(output_dir, 'stations.raw.txt') fn_template_rest = op.join(output_dir, 'rest', fn_template0) fn_commandline = op.join(output_dir, 'beatdown.command') ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax) # chapter 1: download sxs = [] for site in sites: try: extra_args = { 'iris': dict(matchtimeseries=True), }.get(site, {}) extra_args.update(station_query_conf) if site == 'geonet': extra_args.update(starttime=tmin, endtime=tmax) else: extra_args.update(startbefore=tmax, endafter=tmin, includerestricted=(site in g_user_credentials or site in g_auth_tokens)) logger.info('downloading channel information (%s)' % site) sx = fdsn.station(site=site, format='text', level='channel', **extra_args) except fdsn.EmptyResult: logger.error('No stations matching given criteria. 
            (%s)' % site)
        sx = None

    if sx is not None:
        sxs.append(sx)

if all(sx is None for sx in sxs) and not options.local_data:
    sys.exit(1)

nsl_to_sites = defaultdict(list)
nsl_to_station = {}

if options.selection_file:
    logger.info('using stations from stations file!')
    stations = []
    for fn in options.selection_file:
        stations.extend(model.load_stations(fn))

    nsls_selected = set(s.nsl() for s in stations)
else:
    nsls_selected = None

for sx, site in zip(sxs, sites):
    site_stations = sx.get_pyrocko_stations()
    for s in site_stations:
        nsl = s.nsl()
        nsl_to_sites[nsl].append(site)
        if nsl not in nsl_to_station:
            if nsls_selected:
                if nsl in nsls_selected:
                    nsl_to_station[nsl] = s
            else:
                nsl_to_station[nsl] = s  # using first site with this station

logger.info('number of stations found: %i' % len(nsl_to_station))

# station weeding
if options.nstations_wanted:
    nsls_selected = None
    stations_all = [
        nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    for s in stations_all:
        s.set_event_relative_data(event)

    stations_selected = weeding.weed_stations(
        stations_all, options.nstations_wanted)[0]

    nsls_selected = set(s.nsl() for s in stations_selected)
    logger.info('number of stations selected: %i' % len(nsls_selected))

if tinc is None:
    tinc = 3600.

have_data = set()

if options.continue_:
    fns = glob.glob(fn_template_raw % starfill())
    p = pile.make_pile(fns)
else:
    fns = []

have_data_site = {}
could_have_data_site = {}
for site in sites:
    have_data_site[site] = set()
    could_have_data_site[site] = set()

available_through = defaultdict(set)
it = 0
nt = int(math.ceil((tmax - tmin) / tinc))
for it in range(nt):
    tmin_win = tmin + it * tinc
    tmax_win = min(tmin + (it + 1) * tinc, tmax)
    logger.info('time window %i/%i (%s - %s)' % (
        it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

    have_data_this_window = set()
    if options.continue_:
        trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
        for tr in trs_avail:
            have_data_this_window.add(tr.nslc_id)

    for site, sx in zip(sites, sxs):
        if sx is None:
            continue

        selection = []
        channels = sx.choose_channels(
            target_sample_rate=target_sample_rate,
            priority_band_code=priority_band_code,
            priority_units=priority_units,
            priority_instrument_code=priority_instrument_code,
            timespan=(tmin_win, tmax_win))

        for nslc in sorted(channels.keys()):
            if nsls_selected is not None and nslc[:3] not in nsls_selected:
                continue

            could_have_data_site[site].add(nslc)

            if nslc not in have_data_this_window:
                channel = channels[nslc]
                if event:
                    lat_, lon_ = event.lat, event.lon
                else:
                    lat_, lon_ = lat, lon

                try:
                    dist = orthodrome.distance_accurate50m_numpy(
                        lat_, lon_,
                        channel.latitude.value, channel.longitude.value)
                except AttributeError:
                    dist = orthodrome.distance_accurate50m_numpy(
                        lat_, lon_, channel.latitude, channel.longitude)

                if event:
                    depth_ = event.depth
                    time_ = event.time
                else:
                    depth_ = None
                    time_ = None

                tmin_, tmax_ = timewindow(time_, dist, depth_)

                tmin_this = tmin_ - tpad
                tmax_this = float(tmax_ + tpad)

                tmin_req = max(tmin_win, tmin_this)
                tmax_req = min(tmax_win, tmax_this)
                if channel.sample_rate:
                    try:
                        # note: integer truncation; breaks for sub-1 Hz
                        # sample rates
                        deltat = 1.0 / int(channel.sample_rate.value)
                    except AttributeError:
                        deltat = 1.0 / int(channel.sample_rate)
                else:
                    deltat = 1.0

                if tmin_req < tmax_req:
                    logger.debug('deltat %f' % deltat)
                    # extend time window by some samples because otherwise
                    # sometimes gaps are produced. Apparently the web
                    # services are only sensitive to full seconds; round to
                    # avoid gaps, increase safety window.
                    selection.append(
                        nslc + (math.floor(tmin_req - deltat * 20.0),
                                math.ceil(tmax_req + deltat * 20.0)))

        if options.dry_run:
            for (net, sta, loc, cha, tmin, tmax) in selection:
                available_through[net, sta, loc, cha].add(site)

        else:
            neach = 100
            i = 0
            nbatches = ((len(selection) - 1) // neach) + 1
            while i < len(selection):
                selection_now = selection[i:i + neach]
                f = tempfile.NamedTemporaryFile()
                try:
                    sbatch = ''
                    if nbatches > 1:
                        sbatch = ' (batch %i/%i)' % (
                            (i // neach) + 1, nbatches)

                    logger.info('downloading data (%s)%s' % (site, sbatch))
                    data = fdsn.dataselect(
                        site=site, selection=selection_now,
                        **get_user_credentials(site))

                    while True:
                        buf = data.read(1024)
                        if not buf:
                            break
                        f.write(buf)

                    f.flush()

                    trs = io.load(f.name)
                    for tr in trs:
                        tr.fix_deltat_rounding_errors()
                        logger.debug(
                            'cutting window: %f - %f' % (tmin_win, tmax_win))
                        logger.debug(
                            'available window: %f - %f, nsamples: %g' %
                            (tr.tmin, tr.tmax, tr.ydata.size))
                        try:
                            logger.debug('tmin before snap %f' % tr.tmin)
                            tr.snap(interpolate=True)
                            logger.debug('tmin after snap %f' % tr.tmin)
                            tr.chop(
                                tmin_win, tmax_win,
                                snap=(math.floor, math.ceil),
                                include_last=True)
                            logger.debug(
                                'cut window: %f - %f, nsamples: %g' %
                                (tr.tmin, tr.tmax, tr.ydata.size))
                            have_data.add(tr.nslc_id)
                            have_data_site[site].add(tr.nslc_id)
                        except trace.NoData:
                            pass

                    fns2 = io.save(trs, fn_template_raw)
                    for fn in fns2:
                        if fn in fns:
                            logger.warning('overwriting file %s', fn)
                    fns.extend(fns2)

                except fdsn.EmptyResult:
                    pass

                except HTTPError:
                    logger.warning(
                        'an error occurred while downloading data '
                        'for channels \n %s' % '\n '.join(
                            '.'.join(x[:4]) for x in selection_now))

                f.close()
                i += neach

if options.dry_run:
    nslcs = sorted(available_through.keys())

    all_channels = defaultdict(set)
    all_stations = defaultdict(set)

    def plural_s(x):
        return '' if x == 1 else 's'

    for nslc in nslcs:
        sites = tuple(sorted(available_through[nslc]))
        logger.info(
            'selected: %s.%s.%s.%s from site%s %s' %
            (nslc + (plural_s(len(sites)), '+'.join(sites))))

        all_channels[sites].add(nslc)
        all_stations[sites].add(nslc[:3])

    nchannels_all = 0
    nstations_all = 0
    for sites in sorted(all_channels.keys(),
                        key=lambda sites: (-len(sites), sites)):

        nchannels = len(all_channels[sites])
        nstations = len(all_stations[sites])
        nchannels_all += nchannels
        nstations_all += nstations
        logger.info(
            'selected (%s): %i channel%s (%i station%s)' % (
                '+'.join(sites),
                nchannels, plural_s(nchannels),
                nstations, plural_s(nstations)))

    logger.info(
        'selected total: %i channel%s (%i station%s)' % (
            nchannels_all, plural_s(nchannels_all),
            nstations_all, plural_s(nstations_all)))

    logger.info('dry run done.')
    sys.exit(0)

for nslc in have_data:
    # if we are in continue mode, we have to guess where the data came from
    if not any(nslc in have_data_site[site] for site in sites):
        for site in sites:
            if nslc in could_have_data_site[site]:
                have_data_site[site].add(nslc)

sxs = {}
for site in sites:
    selection = []
    for nslc in sorted(have_data_site[site]):
        selection.append(nslc + (tmin - tpad, tmax + tpad))

    if selection:
        logger.info('downloading response information (%s)' % site)
        sxs[site] = fdsn.station(
            site=site, level='response', selection=selection)

        sxs[site].dump_xml(
            filename=op.join(output_dir, 'stations.%s.xml' % site))

# chapter 1.5: inject local data

if options.local_data:
    have_data_site['local'] = set()
    plocal = pile.make_pile(options.local_data, fileformat='detect')
    logger.info(
        'Importing local data from %s between %s (%f) and %s (%f)' % (
            options.local_data,
            util.time_to_str(tmin), tmin,
            util.time_to_str(tmax), tmax))

    for traces in plocal.chopper_grouped(
            gather=lambda tr: tr.nslc_id, tmin=tmin, tmax=tmax, tinc=tinc):

        for tr in traces:
            if tr.nslc_id not in have_data:
                fns.extend(io.save(traces, fn_template_raw))
                have_data_site['local'].add(tr.nslc_id)
                have_data.add(tr.nslc_id)

    sites.append('local')

if options.local_responses_pz:
    sxs['local'] = epz.make_stationxml(
        epz.iload(options.local_responses_pz))

if options.local_responses_resp:
    local_stations = []
    for fn in options.local_stations:
        local_stations.extend(model.load_stations(fn))

    sxs['local'] = resp.make_stationxml(
        local_stations, resp.iload(options.local_responses_resp))

if options.local_responses_stationxml:
    sxs['local'] = stationxml.load_xml(
        filename=options.local_responses_stationxml)

# chapter 1.6: dump raw data stations file

nsl_to_station = {}
for site in sites:
    if site in sxs:
        stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
        for s in stations:
            nsl = s.nsl()
            if nsl not in nsl_to_station:
                nsl_to_station[nsl] = s

stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

util.ensuredirs(fn_stations_raw)
model.dump_stations(stations, fn_stations_raw)

dump_commandline(sys.argv, fn_commandline)

# chapter 2: restitution

if not fns:
    logger.error('no data available')
    sys.exit(1)

p = pile.make_pile(fns, show_progress=False)
otinc = nice_seconds_floor(p.get_deltatmin() * 500000.)
otinc = 3600.  # fixed one-hour windows; overrides the adaptive choice above
otmin = math.floor(p.tmin / otinc) * otinc
otmax = math.ceil(p.tmax / otinc) * otinc
otpad = tpad * 2

fns = []
rest_traces_b = []
win_b = None
for traces_a in p.chopper_grouped(
        gather=lambda tr: tr.nslc_id,
        tmin=otmin, tmax=otmax, tinc=otinc, tpad=otpad):

    rest_traces_a = []
    win_a = None
    for tr in traces_a:
        win_a = tr.wmin, tr.wmax

        if win_b and win_b[0] >= win_a[0]:
            fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))
            rest_traces_b = []
            win_b = None

        response = None
        failure = []
        for site in sites:
            try:
                if site not in sxs:
                    continue
                logger.debug('Getting response for %s' % tr.__str__())
                response = sxs[site].get_pyrocko_response(
                    tr.nslc_id,
                    timespan=(tr.tmin, tr.tmax),
                    fake_input_units=options.output_units)

                break

            except stationxml.NoResponseInformation:
                failure.append('%s: no response information' % site)

            except stationxml.MultipleResponseInformation:
                failure.append('%s: multiple response information' % site)

        if response is None:
            failure = ', '.join(failure)
        else:
            failure = ''
            try:
                if tr.tmin > tmin and options.zero_pad:
                    logger.warning(
                        'Trace too short for clean restitution in '
                        'desired frequency band -> zero-padding!')
                    tr.extend(tr.tmin - tfade, tr.tmax + tfade, 'repeat')

                rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                rest_traces_a.append(rest_tr)

            except (trace.TraceTooShort, trace.NoData):
                failure = 'trace too short'

        if failure:
            logger.warning(
                'failed to restitute trace %s.%s.%s.%s (%s)' %
                (tr.nslc_id + (failure,)))

    if rest_traces_b:
        rest_traces = trace.degapper(
            rest_traces_b + rest_traces_a, deoverlap='crossfade_cos')

        fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
        rest_traces_a = []
        if win_a:
            for tr in rest_traces:
                try:
                    rest_traces_a.append(
                        tr.chop(win_a[0], win_a[1] + otpad, inplace=False))
                except trace.NoData:
                    pass

    rest_traces_b = rest_traces_a
    win_b = win_a

fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

# chapter 3: rotated restituted traces for inspection

if not event:
    sys.exit(0)

fn_template1 = \
    'DISPL.%(network)s.%(station)s.%(location)s.%(channel)s'

fn_waveforms = op.join(output_dir, 'prepared', fn_template1)
fn_stations = op.join(output_dir, 'stations.prepared.txt')
fn_event = op.join(event_dir, 'event.txt')
fn_event_yaml = op.join(event_dir, 'event.yaml')

nsl_to_station = {}
for site in sites:
    if site in sxs:
        stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
        for s in stations:
            nsl = s.nsl()
            if nsl not in nsl_to_station:
                nsl_to_station[nsl] = s

p = pile.make_pile(fns, show_progress=False)

deltat = None
if sample_rate is not None:
    deltat = 1.0 / sample_rate

traces_beat = []
used_stations = []
for nsl, s in nsl_to_station.items():
    s.set_event_relative_data(event)
    traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)

    if options.out_components == 'rtu':
        pios = s.guess_projections_to_rtu(out_channels=('R', 'T', 'Z'))
    elif options.out_components == 'enu':
        pios = s.guess_projections_to_enu(out_channels=('E', 'N', 'Z'))
    else:
        assert False

    for (proj, in_channels, out_channels) in pios:
        proc = trace.project(traces, proj, in_channels, out_channels)
        for tr in proc:
            tr_beat = heart.SeismicDataset.from_pyrocko_trace(tr)
            traces_beat.append(tr_beat)
            for ch in out_channels:
                if ch.name == tr.channel:
                    s.add_channel(ch)

        if proc:
            io.save(proc, fn_waveforms)
            used_stations.append(s)

stations = list(used_stations)
util.ensuredirs(fn_stations)
model.dump_stations(stations, fn_stations)
model.dump_events([event], fn_event)

from pyrocko.guts import dump
dump([event], filename=fn_event_yaml)

utility.dump_objects(
    op.join(cwd, 'seismic_data.pkl'),
    outlist=[stations, traces_beat])

logger.info('prepared waveforms from %i stations' % len(stations))
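# ---------------------------------------------------------------------------
# A minimal, self-contained sketch of the restitution step from chapter 2
# above, reduced to a single trace. The file names 'data.mseed' and
# 'stations.xml', the taper corner frequencies and the fade time are
# placeholders, not part of the script above.

from pyrocko import io
from pyrocko.io import stationxml

fmin, fmax = 0.01, 5.0
ftap = (fmin / 2., fmin, fmax, fmax * 1.5)  # frequency taper corners [Hz]
tfade = 2.0 / fmin                          # fade-in/out time [s]

sx = stationxml.load_xml(filename='stations.xml')
for tr in io.load('data.mseed'):
    resp = sx.get_pyrocko_response(
        tr.nslc_id, timespan=(tr.tmin, tr.tmax), fake_input_units='M')

    # deconvolve the instrument response -> displacement [m]
    rest = tr.transfer(tfade, ftap, resp, invert=True)
    io.save([rest], 'rest.%(network)s.%(station)s.%(channel)s.mseed')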
def refTrigger(self, RefWaveform, phase, cfg_yaml):
    Config = self.Config
    cfg = ConfigObj(dict=Config)
    name = '%s.%s.%s.%s' % (
        RefWaveform[0].stats.network, RefWaveform[0].stats.station,
        RefWaveform[0].stats.location, RefWaveform[0].stats.channel)

    i = self.searchMeta(name, self.StationMeta)
    de = loc2degrees(self.Origin, i)
    ptime = 0

    Phase = cake.PhaseDef(phase)
    model = cake.load_model()

    if cfg_yaml.config_data.colesseo_input is True:
        arrivals = model.arrivals([de, de], phases=Phase,
                                  zstart=self.Origin.depth, zstop=0.)
    else:
        arrivals = model.arrivals([de, de], phases=Phase,
                                  zstart=self.Origin.depth * km, zstop=0.)
    try:
        ptime = arrivals[0].t
    except Exception:
        arrivals = model.arrivals([de, de], phases=Phase,
                                  zstart=self.Origin.depth * km - 0.1)
        ptime = arrivals[0].t

    if ptime == 0:
        raise Exception("\033[31mILLEGAL: phase definition\033[0m")

    tw = self.calculateTimeWindows(ptime)

    if cfg_yaml.config_data.pyrocko_download is True:
        stP = self.readWaveformsPicker_pyrocko(
            i, tw, self.Origin, ptime, cfg_yaml)
    elif cfg_yaml.config_data.colesseo_input is True:
        stP = self.readWaveformsPicker_colos(
            i, tw, self.Origin, ptime, cfg_yaml)
    else:
        stP = self.readWaveformsPicker(i, tw, self.Origin, ptime, cfg_yaml)

    refuntouchname = os.path.basename(self.AF) + '-refstation-raw.mseed'
    stP.write(os.path.join(self.EventPath, refuntouchname),
              format='MSEED', byteorder='>')
    stP.filter(
        "bandpass",
        freqmin=float(cfg_yaml.config_xcorr.refstationfreqmin),
        freqmax=float(cfg_yaml.config_xcorr.refstationfreqmax))

    stP.trim(tw['xcorrstart'], tw['xcorrend'])
    trP = stP[0]

    trP.stats.starttime = UTCDateTime(3600)
    refname = os.path.basename(self.AF) + '-refstation-filtered.mseed'
    trP.write(os.path.join(self.EventPath, refname),
              format='MSEED', byteorder='>')

    sta = float(cfg_yaml.config_xcorr.refsta)
    lta = float(cfg_yaml.config_xcorr.reflta)
    cft = recSTALTA(trP.data, int(sta * trP.stats.sampling_rate),
                    int(lta * trP.stats.sampling_rate))

    # on/off thresholds taken from the reflta/refsta settings
    t = triggerOnset(cft, lta, sta)

    try:
        onset = t[0][0] / trP.stats.sampling_rate
    except Exception:
        onset = self.mintforerun

    trigger = trP.stats.starttime + onset

    tdiff = (trP.stats.starttime + onset) - (
        UTCDateTime(3600) + self.mintforerun)

    refp = UTCDateTime(self.Origin.time) + ptime
    reftriggeronset = refp + onset - self.mintforerun

    if cfg_yaml.config_xcorr.autoxcorrcorrectur is True:
        refmarkername = os.path.join(
            self.EventPath, '%s-marker' % os.path.basename(self.AF))
        fobjrefmarkername = open(refmarkername, 'w')
        fobjrefmarkername.write('# Snuffler Markers File Version 0.2\n')
        fobjrefmarkername.write(
            'phase: %s 0 %s None None None XWStart None False\n' %
            (tw['xcorrstart'].strftime('%Y-%m-%d %H:%M:%S.%f'), name))
        fobjrefmarkername.write(
            'phase: %s 0 %s None None None XWEnd None False\n' %
            (tw['xcorrend'].strftime('%Y-%m-%d %H:%M:%S.%f'), name))
        fobjrefmarkername.write(
            'phase: %s 1 %s None None None TheoP None False\n' %
            (refp.strftime('%Y-%m-%d %H:%M:%S.%f'), name))
        fobjrefmarkername.write(
            'phase: %s 3 %s None None None XTrig None False' %
            (reftriggeronset.strftime('%Y-%m-%d %H:%M:%S.%f'), name))
        fobjrefmarkername.close()

        cmd = 'snuffler %s --markers=%s&' % (
            os.path.join(self.EventPath, refuntouchname), refmarkername)
        os.system(cmd)

        thrOn = float(self.Config['reflta'])
        thrOff = float(self.Config['refsta'])
        plotTrigger(trP, cft, thrOn, thrOff)

        selection = float(input('Enter self picked phase in seconds: '))
        tdiff = selection - self.mintforerun

        refname = os.path.basename(self.AF) + '-shift.mseed'
        trP.stats.starttime = trP.stats.starttime - selection
        trP.write(os.path.join(self.EventPath, refname), format='MSEED')

    '''
    tdiff = 0
    trigger = trP.stats.starttime
    '''
    To = Trigger(name, trigger, os.path.basename(self.AF), tdiff)

    return tdiff, To
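# ---------------------------------------------------------------------------
# A standalone sketch of the STA/LTA onset detection performed in refTrigger
# above, written against the modern ObsPy names (recursive_sta_lta,
# trigger_onset) rather than the legacy recSTALTA/triggerOnset imports used
# there. Window lengths, thresholds and the synthetic onset are illustrative.

import numpy as np
from obspy.signal.trigger import recursive_sta_lta, trigger_onset

df = 100.0                              # sampling rate [Hz]
data = np.random.randn(30 * int(df))    # 30 s of noise...
data[1500:1600] += 10.0                 # ...with a synthetic onset at 15 s

sta, lta = 0.5, 4.0                     # window lengths [s]
cft = recursive_sta_lta(data, int(sta * df), int(lta * df))

onsets = trigger_onset(cft, 3.5, 0.5)   # on/off thresholds on the ratio
if len(onsets):
    print('first onset at %.2f s' % (onsets[0][0] / df))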
def test_dump(self):
    f = BytesIO()
    mod = cake.load_model()
    mod.profile('vp').dump(f)
    f.close()
        logger.critical('need event for --window=full')
        sys.exit(1)

    low_velocity = 1500.
    timewindow = VelocityWindow(
        low_velocity, tpad=options.padding_factor / fmin)

    tmin, tmax = timewindow(time, radius, depth)

elif options.window == 'p':
    if event is None:
        logger.critical('need event for --window=p')
        sys.exit(1)

    phases = list(map(cake.PhaseDef, 'P p'.split()))
    emod = cake.load_model()

    tpad = options.padding_factor / fmin
    timewindow = PhaseWindow(emod, phases, -tpad, tpad)

    arrivaltimes = []
    for dist in num.linspace(0, radius, 20):
        try:
            arrivaltimes.extend(timewindow(time, dist, depth))
        except NoArrival:
            pass

    if not arrivaltimes:
        logger.error('required phase arrival not found')
        sys.exit(1)
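# ---------------------------------------------------------------------------
# For reference, the first-arrival lookup that a phase window like the one
# above has to perform can be done directly with cake; the distance and
# depth values below are illustrative.

from pyrocko import cake

mod = cake.load_model()                # default earth model
phases = [cake.PhaseDef('P'), cake.PhaseDef('p')]

dist_m = 300e3                         # epicentral distance [m]
depth = 10e3                           # source depth [m]
arrivals = mod.arrivals(
    distances=[dist_m * cake.m2d], phases=phases, zstart=depth)

if arrivals:
    print('first arrival: %.1f s' % min(a.t for a in arrivals))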
def init_config(name, date=None, min_magnitude=6.0, main_path='./',
                datatypes=['geodetic'],
                mode='geometry', source_type='RectangularSource', n_sources=1,
                waveforms=['any_P'], sampler='SMC', hyper_sampler='Metropolis',
                use_custom=False, individual_gfs=False):
    """
    Initialise BEATconfig file and write it to main_path/name.
    Fine parameters have to be edited in the .yaml config file manually.

    Parameters
    ----------
    name : str
        Name of the event
    date : str
        'YYYY-MM-DD', date of the event
    min_magnitude : scalar, float
        approximate minimum Mw of the event
    datatypes : List of strings
        data sets to include in the optimization: either 'geodetic' and/or
        'seismic'
    mode : str
        type of optimization problem: 'Geometry' / 'Static' / 'Kinematic'
    n_sources : int
        number of sources to solve for / discretize depending on mode
        parameter
    waveforms : list
        of strings of waveforms to include into the misfit function and
        GF calculation
    sampler : str
        Optimization algorithm to use to sample the solution space
        Options: 'SMC', 'Metropolis'
    use_custom : boolean
        Flag to setup manually a custom velocity model.
    individual_gfs : boolean
        Flag to use individual Green's Functions for each specific station.
        If false a reference location will be initialised in the config
        file. If true the reference locations will be taken from the
        imported station objects.

    Returns
    -------
    :class:`BEATconfig`
    """

    c = BEATconfig(name=name, date=date)
    c.project_dir = os.path.join(os.path.abspath(main_path), name)

    if mode == 'geometry' or mode == 'interseismic':
        if date is not None and not mode == 'interseismic':
            c.event = utility.search_catalog(
                date=date, min_magnitude=min_magnitude)

        elif mode == 'interseismic':
            c.event = model.Event(lat=10., lon=10., depth=0.)
            c.date = 'dummy'
            logger.info(
                'Interseismic mode! Using event as reference for the'
                ' stable block! Please update coordinates!')
        else:
            logger.warning(
                'No given date! Using dummy event!'
                ' Updating reference coordinates (spatial & temporal)'
                ' necessary!')
            c.event = model.Event(duration=1.)
            c.date = 'dummy'

        if 'geodetic' in datatypes:
            c.geodetic_config = GeodeticConfig()
            if use_custom:
                logger.info(
                    'use_custom flag set! The velocity model in the'
                    ' geodetic GF configuration has to be updated!')
                c.geodetic_config.gf_config.custom_velocity_model = \
                    load_model().extract(depth_max=100. * km)
                c.geodetic_config.gf_config.use_crust2 = False
                c.geodetic_config.gf_config.replace_water = False
        else:
            c.geodetic_config = None

        if 'seismic' in datatypes:
            c.seismic_config = SeismicConfig()
            c.seismic_config.init_waveforms(waveforms)

            if not individual_gfs:
                c.seismic_config.gf_config.reference_location = \
                    ReferenceLocation(lat=10.0, lon=10.0)
            else:
                c.seismic_config.gf_config.reference_location = None

            if use_custom:
                logger.info(
                    'use_custom flag set! The velocity model in the'
                    ' seismic GF configuration has to be updated!')
                c.seismic_config.gf_config.custom_velocity_model = \
                    load_model().extract(depth_max=100. * km)
                c.seismic_config.gf_config.use_crust2 = False
                c.seismic_config.gf_config.replace_water = False
        else:
            c.seismic_config = None

    elif mode == 'ffi':
        if source_type != 'RectangularSource':
            raise TypeError(
                'Static distributed slip is so far only supported'
                ' for RectangularSource(s)')

        gmc = load_config(c.project_dir, 'geometry')

        if gmc is not None:
            logger.info('Taking information from geometry_config ...')
            if source_type != gmc.problem_config.source_type:
                raise ValueError(
                    'Specified reference source: "%s" differs from the'
                    ' source that has been used previously in'
                    ' "geometry" mode: "%s"!' % (
                        source_type, gmc.problem_config.source_type))

            n_sources = gmc.problem_config.n_sources
            point = {
                k: v.testvalue
                for k, v in gmc.problem_config.priors.iteritems()}
            point = utility.adjust_point_units(point)
            source_points = utility.split_point(point)

            reference_sources = init_reference_sources(
                source_points, n_sources, gmc.problem_config.source_type,
                gmc.problem_config.stf_type)

            c.date = gmc.date
            c.event = gmc.event

            if 'geodetic' in datatypes:
                gc = gmc.geodetic_config
                if gc is None:
                    logger.warning(
                        'Asked for "geodetic" datatype but geometry config '
                        'has no such datatype! Initialising default'
                        ' "geodetic" linear config!')
                    gc = GeodeticConfig()
                    lgf_config = GeodeticLinearGFConfig()
                else:
                    lgf_config = GeodeticLinearGFConfig(
                        earth_model_name=gc.gf_config.earth_model_name,
                        store_superdir=gc.gf_config.store_superdir,
                        n_variations=gc.gf_config.n_variations,
                        reference_sources=reference_sources,
                        sample_rate=gc.gf_config.sample_rate)

                c.geodetic_config = gc
                c.geodetic_config.gf_config = lgf_config

            elif 'seismic' in datatypes:
                sc = gmc.seismic_config
                if sc is None:
                    logger.warning(
                        'Asked for "seismic" datatype but geometry config '
                        'has no such datatype! Initialising default'
                        ' "seismic" linear config!')
                    sc = SeismicConfig()
                    lgf_config = SeismicLinearGFConfig()
                else:
                    lgf_config = SeismicLinearGFConfig(
                        earth_model_name=sc.gf_config.earth_model_name,
                        sample_rate=sc.gf_config.sample_rate,
                        reference_location=sc.gf_config.reference_location,
                        store_superdir=sc.gf_config.store_superdir,
                        n_variations=sc.gf_config.n_variations,
                        reference_sources=reference_sources)

                c.seismic_config = sc
                c.seismic_config.gf_config = lgf_config
        else:
            logger.warning('Found no geometry setup, ...')
            raise ImportError(
                'No geometry configuration file existing! Please initialise'
                ' a "geometry" configuration ("beat init command"), update'
                ' the Greens Function information and create GreensFunction'
                ' stores for the non-linear problem.')

    c.problem_config = ProblemConfig(
        n_sources=n_sources, datatypes=datatypes, mode=mode,
        source_type=source_type)
    c.problem_config.init_vars()
    c.problem_config.set_decimation_factor()

    c.sampler_config = SamplerConfig(name=sampler)
    c.sampler_config.set_parameters(update_covariances=False)

    c.hyper_sampler_config = SamplerConfig(name=hyper_sampler)
    c.hyper_sampler_config.set_parameters(update_covariances=None)

    c.update_hypers()
    c.problem_config.validate_priors()

    c.regularize()
    c.validate()

    logger.info('Project_directory: %s \n' % c.project_dir)
    util.ensuredir(c.project_dir)

    dump_config(c)
    return c
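# ---------------------------------------------------------------------------
# Hypothetical usage of init_config as defined above; the event name and
# date are made up for illustration.

c = init_config(
    'laquila2009', date='2009-04-06', min_magnitude=6.0,
    datatypes=['seismic'], mode='geometry',
    source_type='RectangularSource', n_sources=1)
print(c.project_dir)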
import numpy as num

import matplotlib.pyplot as plt

from pyrocko import spit, cake
from pyrocko.gf import meta

# Define a list of phases.
phase_defs = [meta.TPDef(id='stored:p', definition='p'),
              meta.TPDef(id='stored:P', definition='P')]

# Load a velocity model. In this example use the default AK135.
mod = cake.load_model()

# Time and space tolerance thresholds defining the accuracy of the
# :py:class:`pyrocko.spit.SPTree`.
t_tolerance = 0.1                       # in seconds
x_tolerance = num.array((500., 500.))   # in meters

# Boundaries of the grid.
xmin = 0.
xmax = 20000.
zmin = 0.
zmax = 11000.
x_bounds = num.array(((xmin, xmax), (zmin, zmax)))

# In this example the receiver is located at the surface.
receiver_depth = 0.

interpolated_tts = {}

for phase_def in phase_defs:
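    # -- A plausible continuation (the example is truncated at the loop
    # header above; this body is a reconstruction, not the original code):
    # wrap each phase's cake travel times in a spit.SPTree for fast
    # interpolated lookup.

    def evaluate(args, phase_def=phase_def):
        # Travel time for one (distance, source depth) node, or None where
        # the phase does not propagate. The default argument pins the
        # current phase_def to this closure.
        x, source_depth = args
        rays = mod.arrivals(
            phases=phase_def.phases, distances=[x * cake.m2d],
            zstart=source_depth, zstop=receiver_depth)
        return rays[0].t if rays else None

    interpolated_tts[phase_def.id] = spit.SPTree(
        f=evaluate, ftol=t_tolerance, xbounds=x_bounds, xtols=x_tolerance)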
def test_model_io(self):
    mod = cake.load_model()
    s = cake.write_nd_model_str(mod)
    assert isinstance(s, str)
def call(self):
    self.cleanup()

    viewer = self.get_viewer()
    event = viewer.get_active_event()

    stations = self.get_stations()
    for s in stations:
        print(s.nsl())

    nsl_to_station = dict((s.nsl(), s) for s in stations)

    if event is None:
        self.error('No active event set.')

    markers = self.get_selected_markers()
    if len(markers) != 1:
        self.error('Exactly one marker must be selected.')

    marker = markers[0]
    try:
        nslc = marker.one_nslc()
    except pmarker.MarkerOneNSLCRequired:
        self.error('Marker must be picked on a single trace.')

    marker_station = nsl_to_station[nslc[:3]]

    mod = cake.load_model()

    def traveltime(station):
        dist = event.distance_to(station)
        arrivals = mod.arrivals(
            zstart=event.depth, zstop=0., distances=[dist * cake.m2d],
            phases=[cake.PhaseDef(self.phasename)])

        if not arrivals:
            raise NoArrival()

        return arrivals[0].t

    try:
        tt_marker_station = traveltime(marker_station)
    except NoArrival:
        self.error('Selected phase does not arrive at station.')

    nsl_to_delay = {}
    for station in stations:
        nsl_to_delay[station.nsl()] = \
            traveltime(station) - tt_marker_station

    pile = self.get_pile()

    nsl_to_traces = {}
    nsl_to_tspan = {}
    fs = [f for f in [viewer.lowpass, viewer.highpass] if f is not None]
    if fs:
        tpad = 2.0 / min(fs)
    else:
        tpad = 0.0

    deltats = set()
    for nsl in nsl_to_delay.keys():
        delay = nsl_to_delay[nsl]
        tmin = marker.tmin + delay
        tmax = marker.tmax + delay
        nsl_to_tspan[nsl] = (tmin, tmax)

        trs = pile.all(
            tmin=tmin, tmax=tmax, tpad=tpad,
            trace_selector=lambda tr: tr.nslc_id[:3] == nsl,
            want_incomplete=False)

        for tr in trs:
            if viewer.lowpass is not None:
                tr.lowpass(4, viewer.lowpass)
            if viewer.highpass is not None:
                tr.highpass(4, viewer.highpass)

            tr.chop(tr.wmin, tr.wmax)
            deltats.add(tr.deltat)

        if trs:
            nsl_to_traces[nsl] = trs

    if len(deltats) != 1:
        self.error('All traces must have same sampling rate.')

    # add markers
    for nsl in nsl_to_traces.keys():
        tmin, tmax = nsl_to_tspan[nsl]
        for tr in nsl_to_traces[nsl]:
            mark = PhaseMarker(
                [tr.nslc_id], tmin=tmin, tmax=tmax, kind=1,
                phasename=self.phasename)

            self.add_marker(mark)

    # cross correlations
    nsls = sorted(list(nsl_to_traces.keys()))

    pair_corrs = []
    for nsl_a in nsls:
        trs_a = nsl_to_traces[nsl_a]

        if self.channels_relamp == 'All':
            comps = sorted(set([tr.channel[-1] for tr in trs_a]))
        else:
            comps = [c.strip() for c in self.channels_relamp.split(',')]

        for nsl_b in nsls:
            trs_b = nsl_to_traces[nsl_b]

            for comp in comps:
                try:
                    tr_a = get_trace(
                        trs_a, lambda tr: tr.channel.endswith(comp))
                    tr_b = get_trace(
                        trs_b, lambda tr: tr.channel.endswith(comp))
                except NotFound:
                    continue

                if tr_a is tr_b:
                    continue

                tr_cor = trace.correlate(
                    tr_a, tr_b, mode='full', normalization='normal')

                delaymax, ccmax = tr_cor.max()
                delaymin, ccmin = tr_cor.min()

                delay_syn = nsl_to_delay[nsl_b] - nsl_to_delay[nsl_a]

                if abs(ccmin) < abs(ccmax):
                    delay = delaymax
                    ccabsmax = abs(ccmax)
                    ccsignedmax = ccmax
                else:
                    delay = delaymin
                    ccabsmax = abs(ccmin)
                    ccsignedmax = ccmin

                tr_b_shifted = tr_b.copy()
                tr_b_shifted.shift(-delay)

                tmin_com = max(tr_b_shifted.tmin, tr_a.tmin)
                tmax_com = min(tr_b_shifted.tmax, tr_a.tmax)

                tr_a_chopped = tr_a.chop(tmin_com, tmax_com, inplace=False)
                tr_b_chopped = tr_b_shifted.chop(
                    tmin_com, tmax_com, inplace=False)

                ya = tr_a_chopped.ydata
                yb = tr_b_chopped.ydata

                relamp1 = num.sum(ya * yb) / num.sum(yb**2)
                relamp2 = num.sum(ya * yb) / num.sum(ya**2)

                if nsl_a[1] == 'LYKK':
                    print(ccabsmax, relamp1, relamp2,
                          abs((relamp1 / (1.0 / relamp2) - 1.0)))

                if ccabsmax < self.cc_min:
                    continue

                if abs((relamp1 / (1.0 / relamp2) - 1.0)) > 0.2:
                    continue

                relamp = (relamp1 + 1. / relamp2) * 0.5

                pair_corrs.append(
                    (tr_a.nslc_id, tr_b.nslc_id,
                     ccsignedmax, relamp, delay, delay_syn))

    nslc_to_relamp = invert_relative_amplitudes(pair_corrs)
    self._nslc_to_relamp = nslc_to_relamp

    nsl_to_xy = {}
    for nsl in nsl_to_traces.keys():
        trs = nsl_to_traces[nsl]
        try:
            cc = [c.strip() for c in self.channels_polar.split(',')]

            tr_y, tr_x = [
                get_trace(trs, lambda tr: tr.channel.endswith(c))
                for c in cc]

            x = tr_x.get_ydata()
            y = tr_y.get_ydata()

            nsl_to_xy[nsl] = (x, y)

        except NotFound:
            pass

    nsls = sorted(list(nsl_to_xy.keys()))
    n = len(nsls)

    xs_l = [nsl_to_xy[nsl][0] for nsl in nsls]
    ys_l = [nsl_to_xy[nsl][1] for nsl in nsls]
    nsamp = min(min(x.size for x in xs_l), min(y.size for y in ys_l))

    xs = num.vstack([x[:nsamp] for x in xs_l])
    ys = num.vstack([y[:nsamp] for y in ys_l])

    amps = num.sqrt(xs**2 + ys**2)
    amp_maxs = num.max(amps, axis=1)

    xs = xs / amp_maxs[:, num.newaxis]
    ys = ys / amp_maxs[:, num.newaxis]

    nphi = 73
    phis = num.linspace(-180., 180., nphi)
    d = num.zeros((n, n, nphi))

    for ia in range(n):
        for iphi, phi in enumerate(phis):
            x = xs[ia, :]
            y = ys[ia, :]
            xrot = num.cos(phi * d2r) * x + num.sin(phi * d2r) * y
            yrot = -num.sin(phi * d2r) * x + num.cos(phi * d2r) * y

            d[ia, :, iphi] = num.sqrt(
                num.sum(
                    (xrot[num.newaxis, :] - xs)**2 +
                    (yrot[num.newaxis, :] - ys)**2, axis=1))

    imins = num.argmin(d, axis=2)
    dmins = num.min(d, axis=2)
    dmin_median = num.median(dmins)

    phimins = phis[imins]

    nsl_to_rot = {}
    for nsl in nsls:
        nsl_to_rot[nsl] = 0.

    failed = set()

    for i in range(n):
        mean_min_error = num.mean(dmins[i, :] / dmin_median)
        print(mean_min_error, nsls[i])
        if mean_min_error > 3.0:
            failed.add(nsls[i])

    while True:
        ia_worst = num.argmax(num.mean(num.abs(phimins), axis=1))

        phimod = ((
            phis[num.newaxis, :] +
            phimins[ia_worst, :, num.newaxis] + 180.) % 360.) - 180.
        phirot = phis[num.argmin(num.mean(num.abs(phimod), axis=0))]
        if abs(phirot) < 10.:
            break

        nsl = nsls[ia_worst]
        mean_min_error = num.mean(dmins[ia_worst, :] / dmin_median)

        phimins[ia_worst, :] = (
            (phimins[ia_worst, :] + phirot) + 180.) % 360. - 180.
        phimins[:, ia_worst] = (
            (phimins[:, ia_worst] - phirot) + 180.) % 360. - 180.

        if nsl not in failed:
            print('%-20s %8.0f' % ('.'.join(nsl), phirot))
            nsl_to_rot[nsl] += phirot

    fframe = self.figure_frame()
    fig = fframe.gcf()
    fig.clf()

    if n == 0:
        self.error('No matching traces found.')

    ncols = 1
    while ncols**2 < n:
        ncols += 1

    nrows = ncols

    axes = fig.add_subplot(1, 2, 1, aspect=1.0)
    axes.axison = False
    axes.set_xlim(-0.05 - ncols, ncols + 0.05)
    axes.set_ylim(-0.05 - nrows, nrows + 0.05)
    axes.set_title('Event: %s, Phase: %s' % (event.name, self.phasename))

    for insl, nsl in enumerate(nsls):
        irow = insl // ncols
        icol = insl % ncols

        trs = nsl_to_traces[nsl]
        try:
            x, y = nsl_to_xy[nsl]
            cc = [c.strip() for c in self.channels_polar.split(',')]

            tr_y, tr_x = [
                get_trace(trs, lambda tr: tr.channel.endswith(c))
                for c in cc]

            xpos = icol * 2 - ncols + 1
            ypos = -irow * 2 + nrows - 1

            x = tr_x.get_ydata()
            y = tr_y.get_ydata()

            a = num.sqrt(x**2 + y**2)

            amax = num.max(a)

            phi = nsl_to_rot[nsl]

            color = 'black'
            if num.abs(phi) > 0:
                color = mpl_color('chocolate2')

            if num.abs(phi) > 30:
                color = mpl_color('orange2')

            if nsl in failed:
                color = mpl_color('scarletred2')

            axes.plot(
                x / amax + xpos, y / amax + ypos, color=color, alpha=0.7)

            if nsl not in failed:
                xrot = num.cos(phi * d2r) * x - num.sin(phi * d2r) * y
                yrot = num.sin(phi * d2r) * x + num.cos(phi * d2r) * y

                axes.plot(
                    xrot / amax + xpos, yrot / amax + ypos,
                    color='black', alpha=0.5)

                # axes.plot(
                #     [xpos, num.sin(phi*d2r) + xpos],
                #     [ypos, num.cos(phi*d2r) + ypos],
                #     color=color, alpha=0.5)

            axes.annotate(
                '.'.join(_ for _ in nsl if _),
                xy=(icol * 2 - ncols + 1, -irow * 2 + nrows - 2),
                xycoords='data',
                xytext=(0, 0),
                textcoords='offset points',
                verticalalignment='center',
                horizontalalignment='center',
                rotation=0.)

        except NotFound:
            pass

    axes = fig.add_subplot(1, 2, 2)

    nslcs = sorted(nslc_to_relamp.keys())
    pdata = []
    for inslc, nslc in enumerate(nslcs):
        nsl = nslc[:3]
        cha = nslc[3]

        tr = get_trace(
            nsl_to_traces[nsl], lambda tr: tr.channel == cha).copy()

        tr.shift(-(event.time + tt_marker_station + nsl_to_delay[nsl]))

        relamp = nslc_to_relamp[nslc]
        tr.ydata /= relamp

        color = 'black'
        if abs(num.log10(relamp)) > num.log10(1.1):
            color = mpl_color('chocolate2')

        if abs(num.log10(relamp)) > num.log10(2.0):
            color = mpl_color('orange2')

        if abs(num.log10(relamp)) > num.log10(10.0):
            color = mpl_color('scarletred2')

        pdata.append((tr, relamp, color, inslc, nslc))

    ranges = trace.minmax([aa[0] for aa in pdata], lambda tr: None)
    ymin, ymax = ranges[None]
    yabsmax = max(abs(ymin), abs(ymax))

    for (tr, relamp, color, inslc, nslc) in pdata:
        axes.plot(
            tr.get_xdata(), inslc + tr.get_ydata() / yabsmax, color=color)

        axes.annotate(
            '.'.join(_ for _ in nslc if _),
            xy=(0, inslc),
            xycoords=('axes fraction', 'data'),
            xytext=(-5, 0),
            textcoords='offset points',
            verticalalignment='center',
            horizontalalignment='right',
            rotation=0.,
            color=color)

        axes.annotate(
            'x %g' % (1.0 / relamp),
            xy=(1., inslc),
            xycoords=('axes fraction', 'data'),
            xytext=(+5, 0),
            textcoords='offset points',
            verticalalignment='center',
            horizontalalignment='left',
            rotation=0.,
            color=color)

    axes.get_yaxis().set_visible(False)
    for which in ['top', 'right', 'left']:
        axes.spines[which].set_visible(False)

    axes.set_xlabel('Time [s]')

    fframe.draw()
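# ---------------------------------------------------------------------------
# The delay and relative-amplitude measurement above is built on pyrocko's
# trace.correlate; a minimal self-contained sketch on two synthetic traces.
# The 0.3 s shift and 2.5x amplitude factor are illustrative.

import numpy as num
from pyrocko import trace

deltat = 0.01
y = num.random.randn(1000)
tr_a = trace.Trace(station='A', ydata=y, deltat=deltat, tmin=0.0)
tr_b = trace.Trace(station='B', ydata=2.5 * y, deltat=deltat, tmin=0.3)

tr_cor = trace.correlate(tr_a, tr_b, mode='full', normalization='normal')
delay, cc = tr_cor.max()
# expect a delay near the 0.3 s shift (sign per pyrocko's convention)
# and a correlation coefficient near 1.0
print('delay %.2f s, cc %.2f' % (delay, cc))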
Log.loc[Log.shape[0] - 1, 'VpMean'] = Log.loc[Log.shape[0] - 2, 'VpMean']

layer_strBot = "%.3f %.3f %.3f %.3f\n" % (
    z_layers[i + 1] * 1.05,
    Log.loc[mask, 'Vp'].mean(),
    Log.loc[mask, 'Vs'].mean(),
    Log.loc[mask, 'Rho'].mean())
NDFile = NDFile + layer_strBot

###############################################################################
# Load the model

with open('RepsolHighRes.nd', 'w') as f:
    f.write(NDFile)

model = cake.load_model('VpVs.nd')

MC = 100000
nLayers = 6
pertVp = np.zeros((MC, nLayers))
pertVs = np.zeros((MC, nLayers))
tops = np.zeros(nLayers)
bots = np.zeros(nLayers)
k = 0.00001
i = 0
MCDf = pd.DataFrame()

for l in model.layers():
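    # -- A plausible continuation (the fragment is truncated at the loop
    # header above; this body is a reconstruction, not the original code):
    # collect the layer boundaries needed by the Monte Carlo perturbation
    # arrays set up above.
    tops[i] = l.ztop
    bots[i] = l.zbot
    i += 1
    if i == nLayers:
        break

# ---------------------------------------------------------------------------
# For reference, the .nd round trip performed above can be done without
# temporary files: build a named-discontinuity model string (depth [km],
# vp [km/s], vs [km/s], density [g/cm**3]) and parse it with cake. The
# values below are illustrative.

from pyrocko import cake

nd_str = '''
 0.000  2.000  1.000  2.100
 1.000  2.500  1.300  2.200
 1.000  3.200  1.800  2.400
 3.000  3.600  2.100  2.500
'''

mod = cake.LayeredModel.from_scanlines(cake.read_nd_model_str(nd_str))
for layer in mod.layers():
    print(layer)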
def process(self, event, timing, bazi=None, slow=None, restitute=False,
            *args, **kwargs):
    '''
    :param timing: CakeTiming. Uses the definition without the offset.
    :param fn_dump_center: filename to where center stations shall be dumped
    :param fn_beam: filename of beam trace
    :param model: earthmodel to use (optional)
    :param network: network code (optional)
    :param station: station code (optional)
    '''
    logger.debug('start beam forming')
    stations = self.stations
    network_code = kwargs.get('network', '')
    station_code = kwargs.get('station', 'STK')
    c_station_id = (network_code, station_code)

    t_shifts = []
    lat_c, lon_c, z_c = self.c_lat_lon_z

    self.station_c = Station(
        lat=float(lat_c),
        lon=float(lon_c),
        elevation=float(z_c),
        depth=0.,
        name='Array Center',
        network=c_station_id[0],
        station=c_station_id[1][:5])

    fn_dump_center = kwargs.get('fn_dump_center', 'array_center.pf')
    fn_beam = kwargs.get('fn_beam', 'beam.mseed')

    if event:
        mod = cake.load_model(crust2_profile=(event.lat, event.lon))
        dist = ortho.distance_accurate50m(event, self.station_c)
        ray = timing.t(mod, (event.depth, dist), get_ray=True)

        if ray is None:
            logger.error(
                'None of defined phases available at beam station:\n %s' %
                self.station_c)
            return
        else:
            b = ortho.azimuth(self.station_c, event)
            if b >= 0.:
                self.bazi = b
            elif b < 0.:
                self.bazi = 360. + b
            self.slow = ray.p / (cake.r2d * cake.d2m)
    else:
        self.bazi = bazi
        self.slow = slow

    logger.info(
        'stacking %s with slowness %1.4f s/km at back azimuth %1.1f '
        'degrees' % (
            '.'.join(c_station_id), self.slow * cake.km, self.bazi))

    lat0 = num.array([lat_c] * len(stations))
    lon0 = num.array([lon_c] * len(stations))
    lats = num.array([s.lat for s in stations])
    lons = num.array([s.lon for s in stations])
    ns, es = ortho.latlon_to_ne_numpy(lat0, lon0, lats, lons)
    theta = num.float(self.bazi * num.pi / 180.)
    R = num.array([[num.cos(theta), -num.sin(theta)],
                   [num.sin(theta), num.cos(theta)]])
    distances = R.dot(num.vstack((es, ns)))[1]
    channels = set()
    self.stacked = {}
    num_stacked = {}
    self.t_shifts = {}
    self.shifted_traces = []
    taperer = trace.CosFader(xfrac=0.05)

    if self.diff_dt_treat == 'downsample':
        self.traces.sort(key=lambda x: x.deltat)
    elif self.diff_dt_treat == 'oversample':
        dts = [t.deltat for t in self.traces]
        for tr in self.traces:
            tr.resample(min(dts))

    for tr in self.traces:
        if tr.nslc_id[:2] == c_station_id:
            continue

        tr = tr.copy(data=True)
        tr.ydata = tr.ydata.astype(
            num.float64) - tr.ydata.mean(dtype=num.float64)
        tr.taper(taperer)
        try:
            stack_trace = self.stacked[tr.channel]
            num_stacked[tr.channel] += 1
        except KeyError:
            stack_trace = tr.copy(data=True)
            stack_trace.set_ydata(num.zeros(len(stack_trace.get_ydata())))
            stack_trace.set_codes(
                network=c_station_id[0],
                station=c_station_id[1],
                location='',
                channel=tr.channel)

            self.stacked[tr.channel] = stack_trace
            channels.add(tr.channel)
            num_stacked[tr.channel] = 1

        nslc_id = tr.nslc_id

        try:
            stats = list(filter(
                lambda x: util.match_nslc('%s.%s.%s.*' % x.nsl(), nslc_id),
                stations))

            stat = stats[0]
        except IndexError:
            break

        i = stations.index(stat)
        d = distances[i]
        t_shift = d * self.slow
        t_shifts.append(t_shift)
        tr.shift(t_shift)
        self.t_shifts[tr.nslc_id[:2]] = t_shift
        if self.normalize_std:
            tr.ydata = tr.ydata / tr.ydata.std()

        if num.abs(tr.deltat - stack_trace.deltat) > 0.000001:
            if self.diff_dt_treat == 'downsample':
                stack_trace.downsample_to(tr.deltat)
            elif self.diff_dt_treat == 'upsample':
                raise Exception(
                    'something went wrong with the upsampling, previously')
        stack_trace.add(tr)
        self.shifted_traces.append(tr)

    if self.post_normalize:
        for ch, tr in self.stacked.items():
            tr.set_ydata(tr.get_ydata() / num_stacked[ch])

    self.save_station(fn_dump_center)
    self.checked_nslc([stack_trace])
    self.save(stack_trace, fn_beam)
    return self.shifted_traces, stack_trace, t_shifts
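# ---------------------------------------------------------------------------
# The essence of the delay-and-sum stack implemented in process() above,
# reduced to plain numpy/pyrocko on synthetic traces. The slowness and the
# station offsets along the back azimuth are illustrative.

import numpy as num
from pyrocko import trace

slowness = 0.05 / 1000.           # 0.05 s/km expressed in s/m
offsets = [0., 1000., 2500.]      # station offsets along back azimuth [m]

deltat = 0.01
y = num.random.randn(2000)
traces = [
    trace.Trace(station='S%i' % i, ydata=y.copy(), deltat=deltat,
                tmin=offsets[i] * slowness)   # simulate plane-wave moveout
    for i in range(len(offsets))]

stack = traces[0].copy()
stack.set_ydata(num.zeros(stack.ydata.size))
for i, tr in enumerate(traces):
    tr = tr.copy()
    tr.shift(-offsets[i] * slowness)   # undo the moveout...
    stack.add(tr)                      # ...and sum onto the stack

stack.ydata /= len(traces)             # normalize by the number of traces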