def station_to_target(s, quantity, store_id):
    if quantity == 'restituted':
        quantity = 'displacement'
    return Target(
        codes=s.nsl() + tuple('Z'),
        lat=s.lat,
        lon=s.lon,
        elevation=s.elevation,
        quantity=quantity,
        store_id=store_id)
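# Usage sketch (not from the original source): build one vertical-component
# target per station. The station file name 'stations.txt' and the store id
# 'crust2_dd' are placeholder assumptions.
from pyrocko import model
stations = model.load_stations('stations.txt')
targets = [station_to_target(s, 'restituted', 'crust2_dd') for s in stations]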
class SimilarityMatrix(Object):
    '''
    A container class to store a list of :py:class:`Similarity` instances
    and how they have been calculated.
    '''
    events = List.T(model.Event.T())
    targets = List.T(Target.T())
    similarities = List.T(Similarity.T())
    filters = List.T(trace.FrequencyResponse.T())
    padding = Float.T()
    windowing_method = String.T()
    vmin = Float.T()
    vmax = Float.T()
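# Minimal usage sketch (not from the original source; all field values are
# placeholder assumptions): populate the container and serialize it with the
# guts machinery that every pyrocko Object provides.
sim_matrix = SimilarityMatrix(
    events=[],          # model.Event instances, built elsewhere
    targets=[],         # Target instances, built elsewhere
    similarities=[],    # Similarity instances, built elsewhere
    filters=[],
    padding=10.,
    windowing_method='cc',
    vmin=1500.,
    vmax=6000.)
sim_matrix.dump(filename='similarity_matrix.yaml')  # guts YAML dump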
def get_azimuthal_targets(store_id, source, radius,
                          azi_begin=0., azi_end=360., dazi=1.,
                          interpolation='multilinear',
                          components='RTZ', quantity='displacement'):
    assert dazi > 0.
    assert azi_begin < azi_end

    nstations = int((azi_end - azi_begin) // dazi)
    assert nstations > 0

    azimuths = num.linspace(azi_begin, azi_end, nstations)

    coords = num.zeros((2, nstations))
    coords[0, :] = num.cos(azimuths * d2r)
    coords[1, :] = num.sin(azimuths * d2r)
    coords *= radius

    dips = {'R': 0., 'T': 0., 'Z': -90.}
    for comp in components:
        assert comp in dips

    target_kwargs = dict(
        quantity=quantity,  # was hard-coded to 'displacement', ignoring the argument
        interpolation=interpolation,
        store_id=store_id)

    targets = [
        Target(
            lat=source.lat,
            lon=source.lon,
            north_shift=coords[0, iazi] + source.north_shift,
            east_shift=coords[1, iazi] + source.east_shift,
            azimuth={
                'R': azi,
                'T': azi + 90.,
                'Z': 0.
            }[channel],
            dip=dips[channel],
            codes=('', 'S%01d' % iazi, '', channel),
            **target_kwargs)
        for iazi, azi in enumerate(azimuths)
        for channel in components
    ]

    for target, azi in zip(targets, azimuths):
        target.azimuth = azi
        target.dazi = dazi

    return targets, azimuths
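# Usage sketch (placeholder values): a ring of R/T/Z targets at 50 km
# distance around a double-couple source, one target triplet every 5 degrees.
source = DCSource(lat=11., lon=11., depth=10e3, strike=20., dip=40., rake=60.)
targets, azimuths = get_azimuthal_targets(
    'crust2_dd', source, 50e3, dazi=5., components='RTZ')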
def make_targets(pile, stations):
    targets = []
    for nslc_id in pile.nslc_ids.keys():
        for s in stations:
            if util.match_nslc('%s.*' % s.nsl_string(), nslc_id):
                targets.append(
                    Target(lat=s.lat, lon=s.lon, depth=s.depth,
                           elevation=s.elevation, codes=nslc_id))
    return targets
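# Usage sketch (directory and file names are placeholder assumptions): pair
# the nslc ids present in a waveform pile with the matching station metadata.
from pyrocko import pile, model
data_pile = pile.make_pile(['waveform_dir'])
stations = model.load_stations('stations.txt')
targets = make_targets(data_pile, stations)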
# We need a pyrocko.gf.Engine object which provides us with the traces
# extracted from the store. In this case we are going to use a local
# engine since we are going to query a local store.
engine = LocalEngine(store_superdirs=['/media/usb/gf_stores'])

# The store we are going to extract data from:
store_id = 'crust2_dd'

# Define a list of pyrocko.gf.Target objects, representing the recording
# devices. In this case one station with a three-component sensor will
# serve fine for demonstration.
channel_codes = 'ENZ'
targets = [
    Target(
        lat=10.,
        lon=10.,
        store_id=store_id,
        codes=('', 'STA', '', channel_code))
    for channel_code in channel_codes]

# Let's use a double couple source representation.
source_dc = DCSource(
    lat=11.,
    lon=11.,
    depth=10000.,
    strike=20.,
    dip=40.,
    rake=60.,
    magnitude=4.)

# Processing that data will return a pyrocko.gf.Response object.
response = engine.process(source_dc, targets)
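# Follow-up sketch (not part of the original example): the Response object
# holds the synthetic seismograms; pyrocko_traces() converts them to plain
# pyrocko traces for inspection or further processing.
synthetic_traces = response.pyrocko_traces()
for tr in synthetic_traces:
    print(tr.nslc_id, tr.tmin, tr.tmax)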
def doCalc_syn(flag, Config, WaveformDict, FilterMetaData, Gmint, Gmaxt,
               TTTGridMap, Folder, Origin, ntimes, switch, ev, arrayfolder,
               syn_in, parameter):
    '''
    method for calculating semblance of one station array
    '''
    Logfile.add('PROCESS %d %s' % (flag, ' Enters Semblance Calculation'))
    Logfile.add('MINT : %f MAXT: %f Traveltime' % (Gmint, Gmaxt))

    cfg = ConfigObj(dict=Config)

    dimX = cfg.dimX()                    # ('dimx')
    dimY = cfg.dimY()                    # ('dimy')
    winlen = cfg.winlen()                # ('winlen')
    step = cfg.step()                    # ('step')
    new_frequence = cfg.newFrequency()   # ('new_frequence')
    forerun = cfg.Int('forerun')
    duration = cfg.Int('duration')
    gridspacing = cfg.Float('gridspacing')
    nostat = len(WaveformDict)
    traveltimes = {}
    recordstarttime = ''
    minSampleCount = 999999999

    if cfg.UInt('forerun') > 0:
        ntimes = int((cfg.UInt('forerun') + cfg.UInt('duration'))
                     / cfg.UInt('step'))
    else:
        ntimes = int(cfg.UInt('duration') / cfg.UInt('step'))
    nsamp = int(winlen * new_frequence)
    nstep = int(step * new_frequence)

    from pyrocko import obspy_compat
    from pyrocko import orthodrome, model
    obspy_compat.plant()

    ###########################################################################
    calcStreamMap = WaveformDict

    stations = []
    py_trs = []
    for trace in calcStreamMap.keys():
        py_tr = obspy_compat.to_pyrocko_trace(calcStreamMap[trace])
        py_trs.append(py_tr)
        for il in FilterMetaData:
            if str(il) == str(trace):
                szo = model.Station(lat=il.lat, lon=il.lon,
                                    station=il.sta, network=il.net,
                                    channels=py_tr.channel,
                                    elevation=il.ele, location=il.loc)
                stations.append(szo)  # right number of stations?

    store_id = syn_in.store()
    engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
    targets = []
    for st in stations:
        target = Target(
            lat=st.lat,
            lon=st.lon,
            store_id=store_id,
            codes=(st.network, st.station, st.location, 'BHZ'),
            tmin=-1900,
            tmax=3900,
            interpolation='multilinear',
            quantity=cfg.quantity())
        targets.append(target)

    if syn_in.nsources() == 1:
        if syn_in.use_specific_stf() is True:
            stf = syn_in.stf()
            exec(stf)
        else:
            stf = STF()
        if syn_in.source() == 'RectangularSource':
            source = RectangularSource(
                lat=float(syn_in.lat_0()),
                lon=float(syn_in.lon_0()),
                depth=syn_in.depth_syn_0() * 1000.,
                strike=syn_in.strike_0(),
                dip=syn_in.dip_0(),
                rake=syn_in.rake_0(),
                width=syn_in.width_0() * 1000.,
                length=syn_in.length_0() * 1000.,
                nucleation_x=syn_in.nucleation_x_0(),
                slip=syn_in.slip_0(),
                nucleation_y=syn_in.nucleation_y_0(),
                stf=stf,
                time=util.str_to_time(syn_in.time_0()))
        if syn_in.source() == 'DCSource':
            source = DCSource(
                lat=float(syn_in.lat_0()),
                lon=float(syn_in.lon_0()),
                depth=syn_in.depth_syn_0() * 1000.,
                strike=syn_in.strike_0(),
                dip=syn_in.dip_0(),
                rake=syn_in.rake_0(),
                stf=stf,
                time=util.str_to_time(syn_in.time_0()),
                magnitude=syn_in.magnitude_0())
    else:
        sources = []
        for i in range(syn_in.nsources()):
            if syn_in.use_specific_stf() is True:
                stf = syn_in.stf()
                exec(stf)
            else:
                stf = STF()
            if syn_in.source() == 'RectangularSource':
                sources.append(RectangularSource(
                    lat=float(syn_in.lat_1(i)),
                    lon=float(syn_in.lon_1(i)),
                    depth=syn_in.depth_syn_1(i) * 1000.,
                    strike=syn_in.strike_1(i),
                    dip=syn_in.dip_1(i),
                    rake=syn_in.rake_1(i),
                    width=syn_in.width_1(i) * 1000.,
                    length=syn_in.length_1(i) * 1000.,
                    nucleation_x=syn_in.nucleation_x_1(i),
                    slip=syn_in.slip_1(i),
                    nucleation_y=syn_in.nucleation_y_1(i),
                    stf=stf,
                    time=util.str_to_time(syn_in.time_1(i))))
            if syn_in.source() == 'DCSource':
                sources.append(DCSource(
                    lat=float(syn_in.lat_1(i)),
                    lon=float(syn_in.lon_1(i)),
                    depth=syn_in.depth_1(i) * 1000.,
                    strike=syn_in.strike_1(i),
                    dip=syn_in.dip_1(i),
                    rake=syn_in.rake_1(i),
                    stf=stf,
                    time=util.str_to_time(syn_in.time_1(i)),
                    magnitude=syn_in.magnitude_1(i)))
        source = CombiSource(subsources=sources)

    response = engine.process(source, targets)
    synthetic_traces = response.pyrocko_traces()

    if cfg.Bool('synthetic_test_add_noise') is True:
        from noise_addition import add_noise
        trs_orgs = []
        calcStreamMapsyn = calcStreamMap.copy()
        for tracex in calcStreamMapsyn.keys():
            for trl in synthetic_traces:
                if str(trl.name()[4:12]) == str(tracex[4:]):
                    tr_org = obspy_compat.to_pyrocko_trace(
                        calcStreamMapsyn[tracex])
                    tr_org.downsample_to(2.0)
                    trs_orgs.append(tr_org)
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        synthetic_traces = add_noise(trs_orgs, engine, source.pyrocko_event(),
                                     stations, store_id, phase_def='P')

    trs_org = []
    trs_orgs = []
    fobj = os.path.join(arrayfolder, 'shift.dat')
    xy = num.loadtxt(fobj, usecols=1, delimiter=',')
    calcStreamMapsyn = calcStreamMap.copy()
    for tracex in calcStreamMapsyn.keys():
        for trl in synthetic_traces:
            if str(trl.name()[4:12]) == str(tracex[4:]):
                mod = trl
                recordstarttime = \
                    calcStreamMapsyn[tracex].stats.starttime.timestamp
                recordendtime = \
                    calcStreamMapsyn[tracex].stats.endtime.timestamp
                tr_org = obspy_compat.to_pyrocko_trace(
                    calcStreamMapsyn[tracex])
                trs_orgs.append(tr_org)
                tr_org_add = mod.chop(recordstarttime, recordendtime,
                                      inplace=False)
                synthetic_obs_tr = obspy_compat.to_obspy_trace(tr_org_add)
                calcStreamMapsyn[tracex] = synthetic_obs_tr
                trs_org.append(tr_org_add)
    calcStreamMap = calcStreamMapsyn

    if cfg.Bool('shift_by_phase_pws') is True:
        calcStreamMapshifted = calcStreamMap.copy()
        from obspy.core import stream
        stream = stream.Stream()
        for trace in calcStreamMapshifted.keys():
            stream.append(calcStreamMapshifted[trace])
        pws_stack = PWS_stack([stream], weight=2, normalize=True)
        for tr in pws_stack:
            for trace in calcStreamMapshifted.keys():
                calcStreamMapshifted[trace] = tr
        calcStreamMap = calcStreamMapshifted

    if cfg.Bool('shift_by_phase_onset') is True:
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
            phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
            fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                            depth=ev.depth * 1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(directory,
                                                         'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        for trace in calcStreamMapshifted.keys():
            recordstarttime = \
                calcStreamMapshifted[trace].stats.starttime.timestamp
            recordendtime = \
                calcStreamMapshifted[trace].stats.endtime.timestamp
            mod = shifted_traces[i]
            extracted = mod.chop(recordstarttime, recordendtime,
                                 inplace=False)
            shifted_obs_tr = obspy_compat.to_obspy_trace(extracted)
            calcStreamMapshifted[trace] = shifted_obs_tr
            i = i + 1
        calcStreamMap = calcStreamMapshifted

    weight = 0.
    if cfg.Bool('weight_by_noise') is True:
        from noise_analyser import analyse
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
            phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
            fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                            depth=ev.depth * 1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(directory,
                                                         'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        weight = analyse(shifted_traces, engine, event, stations,
                         100., store_id, nwindows=1,
                         check_events=True, phase_def='P')

    for trace in calcStreamMap.keys():
        recordstarttime = calcStreamMap[trace].stats.starttime
        d = calcStreamMap[trace].stats.starttime
        d = d.timestamp
        if calcStreamMap[trace].stats.npts < minSampleCount:
            minSampleCount = calcStreamMap[trace].stats.npts

    ###########################################################################
    traces = num.ndarray(shape=(len(calcStreamMap), minSampleCount),
                         dtype=float)
    traveltime = num.ndarray(shape=(len(calcStreamMap), dimX * dimY),
                             dtype=float)
    latv = num.ndarray(dimX * dimY, dtype=float)
    lonv = num.ndarray(dimX * dimY, dtype=float)
    ###########################################################################

    c = 0
    streamCounter = 0
    for key in calcStreamMap.keys():
        streamID = key
        c2 = 0
        for o in calcStreamMap[key]:
            if c2 < minSampleCount:
                traces[c][c2] = o
                c2 += 1

        for key in TTTGridMap.keys():
            if streamID == key:
                traveltimes[streamCounter] = TTTGridMap[key]
            # else: no travel-time grid available for this stream

        if streamCounter not in traveltimes:
            continue  # hs : thread crashed before

        g = traveltimes[streamCounter]
        dimZ = g.dimZ
        mint = g.mint
        maxt = g.maxt
        Latul = g.Latul
        Lonul = g.Lonul
        Lator = g.Lator
        Lonor = g.Lonor

        gridElem = g.GridArray
        for x in range(dimX):
            for y in range(dimY):
                elem = gridElem[x, y]
                traveltime[c][x * dimY + y] = elem.tt
                latv[x * dimY + y] = elem.lat
                lonv[x * dimY + y] = elem.lon
        # endfor

        c += 1
        streamCounter += 1
    # endfor

    # ########### CALCULATE PARAMETER FOR SEMBLANCE CALCULATION ###############
    nsamp = winlen * new_frequence
    nstep = int(step * new_frequence)
    migpoints = dimX * dimY
    dimZ = 0
    new_frequence = cfg.newFrequency()  # ['new_frequence']
    maxp = int(Config['ncore'])

    Logfile.add('PROCESS %d NTIMES: %d' % (flag, ntimes))

    if False:
        print('nostat ', nostat, type(nostat))
        print('nsamp ', nsamp, type(nsamp))
        print('ntimes ', ntimes, type(ntimes))
        print('nstep ', nstep, type(nstep))
        print('dimX ', dimX, type(dimX))
        print('dimY ', dimY, type(dimY))
        print('mint ', Gmint, type(mint))
        print('new_freq ', new_frequence, type(new_frequence))
        print('minSampleCount ', minSampleCount, type(minSampleCount))
        print('latv ', latv, type(latv))
        print('traces', traces, type(traces))
        print('traveltime', traveltime, type(traveltime))

    # ====================== semblance calculation ============================
    t1 = time.time()
    traces = traces.reshape(1, nostat * minSampleCount)
    traveltime = traveltime.reshape(1, nostat * dimX * dimY)

    USE_C_CODE = True
    try:
        if USE_C_CODE:
            import Cm
            import CTrig
            start_time = time.time()
            k = Cm.otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY,
                         Gmint, new_frequence, minSampleCount, latv, lonv,
                         traveltime, traces)
            print("--- %s seconds ---" % (time.time() - start_time))
        else:
            start_time = time.time()
            k = otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY,
                      Gmint, new_frequence, minSampleCount, latv, lonv,
                      traveltime, traces)  # hs
            print("--- %s seconds ---" % (time.time() - start_time))
    except Exception:
        print("loaded tttgrid has probably wrong dimensions or stations, "
              "delete ttgrid or exchange")

    t2 = time.time()

    partSemb = k
    partSemb_syn = partSemb.reshape(ntimes, migpoints)

    return partSemb_syn
def generate_test_data_grid(store_id, store_dirs, coordinates,
                            geometry_params, pre=0.5, post=3,
                            stations_input=None, batch_loading=256,
                            paths_disks=None):

    engine = LocalEngine(store_superdirs=[store_dirs])
    store = engine.get_store(store_id)
    mod = store.config.earthmodel_1d
    cake_phase = cake.PhaseDef("P")
    phase_list = [cake_phase]
    waveforms_events = []
    waveforms_events_uncut = []
    waveforms_noise = []
    sources = []
    lats = coordinates[0]
    lons = coordinates[1]
    depths = coordinates[2]

    if stations_input is None:
        stations_unsorted = model.load_stations("data/stations.pf")
    else:
        stations_unsorted = model.load_stations(stations_input)
    for st in stations_unsorted:
        st.dist = orthodrome.distance_accurate50m(st.lat, st.lon,
                                                  lats[0], lons[0])
        st.azi = orthodrome.azimuth(st.lat, st.lon, lats[0], lons[0])
    stations = sorted(stations_unsorted, key=lambda x: x.dist, reverse=True)

    targets = []
    events = []
    mean_lat = []
    mean_lon = []
    max_rho = 0.
    for st in stations:
        mean_lat.append(st.lat)
        mean_lon.append(st.lon)
        for cha in st.channels:
            # 'is not' compared string identity in the original; '!=' is the
            # intended comparison
            if cha.name != "R" and cha.name != "T" and cha.name != "Z":
                target = Target(
                    lat=st.lat,
                    lon=st.lon,
                    store_id=store_id,
                    interpolation='multilinear',
                    quantity='displacement',
                    codes=st.nsl() + (cha.name,))
                targets.append(target)

    strikes = geometry_params[0]
    dips = geometry_params[1]
    rakes = geometry_params[2]
    vs = geometry_params[3]
    ws = geometry_params[4]

    grid_points = []
    for lat in lats:
        for lon in lons:
            for depth in depths:
                grid_points.append([lat, lon, depth])

    ray.init(num_cpus=num_cpus - 1)
    npm = len(lats) * len(lons) * len(depths)
    npm_geom = len(strikes) * len(dips) * len(rakes)
    results = ray.get([
        get_parallel_mtqt.remote(
            i, targets, store_id, post, pre, stations, mod, grid_points[i],
            strikes, dips, rakes, vs, ws, store_dirs,
            batch_loading=batch_loading, npm=npm_geom,
            paths_disks=paths_disks)
        for i in range(len(grid_points))
    ])
    ray.shutdown()
    return waveforms_events
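# Usage sketch (all values are placeholder assumptions): `coordinates` packs
# the lat/lon/depth axes of the source grid, `geometry_params` packs
# strike/dip/rake plus the two extra source parameters consumed by
# get_parallel_mtqt.
coordinates = [[49.0, 49.1], [8.0, 8.1], [5e3, 10e3]]
geometry_params = [[0., 90.], [45.], [0.], [0.1], [0.1]]
waveforms = generate_test_data_grid(
    'crust2_dd', '/path/to/stores', coordinates, geometry_params,
    stations_input='stations.txt')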
def doCalc(flag, Config, WaveformDict, FilterMetaData, Gmint, Gmaxt,
           TTTGridMap, Folder, Origin, ntimes, switch, ev, arrayfolder,
           syn_in):
    '''
    method for calculating semblance of one station array
    '''
    Logfile.add('PROCESS %d %s' % (flag, ' Enters Semblance Calculation'))
    Logfile.add('MINT : %f MAXT: %f Traveltime' % (Gmint, Gmaxt))

    cfg = ConfigObj(dict=Config)
    cfg_f = FilterCfg(Config)

    timeev = util.str_to_time(ev.time)
    dimX = cfg.dimX()                    # ('dimx')
    dimY = cfg.dimY()                    # ('dimy')
    winlen = cfg.winlen()                # ('winlen')
    step = cfg.step()                    # ('step')
    new_frequence = cfg.newFrequency()   # ('new_frequence')
    forerun = cfg.Int('forerun')
    duration = cfg.Int('duration')
    nostat = len(WaveformDict)
    traveltimes = {}
    recordstarttime = ''
    minSampleCount = 999999999
    ntimes = int((forerun + duration) / step)
    nsamp = int(winlen * new_frequence)
    nstep = int(step * new_frequence)

    from pyrocko import obspy_compat
    from pyrocko import model
    obspy_compat.plant()

    ###########################################################################
    calcStreamMap = WaveformDict

    stations = []
    py_trs = []
    lats = []
    lons = []
    for trace in calcStreamMap.keys():
        py_tr = obspy_compat.to_pyrocko_trace(calcStreamMap[trace])
        py_trs.append(py_tr)
        for il in FilterMetaData:
            if str(il) == str(trace):
                szo = model.Station(lat=float(il.lat), lon=float(il.lon),
                                    station=il.sta, network=il.net,
                                    channels=py_tr.channel,
                                    elevation=il.ele, location=il.loc)
                stations.append(szo)
                lats.append(float(il.lat))
                lons.append(float(il.lon))
    array_center = [num.mean(lats), num.mean(lons)]

    # ===================== synthetic BeamForming =============================
    if cfg.Bool('synthetic_test') is True:
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        recordstarttimes = []
        for tracex in calcStreamMap.keys():
            recordstarttimes.append(
                calcStreamMap[tracex].stats.starttime.timestamp)
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMap[tracex])
            tmin = tr_org.tmin

        # tmin = num.min(recordstarttimes)
        targets = []
        sources = []
        for st in stations:
            target = Target(
                lat=st.lat,
                lon=st.lon,
                store_id=store_id,
                codes=(st.network, st.station, st.location, 'BHZ'),
                tmin=-6900,
                tmax=6900,
                interpolation='multilinear',
                quantity=cfg.quantity())
            targets.append(target)

        if syn_in.nsources() == 1:
            if syn_in.use_specific_stf() is True:
                stf = syn_in.stf()
                exec(stf)
            else:
                stf = STF()
            if syn_in.source() == 'RectangularSource':
                sources.append(
                    RectangularSource(
                        lat=float(syn_in.lat_0()),
                        lon=float(syn_in.lon_0()),
                        east_shift=float(syn_in.east_shift_0()) * 1000.,
                        north_shift=float(syn_in.north_shift_0()) * 1000.,
                        depth=syn_in.depth_syn_0() * 1000.,
                        strike=syn_in.strike_0(),
                        dip=syn_in.dip_0(),
                        rake=syn_in.rake_0(),
                        width=syn_in.width_0() * 1000.,
                        length=syn_in.length_0() * 1000.,
                        nucleation_x=syn_in.nucleation_x_0(),
                        slip=syn_in.slip_0(),
                        nucleation_y=syn_in.nucleation_y_0(),
                        stf=stf,
                        time=util.str_to_time(syn_in.time_0())))
            if syn_in.source() == 'DCSource':
                sources.append(
                    DCSource(lat=float(syn_in.lat_0()),
                             lon=float(syn_in.lon_0()),
                             east_shift=float(syn_in.east_shift_0()) * 1000.,
                             north_shift=float(syn_in.north_shift_0()) * 1000.,
                             depth=syn_in.depth_syn_0() * 1000.,
                             strike=syn_in.strike_0(),
                             dip=syn_in.dip_0(),
                             rake=syn_in.rake_0(),
                             stf=stf,
                             time=util.str_to_time(syn_in.time_0()),
                             magnitude=syn_in.magnitude_0()))
        else:
            for i in range(syn_in.nsources()):
                if syn_in.use_specific_stf() is True:
                    stf = syn_in.stf()
                    exec(stf)
                else:
                    stf = STF()
                if syn_in.source() == 'RectangularSource':
                    sources.append(
                        RectangularSource(
                            lat=float(syn_in.lat_1(i)),
                            lon=float(syn_in.lon_1(i)),
                            east_shift=float(syn_in.east_shift_1(i)) * 1000.,
                            north_shift=float(syn_in.north_shift_1(i)) * 1000.,
                            depth=syn_in.depth_syn_1(i) * 1000.,
                            strike=syn_in.strike_1(i),
                            dip=syn_in.dip_1(i),
                            rake=syn_in.rake_1(i),
                            width=syn_in.width_1(i) * 1000.,
                            length=syn_in.length_1(i) * 1000.,
                            nucleation_x=syn_in.nucleation_x_1(i),
                            slip=syn_in.slip_1(i),
                            nucleation_y=syn_in.nucleation_y_1(i),
                            stf=stf,
                            time=util.str_to_time(syn_in.time_1(i))))
                if syn_in.source() == 'DCSource':
                    sources.append(
                        DCSource(
                            lat=float(syn_in.lat_1(i)),
                            lon=float(syn_in.lon_1(i)),
                            east_shift=float(syn_in.east_shift_1(i)) * 1000.,
                            north_shift=float(syn_in.north_shift_1(i)) * 1000.,
                            depth=syn_in.depth_syn_1(i) * 1000.,
                            strike=syn_in.strike_1(i),
                            dip=syn_in.dip_1(i),
                            rake=syn_in.rake_1(i),
                            stf=stf,
                            time=util.str_to_time(syn_in.time_1(i)),
                            magnitude=syn_in.magnitude_1(i)))
        # source = CombiSource(subsources=sources)
        synthetic_traces = []
        for source in sources:
            response = engine.process(source, targets)
            synthetic_traces_source = response.pyrocko_traces()
            if not synthetic_traces:
                synthetic_traces = synthetic_traces_source
            else:
                for trsource, tr in zip(synthetic_traces_source,
                                        synthetic_traces):
                    tr.add(trsource)
        from pyrocko import trace as trld
        # trld.snuffle(synthetic_traces)
        timeev = util.str_to_time(syn_in.time_0())

        if cfg.Bool('synthetic_test_add_noise') is True:
            from noise_addition import add_noise
            trs_orgs = []
            calcStreamMapsyn = calcStreamMap.copy()
            for tracex in calcStreamMapsyn.keys():
                for trl in synthetic_traces:
                    if (str(trl.name()[4:12]) == str(tracex[4:])
                            or str(trl.name()[3:13]) == str(tracex[3:])
                            or str(trl.name()[3:11]) == str(tracex[3:])
                            or str(trl.name()[3:14]) == str(tracex[3:])):
                        tr_org = obspy_compat.to_pyrocko_trace(
                            calcStreamMapsyn[tracex])
                        tr_org.downsample_to(2.0)
                        trs_orgs.append(tr_org)
            store_id = syn_in.store()
            engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
            synthetic_traces = add_noise(trs_orgs, engine,
                                         source.pyrocko_event(),
                                         stations, store_id, phase_def='P')

        trs_org = []
        trs_orgs = []
        from pyrocko import trace
        fobj = os.path.join(arrayfolder, 'shift.dat')
        calcStreamMapsyn = calcStreamMap.copy()
        for tracex in calcStreamMapsyn.keys():
            for trl in synthetic_traces:
                if (str(trl.name()[4:12]) == str(tracex[4:])
                        or str(trl.name()[3:13]) == str(tracex[3:])
                        or str(trl.name()[3:11]) == str(tracex[3:])
                        or str(trl.name()[3:14]) == str(tracex[3:])):
                    mod = trl
                    recordstarttime = \
                        calcStreamMapsyn[tracex].stats.starttime.timestamp
                    recordendtime = \
                        calcStreamMapsyn[tracex].stats.endtime.timestamp
                    tr_org = obspy_compat.to_pyrocko_trace(
                        calcStreamMapsyn[tracex])
                    if switch == 0:
                        tr_org.bandpass(4, cfg_f.flo(), cfg_f.fhi())
                    elif switch == 1:
                        tr_org.bandpass(4, cfg_f.flo2(), cfg_f.fhi2())
                    trs_orgs.append(tr_org)
                    tr_org_add = mod.chop(recordstarttime, recordendtime,
                                          inplace=False)
                    synthetic_obs_tr = obspy_compat.to_obspy_trace(tr_org_add)
                    calcStreamMapsyn[tracex] = synthetic_obs_tr
                    trs_org.append(tr_org_add)
        calcStreamMap = calcStreamMapsyn

    if cfg.Bool('shift_by_phase_pws') is True:
        calcStreamMapshifted = calcStreamMap.copy()
        from obspy.core import stream
        stream = stream.Stream()
        for trace in calcStreamMapshifted.keys():
            stream.append(calcStreamMapshifted[trace])
        pws_stack = PWS_stack([stream], weight=2, normalize=True)
        for tr in pws_stack:
            for trace in calcStreamMapshifted.keys():
                calcStreamMapshifted[trace] = tr
        calcStreamMap = calcStreamMapshifted

    if cfg.Bool('shift_by_phase_cc') is True:
        from stacking import align_traces
        calcStreamMapshifted = calcStreamMap.copy()
        list_tr = []
        for trace in calcStreamMapshifted.keys():
            tr_org = calcStreamMapshifted[trace]
            list_tr.append(tr_org)
        shifts, ccs = align_traces(list_tr, 10, master=False)
        for shift in shifts:
            for trace in calcStreamMapshifted.keys():
                tr_org = obspy_compat.to_pyrocko_trace(
                    calcStreamMapshifted[trace])
                tr_org.shift(shift)
                shifted = obspy_compat.to_obspy_trace(tr_org)
                calcStreamMapshifted[trace] = shifted
        calcStreamMap = calcStreamMapshifted

    if cfg.Bool('shift_by_phase_onset') is True:
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
            phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
            fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                            depth=ev.depth * 1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(directory,
                                                         'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        for tracex in calcStreamMapshifted.keys():
            for trl in shifted_traces:
                if (str(trl.name()[4:12]) == str(tracex[4:])
                        or str(trl.name()[3:13]) == str(tracex[3:])
                        or str(trl.name()[3:11]) == str(tracex[3:])
                        or str(trl.name()[3:14]) == str(tracex[3:])):
                    mod = trl
                    recordstarttime = \
                        calcStreamMapshifted[tracex].stats.starttime.timestamp
                    recordendtime = \
                        calcStreamMapshifted[tracex].stats.endtime.timestamp
                    tr_org = obspy_compat.to_pyrocko_trace(
                        calcStreamMapshifted[tracex])
                    tr_org_add = mod.chop(recordstarttime, recordendtime,
                                          inplace=False)
                    shifted_obs_tr = obspy_compat.to_obspy_trace(tr_org_add)
                    calcStreamMapshifted[tracex] = shifted_obs_tr
        calcStreamMap = calcStreamMapshifted

    weight = 1.
    if cfg.Bool('weight_by_noise') is True:
        from noise_analyser import analyse
        pjoin = os.path.join
        timeev = util.str_to_time(ev.time)
        trs_orgs = []
        calcStreamMapshifted = calcStreamMap.copy()
        for trace in calcStreamMapshifted.keys():
            tr_org = obspy_compat.to_pyrocko_trace(calcStreamMapshifted[trace])
            trs_orgs.append(tr_org)

        timing = CakeTiming(
            phase_selection='first(p|P|PP|P(cmb)P(icb)P(icb)p(cmb)p)-20',
            fallback_time=100.)
        traces = trs_orgs
        event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                            depth=ev.depth * 1000., time=timeev)
        directory = arrayfolder
        bf = BeamForming(stations, traces, normalize=True)
        shifted_traces = bf.process(event=event,
                                    timing=timing,
                                    fn_dump_center=pjoin(directory,
                                                         'array_center.pf'),
                                    fn_beam=pjoin(directory, 'beam.mseed'))
        i = 0
        store_id = syn_in.store()
        engine = LocalEngine(store_superdirs=[syn_in.store_superdirs()])
        weight = analyse(shifted_traces, engine, event, stations,
                         100., store_id, nwindows=1,
                         check_events=True, phase_def='P')

    if cfg.Bool('array_response') is True:
        from obspy.signal import array_analysis
        from obspy.core import stream
        ntimesr = int((forerun + duration) / step)
        nsampr = int(winlen)
        nstepr = int(step)
        # sliding window properties
        sll_x = -3.0
        slm_x = 3.0
        sll_y = -3.0
        slm_y = 3.0
        sl_s = 0.03   # was '0.03,' (a one-element tuple) in the original
        # frequency properties
        frqlow = 1.0  # was '1.0,' (a one-element tuple) in the original
        frqhigh = 8.0
        prewhiten = 0
        # restrict output
        semb_thres = -1e9
        vel_thres = -1e9
        # stime and etime (the analysis window) are assumed to be provided by
        # the caller; the original 'stime = stime' self-assignments were no-ops
        stream_arr = stream.Stream()
        for trace in calcStreamMapshifted.keys():
            stream_arr.append(calcStreamMapshifted[trace])
        results = array_analysis.array_processing(
            stream_arr, nsamp, nstep,
            sll_x, slm_x, sll_y, slm_y,
            sl_s, semb_thres, vel_thres,
            frqlow, frqhigh, stime,
            etime, prewhiten)
        timestemp = results[0]
        relative_relpow = results[1]
        absolute_relpow = results[2]

    for trace in calcStreamMap.keys():
        recordstarttime = calcStreamMap[trace].stats.starttime
        d = calcStreamMap[trace].stats.starttime
        d = d.timestamp
        if calcStreamMap[trace].stats.npts < minSampleCount:
            minSampleCount = calcStreamMap[trace].stats.npts

    ###########################################################################
    traces = num.ndarray(shape=(len(calcStreamMap), minSampleCount),
                         dtype=float)
    traveltime = num.ndarray(shape=(len(calcStreamMap), dimX * dimY),
                             dtype=float)
    latv = num.ndarray(dimX * dimY, dtype=float)
    lonv = num.ndarray(dimX * dimY, dtype=float)
    ###########################################################################

    c = 0
    streamCounter = 0
    for key in calcStreamMap.keys():
        streamID = key
        c2 = 0
        for o in calcStreamMap[key]:
            if c2 < minSampleCount:
                traces[c][c2] = o
                c2 += 1

        for key in TTTGridMap.keys():
            if streamID == key:
                traveltimes[streamCounter] = TTTGridMap[key]
            # else: no travel-time grid available for this stream

        if streamCounter not in traveltimes:
            continue  # hs : thread crashed before

        g = traveltimes[streamCounter]
        dimZ = g.dimZ
        mint = g.mint
        gridElem = g.GridArray

        for x in range(dimX):
            for y in range(dimY):
                elem = gridElem[x, y]
                traveltime[c][x * dimY + y] = elem.tt
                latv[x * dimY + y] = elem.lat
                lonv[x * dimY + y] = elem.lon
        # endfor

        c += 1
        streamCounter += 1
    # endfor

    # ########### CALCULATE PARAMETER FOR SEMBLANCE CALCULATION ###############
    nsamp = winlen * new_frequence
    nstep = step * new_frequence
    migpoints = dimX * dimY
    dimZ = 0
    maxp = int(Config['ncore'])

    Logfile.add('PROCESS %d NTIMES: %d' % (flag, ntimes))

    if False:
        print('nostat ', nostat, type(nostat))
        print('nsamp ', nsamp, type(nsamp))
        print('ntimes ', ntimes, type(ntimes))
        print('nstep ', nstep, type(nstep))
        print('dimX ', dimX, type(dimX))
        print('dimY ', dimY, type(dimY))
        print('mint ', Gmint, type(mint))
        print('new_freq ', new_frequence, type(new_frequence))
        print('minSampleCount ', minSampleCount, type(minSampleCount))
        print('latv ', latv, type(latv))
        print('traces', traces, type(traces))

    # ===================== compressed sensing ================================
    try:
        cs = cfg.cs()
    except Exception:
        cs = 0
    if cs == 1:
        csmaxvaluev = num.ndarray(ntimes, dtype=float)
        csmaxlatv = num.ndarray(ntimes, dtype=float)
        csmaxlonv = num.ndarray(ntimes, dtype=float)
        folder = Folder['semb']
        fobjcsmax = open(os.path.join(folder, 'csmax_%s.txt' % (switch)), 'w')
        traveltimes = traveltime.reshape(1, nostat * dimX * dimY)
        traveltime2 = toMatrix(traveltimes, dimX * dimY)  # for relstart
        traveltime = traveltime.reshape(dimX * dimY, nostat)

        import matplotlib as mpl
        import scipy.optimize as spopt
        import scipy.fftpack as spfft
        import scipy.ndimage as spimg
        import cvxpy as cvx
        import matplotlib.pyplot as plt

        A = spfft.idct(traveltime, norm='ortho', axis=0)
        n = (nostat * dimX * dimY)
        vx = cvx.Variable(dimX * dimY)
        res = cvx.Variable(1)
        objective = cvx.Minimize(cvx.norm(res, 1))
        back2 = num.zeros([dimX, dimY])
        l = int(nsamp)
        fobj = open(
            os.path.join(folder,
                         '%s-%s_%03d.cs' % (switch, Origin['depth'], l)),
            'w')
        for i in range(ntimes):
            ydata = []
            try:
                for tr in traces:
                    relstart = int((dimX * dimY - mint) * new_frequence
                                   + 0.5) + i * nstep
                    tr = spfft.idct(
                        tr[relstart + i:relstart + i + dimX * dimY],
                        norm='ortho', axis=0)
                    ydata.append(tr)
                ydata = num.asarray(ydata)
                ydata = ydata.reshape(dimX * dimY, nostat)

                constraints = [
                    res == cvx.sum_entries(0 + num.sum([
                        ydata[:, x] - A[:, x] * vx for x in range(nostat)
                    ]))
                ]

                prob = cvx.Problem(objective, constraints)
                result = prob.solve(verbose=False, max_iters=200)

                x = num.array(vx.value)
                x = num.squeeze(x)
                back1 = x.reshape(dimX, dimY)
                sig = spfft.idct(x, norm='ortho', axis=0)
                back2 = back2 + back1
                xs = num.array(res.value)
                xs = num.squeeze(xs)
                max_cs = num.max(back1)
                idx = num.where(back1 == back1.max())
                csmaxvaluev[i] = max_cs
                csmaxlatv[i] = latv[idx[0]]
                csmaxlonv[i] = lonv[idx[1]]
                fobj.write('%.5f %.5f %.20f\n' % (latv[idx[0]],
                                                  lonv[idx[1]], max_cs))
                fobjcsmax.write('%.5f %.5f %.20f\n' % (latv[idx[0]],
                                                       lonv[idx[1]], max_cs))
            except Exception:
                pass
        fobj.close()
        fobjcsmax.close()

    # ====================== semblance calculation ============================
    t1 = time.time()
    traces = traces.reshape(1, nostat * minSampleCount)
    traveltimes = traveltime.reshape(1, nostat * dimX * dimY)

    USE_C_CODE = False
    # try:
    if USE_C_CODE:
        import Cm
        import CTrig
        start_time = time.time()
        k = Cm.otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY, Gmint,
                     new_frequence, minSampleCount, latv, lonv, traveltimes,
                     traces)
        print("--- %s seconds ---" % (time.time() - start_time))
    else:
        start_time = time.time()
        ntimes = int((forerun + duration) / step)
        nsamp = int(winlen)
        nstep = int(step)
        Gmint = cfg.Int('forerun')
        k = otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY, Gmint,
                  new_frequence, minSampleCount, latv, lonv, traveltimes,
                  traces, calcStreamMap, timeev)
        print("--- %s seconds ---" % (time.time() - start_time))
    # except ValueError:
    #     k = Cm.otest(maxp, nostat, nsamp, ntimes, nstep, dimX, dimY, Gmint,
    #                  new_frequence, minSampleCount, latv, lonv, traveltimes,
    #                  traces)
    #     print("loaded tttgrid has probably wrong dimensions or stations, "
    #           "delete ttgrid or exchange is recommended")

    t2 = time.time()
    Logfile.add('%s took %0.3f s' % ('CALC:', (t2 - t1)))

    partSemb = k
    partSemb = partSemb.reshape(ntimes, migpoints)

    return partSemb, weight, array_center
def load(self, inv):
    # load the data as a pyrocko pile and reform them into an array of traces
    data = pile.make_pile([self.wdir + self.reduction])
    self.traces = data.all()

    # load station file
    fname = self.wdir + self.network
    stations_list = model.load_stations(fname)

    for s in stations_list:
        s.set_channels_by_name(*self.component.split())

    self.targets = []
    self.tmin, self.tmax = [], []
    self.arrivals = []
    self.names = []

    for station, tr in zip(stations_list, self.traces):
        # iterate over all stations
        # print(station.lat, station.lon)
        target = Target(
            lat=float(station.lat),       # station latitude
            lon=float(station.lon),       # station longitude
            store_id=inv.store,           # the gf-store to be used for this
            # target; we can also employ different gf-stores for different
            # targets
            interpolation='multilinear',  # interpolation method between gf cells
            quantity='displacement',      # wanted retrieved quantity
            codes=station.nsl() + ('BH' + self.component,))  # station and network code

        # Next we extract the expected arrival time for this station from the
        # store, so we can use this later to define a cut-out window for the
        # optimization:
        self.targets.append(target)
        self.names.append(station.nsl()[1])

    # print(len(self.traces), len(self.targets))
    for station, tr, target in zip(stations_list, self.traces, self.targets):
        engine = LocalEngine(store_superdirs=inv.store_path)
        store = engine.get_store(inv.store)
        # trace.snuffle(tr, events=self.events)
        arrival = store.t(self.phase, self.base_source, target)
        # expected P-wave arrival
        # print(arrival)
        tmin = self.base_source.time + arrival - 15  # start 15 s before theor. arrival
        tmax = self.base_source.time + arrival + 15  # end 15 s after theor. arrival
        # print(self.tmin, self.tmax)
        tr.chop(tmin=tmin, tmax=tmax)
        self.tmin.append(tmin)
        self.tmax.append(tmax)
        self.arrivals.append(self.base_source.time + arrival)

    self.Npoints = len(self.targets)
    # data vector
    self.d = []
    self.d.append(list(map(lambda x: getattr(x, 'ydata'), self.traces)))
    self.d = flatten(self.d)
    # time vector
    t = []
    for i in range(self.Npoints):
        t.append(self.traces[i].get_xdata())
    # convert time
    self.t = time2dec(list(map(util.time_to_str, flatten(t))))
    # print(self.t)
    self.N = len(self.d)
scale = 2e-14
cake_phase = cake.PhaseDef("P")
phase_list = [cake_phase]
waveforms_events = []
waveforms_noise = []
stations = model.load_stations("stations.raw.txt")
nstations = len(stations) * 3
noised = True
nevents = 1200
targets = []
for st in stations:
    for cha in st.channels:
        target = Target(
            lat=st.lat,
            lon=st.lon,
            store_id=store_id,
            interpolation='multilinear',
            quantity='displacement',
            codes=st.nsl() + (cha.name,))
        targets.append(target)

import _pickle as pickle
try:
    f = open("data_waveforms", 'rb')
    waveforms_events, nsamples = pickle.load(f)
    f.close()
except Exception:
    f = open("data_waveforms", 'wb')
    for i in range(0, nevents):
trace_offset = 0.1
store_id = 'qplayground_total_4_mr_full'
store_id = 'crust2_u6'
engine = LocalEngine(store_superdirs=['.'])

ls = num.linspace
dips = ls(0, 360, nframes)
rakes = ls(0., 10., nframes)
strikes = ls(-180., 180, nframes)
depths = ls(10 * km, 10 * km, nframes)
east_shifts = ls(100., 200 * km, ntraces)
nshift = 5 * km

targets = [
    Target(store_id=store_id,
           east_shift=east_shift,
           north_shift=east_shift)
    for east_shift in east_shifts]

ylim = ntraces * trace_offset
fig = plt.figure()
ax = fig.add_subplot(111, facecolor='#111111')
ax.set_xlim(-20, 190)
ax.set_ylim(-1., ylim + .7)

lines = []
for t in targets:
    l, = ax.plot([], [], color='white',
def gen_dataset(scenarios, projdir, store_id, modelled_channel_codes, magmin,
                magmax, depmin, depmax, latmin, latmax, lonmin, lonmax,
                stations_file, gf_store_superdirs, shakemap=True,
                add_noise=True, t_station_dropout=False, simple_induced=True,
                seiger=True, generate_scenario_type="full", event_list=None,
                responses="responses_bgr.xml"):
    # (the parameter was misspelled 'respones' in the original, while the body
    # referenced 'responses')
    # random station dropout
    if seiger is True:
        times_kuper, pressure_kuper, temp_kuper, rate_kuper = \
            get_kuperkoch_data()
        mean_pressure = num.mean(pressure_kuper)
        mean_temp = num.mean(temp_kuper)
        mean_rate = num.mean(rate_kuper)

    if gf_store_superdirs is None:
        engine = gf.LocalEngine(use_config=True)
    else:
        engine = gf.LocalEngine(store_superdirs=[gf_store_superdirs])

    if t_station_dropout is True:
        from pyrocko.io import stationxml
        station_xml = stationxml.load_xml(filename=responses)

    for scenario in range(scenarios):
        generated_scenario = False
        while generated_scenario is False:
            try:
                if seiger is True:
                    # 'is' compared string identity in the original; '==' is
                    # the intended comparison
                    if generate_scenario_type == "catalog":
                        choice = num.random.choice(len(event_list), 1)
                        base_event = event_list[choice]
                    source, event = get_source_seiger(
                        generate_scenario_type, magmin, magmax, depmin,
                        depmax, latmin, lonmin, latmax, lonmax,
                        simple_induced, use_pressure=use_pressure,
                        event=base_event, store_id=store_id)
                else:
                    source, event = get_source()
                savedir = projdir + '/scenario_' + str(scenario) + '/'
                if not os.path.exists(savedir):
                    os.makedirs(savedir)
                if stations_file is not None:
                    stations = model.load_stations(projdir + "/"
                                                   + stations_file)
                    targets = []
                    for st in stations:
                        for cha in st.channels:
                            target = Target(
                                lat=st.lat,
                                lon=st.lon,
                                store_id=store_id,
                                interpolation='multilinear',
                                quantity='displacement',
                                codes=st.nsl() + (cha.name,))
                            targets.append(target)
                else:
                    targets = []
                    for st in stations:
                        channels = modelled_channel_codes
                        for cha in channels:
                            target = Target(
                                lat=st.lat,
                                lon=st.lon,
                                store_id=store_id,
                                interpolation='multilinear',
                                quantity='displacement',
                                codes=st.nsl() + (cha,))
                            targets.append(target)
                if shakemap is True:
                    shakemap_fwd.make_shakemap(engine, source, store_id,
                                               savedir, stations=stations)
                gen_loop = True
                response = engine.process(source, targets)
                synthetic_traces = response.pyrocko_traces()
                if t_station_dropout is True:
                    station_time_dict = load_time_dependent_stations(
                        event, stations, station_xml)
                    for tr in synthetic_traces:
                        for st in station_time_dict:
                            if tr.station == st.station:
                                tr.ydata = tr.ydata * 0.
                if choice == 2:
                    synthetic_traces = gen_white_noise(synthetic_traces)
                    event.tags = ["no_event"]
                if add_noise is True and choice != 2:
                    add_white_noise(synthetic_traces)
                noise_events = gen_noise_events(targets, synthetic_traces,
                                                engine)
                events = [event]
                save(synthetic_traces, events, stations, savedir,
                     noise_events=noise_events)
                generated_scenario = True
            except pyrocko.gf.seismosizer.SeismosizerError:
                pass
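# Usage sketch (all values are placeholder assumptions): generate two random
# scenarios into 'project/', using stations from 'stations.txt' and skipping
# the seiger- and shakemap-specific branches.
gen_dataset(2, 'project', 'crust2_dd', 'ENZ', magmin=1., magmax=3.,
            depmin=1., depmax=10., latmin=49., latmax=50., lonmin=8.,
            lonmax=9., stations_file='stations.txt',
            gf_store_superdirs='/path/to/stores',
            shakemap=False, seiger=False)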
# The store we are going to extract data from:
store_id = 'kazeroon'

# We need a pyrocko.gf.Engine object which provides us with the traces
# extracted from the store. In this case we are going to use a local
# engine since we are going to query a local store.
engine = LocalEngine(store_superdirs=['/home/alireza/Kiwi/GFDB'])

# Define a list of pyrocko.gf.Target objects, representing the recording
# devices. In this case one station with a three-component sensor will
# serve fine for demonstration.
channel_codes = 'ENZ'
targets = [
    Target(
        lat=36.21,
        lon=48.22,
        store_id=store_id,
        codes=('', 'CVD', '', channel_code))
    for channel_code in channel_codes]

# Let's use a double couple source representation.
source_dc = DCSource(
    lat=34.00,
    lon=45.00,
    depth=10000,
    strike=170,
    dip=35,
    rake=50,
    magnitude=5.6)

# Processing that data will return a pyrocko.gf.Response object.
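# Completing the example as announced by the comment above (this mirrors the
# earlier demonstration snippet):
response = engine.process(source_dc, targets)
synthetic_traces = response.pyrocko_traces()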