def pick_events(window=(-100, 500), filter=(0.033, 2.), phase='P',
                new_sampling_rate=20):  #@ReservedAssignment
    """Pick event windows around the given phase and write them to data.rf_events."""
    logfile = os.path.dirname(data.rf_events) + '/log_pick_events_%s.txt'
    util.checkDir(logfile)
    util.setRootLogger(logfile=logfile % '', logdebugfile=logfile % '_debug')
    log.info('***** Pick events: %s' % util.parameters())
    mod_data.eventPicker(data, component='all', phase=phase, window=window,
                         filter=filter, new_sampling_rate=new_sampling_rate)
def calculate_rf(year='*', pp=False, rotateLQT=True, deconvolvef=False):
    """Calculate receiver functions for all stations and write 'nomout' files."""
    logfile = data.rf_results_dir + 'a_log%s.txt'
    util.checkDir(logfile)
    util.setRootLogger(logfile=logfile % '', logdebugfile=logfile % '_debug')
    log.info('***** Calculate RF')
    for station in data.stations.keys():
        stream = read(data.rf_events % (station, year + '.QHD'))
        if pp:
            stream.setPhase('PP')
            stream.pspier(60, data.stations)
        log.info('number of events: %s' % (len(stream) // 3))
        #stream.filter2(0.033, 2.)
        stream.trim2(-20, 100, relative='ponset')
        stream.sort(('event.id', 'station', 'component'))
        stream.check()
        stream.setHI('mark', False)
        if rotateLQT:
            stream.rotateZNE2LQT(-5, 15, usetheo=True)
            stream.afarm(signoise=2.0, remove=True)
        else:
            #stream.rotateZNE2LQT(-5, 15, usetheo=True)
            #stream.afarm(signoise=2.0, remove=True)
            #stream.trim2(-20, 100, relative='ponset')
            #stream.rotateLQT2ZNE(usetheo=True)
            stream.rotateNE2RT()
            #stream.afarm(signoise=2.0, remove=True)
            #stream.trim2(-25, 100, relative='ponset')
        #log.info('number of events after first farm: %s' % (len(stream) // 3))
        #util.ipshell()
        #stream.receiverf(water=0.005, gauss=5, tshift=20, pad=0,
        #                 window='tukey', start=-10, end=30, where='ponset', lenslope=5)
        if deconvolvef:
            stream.receiverf()
        else:
            stream.receivert()
        #stream.receiverSH(-10, 80, 1)
        #stream.afarm('rf', signoise=2., signoiseQ=1., maxL=1 / 1.5, sigQ=False, broad=True, remove=False)
        #stream.afarm('rf', signoise=False, signoiseQ=False, maxL=False, sigQ=False, broad=False, remove=False)
        #log.info('number of events after second farm: %s' % (len(stream) // 3))
        stream.write(data.rf_results % (station, '') + 'nomout', 'Q')
        #stream.writey(data.rf_results % (station, '%s') + '_nomout', 'Q')
        print stream.getReasons()
def mout(station='*', year='*'):
    log.info('***** Move Out correction')
    global rf_stream
    logfile = data.rf_results_dir + 'a_mout_log%s.txt'
    util.checkDir(logfile)
    util.setRootLogger(logfile=logfile % '', logdebugfile=logfile % '_debug')
    rf_stream = read(data.rf_results % (station, year) + 'nomout.QHD')
    if False:
        st2 = rf_stream.copy()
        st2.moveout(phase='Ppps')
        st2.trim2(-20, 100)
        st2.write(data.rf_results % '_Ppps', 'Q')
        st3 = rf_stream.copy()
        st3.moveout(phase='Ppss')
        st3.trim2(-20, 100)
        st3.write(data.rf_results % '_Ppss', 'Q')
    rf_stream.moveout(phase='Ps')
    #rf_stream.trim2(-20, 100)
    rf_stream.writex(data.rf_results % ('%s', '') + 'mout', 'Q', years=False)
def main():
    global data, plotdir, mark

    ##### Parkfield filenames
    # pkd = mod_data.Parkfield()
    # pkd.events = '/home/richter/Data/events/events_30-90_mag5.5_Parkfield.txt'
    # pkd.rf_events = pkd.data + '/receiver/events30-90/%s_%s'
    # pkd.rf_results = pkd.results + '/receiver/results/%s_%s'
    # pkd.rf_results_dir = pkd.results + '/receiver/results/'
    # pkd.rf_events = pkd.data + '/receiver/events30-90_mag5.5/%s_%s'
    # pkd.rf_results = pkd.results + '/receiver/results_mag5.5/%s_%s'
    # pkd.rf_results_dir = pkd.results + '/receiver/results_mag5.5/'
    # pkd.rf_plot_dir = pkd.rf_results_dir + 'plots/'

    ##### IPOC filenames
    ipoc = mod_data.IPOC()
    ipoc.events = '/home/richter/Data/events/2012_03_events_27-93_mag5.5_IPOC.txt'
    ipoc.rf_events = ipoc.data + '/receiver/2012_events_mag5.5/%s_%s'
    ipoc.rf_results = ipoc.results + '/receiver/2012_mag5.5_RT/%s_%s'
    ipoc.rf_results_dir = ipoc.results + '/receiver/2012_mag5.5_RT/'
    ipoc.rf_plot_dir = ipoc.rf_results_dir + 'plots/'

    ##### EventPicker
    # data = pkd
    # pick_events()
    # data.events = '/home/richter/Data/events/events_90-160_mag5.5_Parkfield.txt'
    # data.rf_events = data.data + '/receiver/events90-160_mag5.5/%s_%s'
    # pick_events(window=(-100, 400), phase='PP')

    data = ipoc
    # data.stations.pick('LVC')
    # events2 = events.Events.read(data.events)
    # events2.pick(after='2012-01-01')
    # data.events = events2
    #pick_events()
    # data.events = '/home/richter/Data/events/events_90-160_mag5.5_IPOC.txt'
    # data.rf_events = data.data + '/receiver/events90-160_mag5.5/%s_%s'
    # pick_events(window=(-100, 400), phase='PP')

    ##### Constrain events
    #from sito import Events
    #data.events = Events.read(data.events)
    #data.events.pick(after='2010-01-01')

    ##### Calculating RFs
    # logging.basicConfig()
    # data = pkd
    # plotdir = data.rf_plot_dir
    # util.checkDir(plotdir)
    # calculate_rf()
    # mout()
    ## produce_event_files('PKD')
    # create_rf_plots(years=range(1996, 2011))
    ## create_interval_plot('225<=st.azi<=265.7')
    ## time_binned('PKD', 1996.75, 2011.25, 0.5, '225<=st.azi<=265.7')

    # IPOC
    logging.basicConfig()
    data = ipoc
    plotdir = data.rf_plot_dir
    util.checkDir(plotdir)
    calculate_rf(rotateLQT=False)
    mout()
    #mout('LVC')
    # produce_event_files('PB02')
    ###create_rf_plots(components='LQT')  # Do not use!
    create_rf_plots(components='ZRT')
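# Standard entry point so the workflow above can be run directly
# (an assumption: the original script may place this guard at the very end
# of the file instead).
if __name__ == '__main__':
    main()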
def plotXcorrs(data, correlations, t1, t2, filters=None, filter_now=True,
               start=None, end=None, select=None, plot_overview=True,
               plot_years=True, add_to_title='', add_to_file='', show=False,
               landscape=False, use_dlognorm=True, period=24 * 3600,
               stack=None, ext='.png', **kwargs):
    """Plot cross-correlation matrices for all station pairs, as an overview
    over the whole period and optionally per year."""
    figsize = (8.267, 11.693)
    if landscape:
        figsize = figsize[::-1]
    util.checkDir(data.getPlotX(('', ''), t1, stack=stack))
    if filters is None:
        filters = (None,)
    if stack:
        print 'Plot stack xcorrs...'
    else:
        print 'Plot xcorrs...'
    for correlation in ProgressBar()(correlations):
        stations = correlation[0][:-1], correlation[1][:-1]
        try:
            dist = data.stations.dist(*stations)
        except:  # TaiQ station
            start = -150
            end = 150
        if start is None or end is None:
            # choose the lag-time window from the inter-station distance
            if dist >= 120:
                t = (dist // 100) * 50 + 100
            else:
                t = 100
            #print stations, dist, t
            startt = -t
            endt = t
        else:
            startt = start
            endt = end
        if correlation[0] == correlation[1] and (start is None or start < 0):
            startt = 0
        if filter_now:
            try:
                stream_orig = data.readX(correlation, t1, t2, period=period,
                                         filter=None, stack=stack)
            except IOError as ex:
                print ex
                continue
        for filter_ in filters:
            if filter_now:
                stream = stream_orig.copy()
                if filter_:
                    stream.filter2(*filter_)
            else:
                try:
                    stream = data.readX(correlation, t1, t2, period=period,
                                        filter=filter_, stack=stack)
                except IOError as ex:
                    print ex
                    continue
            if select:
                stream = stream.select(expr=select)
            #print stream
            if len(stream) > 0:
                if plot_overview:
                    if show:
                        save = None
                        figtitle = add_to_title
                    else:
                        savebase = data.getPlotX(correlation, 'all', period=period,
                                                 filter=filter_, stack=stack) + add_to_file
                        save = savebase + ext
                        savebase = os.path.basename(savebase)
                        figtitle = savebase + ' ' + add_to_title
                    stream.plotXcorr(startt, endt, imshow=True,
                                     use_dlognorm=use_dlognorm,
                                     fig=plt.figure(figsize=figsize),
                                     figtitle=figtitle, save=None, show=show,
                                     dateformatter='%Y-%m-%d',
                                     #dateformatter='%y %b'
                                     **kwargs)
                    tit = stream[0].stats.station
                    tit_parts = stream[0].stats.station.split('-')
                    if tit_parts[0] == tit_parts[1]:
                        tit = tit_parts[0]
                    tit = tit + ' ' + add_to_title
                    plt.gcf().axes[1].annotate(tit, (0.5, 1.), (0, 5),
                                               'axes fraction', 'offset points',
                                               clip_on=False, ha='center',
                                               va='bottom')
                    if save:
                        plt.gcf().savefig(save, bbox_inches='tight')
                    if show:
                        plt.show()
                    plt.close()
                if plot_years:
                    for ys in streamyeargen2(stream):
                        t_year, s_year = ys
                        if show:
                            save = None
                            figtitle = add_to_title
                        else:
                            savebase = data.getPlotX(correlation, t_year,
                                                     period=period, filter=filter_,
                                                     stack=stack) + add_to_file
                            save = savebase + ext
                            savebase = os.path.basename(savebase)
                            figtitle = savebase + ' ' + add_to_title
                        s_year.plotXcorr(startt, endt, imshow=True,
                                         use_dlognorm=use_dlognorm,
                                         fig=plt.figure(figsize=figsize),
                                         figtitle=figtitle,
                                         #'station year ' + add_to_title,
                                         dateformatter='%y-%m-%d', show=False,
                                         save=save, **kwargs)
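# Hypothetical usage sketch, kept commented out in the same style as the
# disabled calls in main().  The station pair, time span and filter tuples are
# assumptions for illustration only; UTC stands for obspy's UTCDateTime, and
# the trailing 'Z' is the component letter stripped by correlation[0][:-1].
#plotXcorrs(data, correlations=(('PB01Z', 'PB02Z'),),
#           t1=UTC('2008-01-01'), t2=UTC('2009-01-01'),
#           filters=((0.1, 0.5), (0.5, 1.)), stack=None,
#           plot_overview=True, plot_years=True, show=False)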
# Excerpt from the per-correlation stretching loop; stream, correlation, i,
# reftr, tws, filters and path are defined by the enclosing function.
if len(stream) == 0:
    continue
if USE_TWS_AFTER_SW:
    # time windows after the direct surface wave (assumed ~3 km/s velocity)
    dist = data.stations.dist(correlation[0][:-1], correlation[1][:-1])
    if dist > 550:
        continue
    direct = int(round(dist / 3.))
    tws = (((-direct - 30 - 80, direct + 30), 80),)
stream.trim2(-trim_stream, trim_stream, 'middle')
stretches = None
if reftr:
    stretches = []
    for tw in tws[i][0]:
        if tw <= border_time:
            npzfile = np.load(fit_file % (getCor(*correlation), tw))
            stretches.append(-npzfile['sinus_exp'])
result = stream.stretch(reftr=reftr, stretch=stretches, str_range=str_range,
                        nstr=nstr, time_windows=tws[i], sides='right')
if period == 24 * 3600:
    dates = [(time + 12 * 3600).toordinal() for time in stream.getHI('starttime')]
else:
    def get_ord(time):
        # fractional ordinal of the interval midpoint (for plotting)
        day = time + period / 2
        day = (day.toordinal() + 1. * day.hour / 24 +
               1. * day.minute / 24 / 60 + 1. * day.second / 24 / 3600)
        return day
    dates = [get_ord(time) for time in stream.getHI('starttime')]
dates = _correct_dates(dates)
print np.array([dates[j + 1] - dates[j] for j in range(len(dates) - 1)])
checkDir(path + 'bla')  # make sure the output directory exists
np.savez(path + 'data_stretching_%s%s%s%s' %
         (getCor(*correlation), getFilter(filters[i]),
          getStack(None, period, stack), add_to_file),
         tw_start=tws[i][0], tw_width=tws[i][1], dates=dates,
         **result)  #corr=corr, stretch=stretch)
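# Minimal helper sketch (hypothetical, not used elsewhere in this script):
# read back one of the np.savez result files written above and return its
# arrays as a dict.  Keys include tw_start, tw_width and dates plus whatever
# stream.stretch returned in `result` (e.g. corr/stretch, per the
# commented-out hint above).
def load_stretching_results(fname):
    npzfile = np.load(fname)
    return dict((key, npzfile[key]) for key in npzfile.files)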