def test_gps_utc_offset(self):
    for t_utc_0 in [x[0] for x in util.read_leap_seconds2()]:
        t_utc_0 = float(t_utc_0)
        ts_utc = num.linspace(t_utc_0 - 2.0, t_utc_0 + 2.0, 17)
        for t_utc in ts_utc:
            t_gps = t_utc + util.gps_utc_offset(t_utc)
            t_utc2 = t_gps + util.utc_gps_offset(t_gps)
            self.assertEqual(util.tts(t_utc), util.tts(t_utc2))

        ts_gps = num.linspace(
            ts_utc[0] + util.gps_utc_offset(ts_utc[0]),
            ts_utc[-1] + util.gps_utc_offset(ts_utc[-1]),
            17 + 4)

        t_utc_wrapped = []
        for t_gps in ts_gps:
            t_utc = t_gps + util.utc_gps_offset(t_gps)
            t_utc_wrapped.append(t_utc - t_utc_0)

        num.testing.assert_almost_equal(
            t_utc_wrapped,
            num.concatenate((
                num.linspace(-2.0, 0.75, 12),
                num.linspace(0.0, 2.0, 9))))
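# A minimal round-trip sketch of the GPS/UTC helpers exercised by the test
# above, assuming a pyrocko installation; times are epoch floats.
from pyrocko import util

t_utc = util.str_to_time('2017-01-01 00:00:00')
t_gps = t_utc + util.gps_utc_offset(t_utc)   # UTC -> GPS
t_back = t_gps + util.utc_gps_offset(t_gps)  # GPS -> UTC
assert util.tts(t_utc) == util.tts(t_back)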
def search_catalog(date, min_magnitude, dayrange=1.):
    """
    Search the gCMT catalog around the given date, filtering events by a
    minimum magnitude.

    Parameters
    ----------
    date : str
        'YYYY-MM-DD', date of the event
    min_magnitude : float
        approximate minimum Mw of the event
    dayrange : float
        half-width of the temporal search interval [days] around date

    Returns
    -------
    event : :class:`pyrocko.model.Event`
    """
    gcmt = catalog.GlobalCMT()

    # seconds_str, sphr (seconds per hour) and hrpd (hours per day) are
    # module-level constants.
    time_s = util.stt(date + ' ' + seconds_str)
    d1 = time_s - (dayrange * (sphr * hrpd))
    d2 = time_s + (dayrange * (sphr * hrpd))

    logger.info(
        'Getting relevant events from the gCMT catalog for the dates: '
        '%s - %s\n' % (util.tts(d1), util.tts(d2)))

    events = gcmt.get_events((d1, d2), magmin=min_magnitude)

    if len(events) < 1:
        logger.warning('Found no event information in the gCMT catalog.')
        event = None

    if len(events) > 1:
        logger.info(
            'More than one event from that date with specified magnitude '
            'found! Please copy the relevant event information to the '
            'configuration file!')
        for event in events:
            print(event)

        event = events[0]

    elif len(events) == 1:
        event = events[0]

    return event
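# Hedged usage sketch for search_catalog. It assumes the module-level
# constants the function relies on are defined, e.g.:
#   seconds_str = '00:00:00'; sphr = 3600.; hrpd = 24.
event = search_catalog(date='2010-02-27', min_magnitude=8.0, dayrange=1.)
if event is not None:
    print(event.name, event.magnitude)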
def get_time_format_eq(time):
    # Convert an epoch timestamp into 'YYYYMMDDTHHMMSSZ' form by slicing
    # the 'YYYY-MM-DD HH:MM:SS' string produced by util.tts().
    time = util.tts(time)
    time_year = time[0:4]
    time_month = time[5:7]
    time_day = time[8:10]
    time_hour = time[11:13]
    time_minute = time[14:16]
    time_seconds = time[17:19]
    date = time_year + time_month + time_day + "T" \
        + time_hour + time_minute + time_seconds + "Z"
    return date
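# The same 'YYYYMMDDTHHMMSSZ' string can be built in one call via the
# format argument of util.tts; 'T' and 'Z' are literal characters in the
# strftime-style template.
t = util.str_to_time('2021-06-01 12:00:00')
date = util.tts(t, format='%Y%m%dT%H%M%SZ')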
def benchmark_stt_tts(self):
    for x in range(2):
        if x == 1:
            util.util_ext = None

        t = util.str_to_time('1999-03-20 20:10:10')
        tt1 = time.time()
        for i in range(10000):
            s = util.tts(t)
            util.stt(s)

        tt2 = time.time()
        print(tt2 - tt1)
def associate(path, tmin, tmax, minlat=49.1379, maxlat=49.1879,
              minlon=8.1223, maxlon=8.1723, channels=["EH[ZNE]"],
              client_list=["BGR"], iter=None, pair_n=3, moving_window=8):
    import os
    import shutil

    from silvertine.detector.utils.associator import run_associator

    if iter is None:
        out_basepath = os.path.join(path, 'detections')
        out_dir = os.path.join(path, 'association')
    else:
        out_basepath = os.path.join(path, 'detections_%s_%s' % (tmin, tmax))
        out_dir = os.path.join(path, 'association_%s_%s' % (tmin, tmax))

    try:
        shutil.rmtree(out_dir)
    except Exception:
        pass

    os.makedirs(out_dir)

    # run_associator expects time strings; convert epoch floats with tts.
    if tmin is not None:
        tmin = util.tts(tmin)
    if tmax is not None:
        tmax = util.tts(tmax)

    run_associator(input_dir=out_basepath, start_time=tmin, end_time=tmax,
                   moving_window=moving_window, pair_n=pair_n,
                   output_dir=out_dir, consider_combination=False)
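# Hedged usage sketch: run the associator over one day of detections below
# a run directory ('run_dir' is illustrative; the directory layout is
# assumed to come from the detector step).
from pyrocko import util

t0 = util.str_to_time('2021-06-01 00:00:00')
t1 = util.str_to_time('2021-06-02 00:00:00')
associate('run_dir', tmin=t0, tmax=t1, pair_n=3, moving_window=8)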
def call(self):
    self.cleanup()
    viewer = self.get_viewer()
    vtmin, vtmax = viewer.get_time_range()
    pile = self.get_pile()
    traces = list(pile.chopper(
        tmin=vtmin, tmax=vtmax, trace_selector=viewer.trace_selector))

    event, _ = self.get_active_event_and_stations()

    traces = [tr for trs in traces for tr in trs]

    stations = []
    for tr in traces:
        if tr.nslc_id[:2] in viewer.stations.keys():
            stations.append(viewer.get_station(viewer.station_key(tr)))

    distances = [ortho.distance_accurate50m(event, s) for s in stations]
    distances = [d / 1000. for d in distances]
    maxd = max(distances)
    mind = min(distances)
    distances = dict(zip([s.nsl() for s in stations], distances))
    matching_traces = [
        x for x in traces
        if util.match_nslc(self.get_station_patterns(stations), x.nslc_id)]

    if self.add_markers:
        markers = self.get_markers()
        markers = [
            m for m in markers
            if m.tmax <= vtmax and m.tmin >= vtmin and m.selected]

        markers = dict(zip([tuple(m.nslc_ids) for m in markers], markers))

    if self.fig is None or self.fframe.closed or not self._live_update:
        self.fframe = self.pylab(get='figure_frame')
        self.fig = self.fframe.gcf()

    if self._live_update:
        self.fig.clf()

    ymin = mind - 0.06 * (maxd - mind)
    ymax = maxd + 0.06 * (maxd - mind)
    ax = self.fig.add_subplot(111)
    xmin = 9e9
    xmax = -xmin
    texts = []
    manual_scale = 0.1 * (maxd - mind) * self.yscale

    if self.ampl_scaler == 'total min/max':
        max_trace = max(
            matching_traces, key=lambda x: max(abs(x.get_ydata())))
        tr_maxy = max(abs(max_trace.get_ydata()))
        ampl_scale = float(tr_maxy)

    for tr in matching_traces:
        if viewer.highpass:
            tr.highpass(4, viewer.highpass)
        if viewer.lowpass:
            tr.lowpass(4, viewer.lowpass)

        if tr.nslc_id[:3] not in distances.keys():
            continue

        if self.t_red:
            red = distances[tr.nslc_id[:3]] / self.t_red
        else:
            red = 0.

        y_pos = distances[tr.nslc_id[:3]]
        xdata = tr.get_xdata() - red - event.time
        xmin = min(xmin, min(xdata))
        xmax = max(xmax, max(xdata))
        tr_ydata = tr.get_ydata()
        if self.ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(tr_ydata)))
        elif self.ampl_scaler == 'standard deviation':
            ampl_scale = float(num.std(tr_ydata))

        ydata = (tr_ydata / ampl_scale * manual_scale) + y_pos
        ax.plot(xdata, ydata, c='black', linewidth=0.2)

        if self.fill_between:
            ax.fill_between(
                xdata, y_pos, ydata, where=ydata > y_pos,
                color='black', alpha=0.5)

        texts.append(ax.text(
            xmax, y_pos, '%s.%s.%s.%s' % tr.nslc_id,
            horizontalalignment='right', fontsize=6.))

        if self.add_markers:
            for ids, m in markers.items():
                if m.match_nslc(tr.nslc_id) or ids == ():
                    c = m.select_color(m.color_b)
                    c = [ci / 255. for ci in c]
                    t = m.tmin
                    x = [t - red - event.time, t - red - event.time]
                    y = [y_pos - (maxd - mind) * 0.025,
                         y_pos + (maxd - mind) * 0.025]
                    ax.plot(x, y, linewidth=1, color=c)

                    label = m.get_label()
                    if not label:
                        label = ''

                    ax.text(
                        x[1] - x[1] * 0.005, y[1], label, color=c,
                        fontsize=6, verticalalignment='top',
                        horizontalalignment='right')

    for txt in texts:
        txt.set_x(xmax)

    vred_str = '= ' + str(round(self.t_red, 2)) + ' km/s' if self.t_red \
        else 'off'
    ax.text(
        0.5, 0.01, 'time window: %s - %s | Reduction velocity %s'
        % (util.tts(vtmin), util.tts(vtmax), vred_str),
        verticalalignment='bottom', horizontalalignment='center',
        transform=self.fig.transFigure)
    ax.set_ylim([ymin, ymax])
    ax.set_xlim([xmin, xmax])
    ax.set_ylabel('Distance [km]')
    ax.set_xlabel('(red.) Time [s]')
    self.fig.canvas.draw()
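# The time reduction applied above, isolated for clarity: with a reduction
# velocity v_red (km/s), a sample at absolute time t_abs recorded at
# distance d_km is plotted at t_abs - t_event - d_km / v_red, so phases
# travelling at v_red line up vertically. Minimal sketch (names are
# illustrative, not from the plugin):
def reduced_time(t_abs, t_event, d_km, v_red):
    return t_abs - t_event - d_km / v_red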
def main():
    parser = OptionParser(usage=usage, description=description)

    parser.add_option(
        '--force', dest='force', action='store_true', default=False,
        help='allow recreation of output <directory>')

    parser.add_option(
        '--debug', dest='debug', action='store_true', default=False,
        help='print debugging information to stderr')

    parser.add_option(
        '--dry-run', dest='dry_run', action='store_true', default=False,
        help='show available stations/channels and exit '
             '(do not download waveforms)')

    parser.add_option(
        '--continue', dest='continue_', action='store_true', default=False,
        help='continue download after an accident')

    parser.add_option(
        '--local-data', dest='local_data', action='append',
        help='add file/directory with local data')

    parser.add_option(
        '--local-stations', dest='local_stations', action='append',
        help='add local stations file')

    parser.add_option(
        '--selection', dest='selection_file', action='append',
        help='add file with station selection')

    parser.add_option(
        '--local-responses-resp', dest='local_responses_resp',
        action='append',
        help='add file/directory with local responses in RESP format')

    parser.add_option(
        '--local-responses-pz', dest='local_responses_pz', action='append',
        help='add file/directory with local pole-zero responses')

    parser.add_option(
        '--local-responses-stationxml', dest='local_responses_stationxml',
        help='add file with local response information in StationXML format')

    parser.add_option(
        '--window', dest='window', default='full',
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
             '] (time format is YYYY-MM-DD HH:MM:SS)')

    parser.add_option(
        '--out-components', choices=['enu', 'rtu'], dest='out_components',
        default='rtu',
        help='set output component orientations to radial-transverse-up '
             '[rtu] (default) or east-north-up [enu]')

    parser.add_option(
        '--out-units', choices=['M', 'M/S', 'M/S**2'], dest='output_units',
        default='M',
        help='set output units to displacement "M" (default), '
             'velocity "M/S" or acceleration "M/S**2"')

    parser.add_option(
        '--padding-factor', type=float, default=3.0, dest='padding_factor',
        help='extend time window on either side, in multiples of 1/<fmin_hz> '
             '(default: %default)')

    parser.add_option(
        '--zero-padding', dest='zero_pad', action='store_true', default=False,
        help='extend traces by zero-padding if clean restitution requires '
             'longer windows')

    parser.add_option(
        '--credentials', dest='user_credentials', action='append',
        default=[], metavar='SITE,USER,PASSWD',
        help='user credentials for specific site to access restricted data '
             '(this option can be repeated)')

    parser.add_option(
        '--token', dest='auth_tokens', metavar='SITE,FILENAME',
        action='append', default=[],
        help='user authentication token for specific site to access '
             'restricted data (this option can be repeated)')

    parser.add_option(
        '--sites', dest='sites', metavar='SITE1,SITE2,...',
        default='geofon,iris,orfeus',
        help='sites to query (available: %s, default: "%%default")'
             % ', '.join(g_sites_available))

    parser.add_option(
        '--band-codes', dest='priority_band_code',
        metavar='V,L,M,B,H,S,E,...', default='B,H',
        help='select and prioritize band codes (default: %default)')

    parser.add_option(
        '--instrument-codes', dest='priority_instrument_code',
        metavar='H,L,G,...', default='H,L',
        help='select and prioritize instrument codes (default: %default)')

    parser.add_option(
        '--radius-min', dest='radius_min', metavar='VALUE', default=0.0,
        type=float, help='minimum radius [km]')

    parser.add_option(
        '--nstations-wanted', dest='nstations_wanted', metavar='N', type=int,
        help='number of stations to select initially')

    (options, args) = parser.parse_args(sys.argv[1:])
    print('Parsed arguments:', args)
    if len(args) not in (10, 7, 6):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, 'debug')
    else:
        util.setup_logging(program_name, 'info')

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical('cannot use local responses in PZ and RESP '
                        'format at the same time')
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (
            options.local_responses_pz,
            options.local_responses_resp,
            options.local_responses_stationxml):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical('can only handle local responses from either PZ or '
                        'RESP or StationXML. Cannot yet merge different '
                        'response formats.')
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical('--local-responses-resp can only be used '
                        'when --stations is also given.')
        sys.exit(1)

    try:
        ename = ''
        magnitude = None
        mt = None
        if len(args) == 10:
            time = util.str_to_time(args[1] + ' ' + args[2])
            lat = float(args[3])
            lon = float(args[4])
            depth = float(args[5]) * km
            iarg = 6

        elif len(args) == 7:
            if args[2].find(':') == -1:
                sname_or_date = None
                lat = float(args[1])
                lon = float(args[2])
                event = None
                time = None
            else:
                sname_or_date = args[1] + ' ' + args[2]

            iarg = 3

        elif len(args) == 6:
            sname_or_date = args[1]
            iarg = 2

        if len(args) in (7, 6) and sname_or_date is not None:
            events = get_events_by_name_or_date(
                [sname_or_date], catalog=geofon)
            if len(events) == 0:
                logger.critical('no event found')
                sys.exit(1)
            elif len(events) > 1:
                logger.critical('more than one event found')
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg + 1])
        sample_rate = float(args[iarg + 2])
        eventname = args[iarg + 3]
        cwd = str(sys.argv[1])
        event_dir = op.join(cwd, 'data', 'events', eventname)
        output_dir = op.join(event_dir, 'waveforms')

    except Exception:
        parser.print_help()
        sys.exit(1)

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force '
            'option' % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(
            time=time, lat=lat, lon=lon, depth=depth, name=ename,
            magnitude=magnitude, moment_tensor=mt)

    if options.window == 'full':
        if event is None:
            logger.critical('need event for --window=full')
            sys.exit(1)

        low_velocity = 1500.
        timewindow = VelocityWindow(
            low_velocity, tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == 'p':
        if event is None:
            logger.critical('need event for --window=p')
            sys.exit(1)

        phases = list(map(cake.PhaseDef, 'P p'.split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error('required phase arrival not found')
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(',')
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"'
                            % options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad
    tmax += tpad

    tinc = None

    priority_band_code = options.priority_band_code.split(',')
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical('invalid band code: %s' % s)

    priority_instrument_code = options.priority_instrument_code.split(',')
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical('invalid instrument code: %s' % s)

    station_query_conf = dict(
        latitude=lat,
        longitude=lon,
        minradius=options.radius_min * km * cake.m2d,
        maxradius=radius * cake.m2d,
        channel=','.join('%s??' % s for s in priority_band_code))

    target_sample_rate = sample_rate
    fmax = target_sample_rate

    # target_sample_rate = None
    # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S']

    priority_units = ['M/S', 'M', 'M/S**2']

    # output_units = 'M'

    sites = [x.strip() for x in options.sites.split(',') if x.strip()]

    for site in sites:
        if site not in g_sites_available:
            logger.critical('unknown FDSN site: %s' % site)
            sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(',')
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(',')
            with open(token_filename, 'r') as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical('cannot get token from file: %s' % token_filename)
            sys.exit(1)

    fn_template0 = \
        'data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed'

    fn_template_raw = op.join(output_dir, 'raw', fn_template0)
    fn_stations_raw = op.join(output_dir, 'stations.raw.txt')
    fn_template_rest = op.join(output_dir, 'rest', fn_template0)
    fn_commandline = op.join(output_dir, 'beatdown.command')

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                'iris': dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == 'geonet':
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(
                    startbefore=tmax, endafter=tmin,
                    includerestricted=(
                        site in g_user_credentials
                        or site in g_auth_tokens))

            logger.info('downloading channel information (%s)' % site)
            sx = fdsn.station(
                site=site, format='text', level='channel', **extra_args)

        except fdsn.EmptyResult:
            logger.error('No stations matching given criteria. (%s)' % site)
            sx = None

        if sx is not None:
            sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}

    if options.selection_file:
        logger.info('using stations from stations file!')
        stations = []
        for fn in options.selection_file:
            stations.extend(model.load_stations(fn))

        nsls_selected = set(s.nsl() for s in stations)
    else:
        nsls_selected = None

    for sx, site in zip(sxs, sites):
        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()

            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                if nsls_selected:
                    if nsl in nsls_selected:
                        nsl_to_station[nsl] = s
                else:
                    # using first site with this station
                    nsl_to_station[nsl] = s

    logger.info('number of stations found: %i' % len(nsl_to_station))

    # station weeding
    if options.nstations_wanted:
        nsls_selected = None
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(
            stations_all, options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info('number of stations selected: %i' % len(nsls_selected))

    if tinc is None:
        tinc = 3600.

    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    it = 0

    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info('time window %i/%i (%s - %s)'
                    % (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)

        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win))

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None \
                        and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon

                    try:
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_,
                            channel.latitude.value,
                            channel.longitude.value)
                    except AttributeError:
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_, channel.latitude, channel.longitude)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None

                    tmin_, tmax_ = timewindow(time_, dist, depth_)

                    tmin_this = tmin_ - tpad
                    tmax_this = float(tmax_ + tpad)

                    tmin_req = max(tmin_win, tmin_this)
                    tmax_req = min(tmax_win, tmax_this)

                    if channel.sample_rate:
                        try:
                            deltat = 1.0 / int(channel.sample_rate.value)
                        except AttributeError:
                            deltat = 1.0 / int(channel.sample_rate)
                    else:
                        deltat = 1.0

                    if tmin_req < tmax_req:
                        logger.debug('deltat %f' % deltat)
                        # extend time window by some samples because
                        # otherwise sometimes gaps are produced; apparently
                        # the WS are only sensitive to full seconds, so
                        # round to avoid gaps, increase safety window
                        selection.append(
                            nslc + (math.floor(tmin_req - deltat * 20.0),
                                    math.ceil(tmax_req + deltat * 20.0)))

            if options.dry_run:
                # loop variables renamed to avoid clobbering the outer
                # tmin/tmax used by subsequent time windows
                for (net, sta, loc, cha, tmin_x, tmax_x) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i + neach]
                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ''
                        if nbatches > 1:
                            sbatch = ' (batch %i/%i)' % (
                                (i // neach) + 1, nbatches)

                        logger.info('downloading data (%s)%s'
                                    % (site, sbatch))
                        data = fdsn.dataselect(
                            site=site, selection=selection_now,
                            **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            tr.fix_deltat_rounding_errors()
                            logger.debug('cutting window: %f - %f'
                                         % (tmin_win, tmax_win))
                            logger.debug(
                                'available window: %f - %f, nsamples: %g'
                                % (tr.tmin, tr.tmax, tr.ydata.size))
                            try:
                                logger.debug('tmin before snap %f' % tr.tmin)
                                tr.snap(interpolate=True)
                                logger.debug('tmin after snap %f' % tr.tmin)
                                tr.chop(
                                    tmin_win, tmax_win,
                                    snap=(math.floor, math.ceil),
                                    include_last=True)
                                logger.debug(
                                    'cut window: %f - %f, nsamples: %g'
                                    % (tr.tmin, tr.tmax, tr.ydata.size))
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        for fn in fns2:
                            if fn in fns:
                                logger.warning('overwriting file %s', fn)
                        fns.extend(fns2)

                    except fdsn.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warning(
                            'an error occurred while downloading data '
                            'for channels \n  %s'
                            % '\n  '.join('.'.join(x[:4])
                                          for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return '' if x == 1 else 's'

        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info('selected: %s.%s.%s.%s from site%s %s'
                        % (nslc + (plural_s(len(sites)), '+'.join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(all_channels.keys(),
                            key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info('selected (%s): %i channel%s (%i station%s)'
                        % ('+'.join(sites), nchannels, plural_s(nchannels),
                           nstations, plural_s(nstations)))

        logger.info('selected total: %i channel%s (%i station%s)'
                    % (nchannels_all, plural_s(nchannels_all),
                       nstations_all, plural_s(nstations_all)))

        logger.info('dry run done.')
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data
        # came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info('downloading response information (%s)' % site)
            sxs[site] = fdsn.station(
                site=site, level='response', selection=selection)

            sxs[site].dump_xml(
                filename=op.join(output_dir, 'stations.%s.xml' % site))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site['local'] = set()
        plocal = pile.make_pile(options.local_data, fileformat='detect')
        logger.info(
            'Importing local data from %s between %s (%f) and %s (%f)'
            % (options.local_data,
               util.time_to_str(tmin), tmin,
               util.time_to_str(tmax), tmax))

        for traces in plocal.chopper_grouped(
                gather=lambda tr: tr.nslc_id, tmin=tmin, tmax=tmax,
                tinc=tinc):

            for tr in traces:
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site['local'].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append('local')

    if options.local_responses_pz:
        sxs['local'] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs['local'] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs['local'] = stationxml.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [nsl_to_station[nsl_]
                for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error('no data available')
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)
    p.get_deltatmin()
    otinc = None
    if otinc is None:
        otinc = nice_seconds_floor(p.get_deltatmin() * 500000.)
    otinc = 3600.
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
    for traces_a in p.chopper_grouped(
            gather=lambda tr: tr.nslc_id, tmin=otmin, tmax=otmax,
            tinc=otinc, tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b,
                                      fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    logger.debug('Getting response for %s' % tr.__str__())
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=options.output_units)

                    break

                except stationxml.NoResponseInformation:
                    failure.append('%s: no response information' % site)

                except stationxml.MultipleResponseInformation:
                    failure.append(
                        '%s: multiple response information' % site)

            if response is None:
                failure = ', '.join(failure)

            else:
                failure = ''
                try:
                    if tr.tmin > tmin and options.zero_pad:
                        logger.warning(
                            'Trace too short for clean restitution in '
                            'desired frequency band -> zero-padding!')
                        tr.extend(
                            tr.tmin - tfade, tr.tmax + tfade, 'repeat')

                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData):
                    failure = 'trace too short'

            if failure:
                logger.warning(
                    'failed to restitute trace %s.%s.%s.%s (%s)'
                    % (tr.nslc_id + (failure,)))

        if rest_traces_b:
            rest_traces = trace.degapper(
                rest_traces_b + rest_traces_a, deoverlap='crossfade_cos')

            fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
            rest_traces_a = []
            if win_a:
                for tr in rest_traces:
                    try:
                        rest_traces_a.append(
                            tr.chop(win_a[0], win_a[1] + otpad,
                                    inplace=False))
                    except trace.NoData:
                        pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = \
        'DISPL.%(network)s.%(station)s.%(location)s.%(channel)s'

    fn_waveforms = op.join(output_dir, 'prepared', fn_template1)
    fn_stations = op.join(output_dir, 'stations.prepared.txt')
    fn_event = op.join(event_dir, 'event.txt')
    fn_event_yaml = op.join(event_dir, 'event.yaml')

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    traces_beat = []
    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)

        if options.out_components == 'rtu':
            pios = s.guess_projections_to_rtu(out_channels=('R', 'T', 'Z'))
        elif options.out_components == 'enu':
            pios = s.guess_projections_to_enu(out_channels=('E', 'N', 'Z'))
        else:
            assert False

        for (proj, in_channels, out_channels) in pios:
            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                tr_beat = heart.SeismicDataset.from_pyrocko_trace(tr)
                traces_beat.append(tr_beat)
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)

            if proc:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

    stations = list(used_stations)
    util.ensuredirs(fn_stations)
    model.dump_stations(stations, fn_stations)
    model.dump_events([event], fn_event)

    from pyrocko.guts import dump
    dump([event], filename=fn_event_yaml)

    utility.dump_objects(
        op.join(cwd, 'seismic_data.pkl'),
        outlist=[stations, traces_beat])

    logger.info('prepared waveforms from %i stations' % len(stations))
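# Hypothetical invocation sketch, inferred from the argument parsing above
# (10-argument form: <cwd> <date> <time> <lat> <lon> <depth-km> <radius-km>
# <fmin-hz> <sample-rate-hz> <eventname>):
#
#   beatdown . 2019-11-20 02:59:47 49.15 8.12 10.0 50.0 0.01 20.0 ev_2019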
        origin_id=evq_ml.origin_list[0].public_id,
        type="Mw",
        mag=quakeml.RealQuantity(value=emw.magnitude)))

evq_ml.magnitude_list = magnitude_list
qml_blank.event_parameters.event_list.append(evq_ml)

dist_ins = orthodrome.distance_accurate50m(
    insheim_lat, insheim_lon, eml.lat, eml.lon)
dist_land = orthodrome.distance_accurate50m(
    landau_lat, landau_lon, eml.lat, eml.lon)

if dist_ins < 4500.:
    ins_events.append(eml)
    event_params_ins.append([
        str(util.tts(eml.time)), str(eml.time), str(eml.lat),
        str(eml.lon), str(eml.depth), str(eml.magnitude),
        str(emw.magnitude)])

if dist_land < 4500.:
    # original had a bare `land_events.append`; `eml` assumed by symmetry
    # with `ins_events.append(eml)` above
    land_events.append(eml)
    event_params_land.append([
        str(util.tts(eml.time)), str(eml.time), str(eml.lat),
        str(eml.lon),
def main():
    parser = OptionParser(usage=usage, description=description)

    parser.add_option(
        "--force", dest="force", action="store_true", default=False,
        help="allow recreation of output <directory>")

    parser.add_option(
        "--debug", dest="debug", action="store_true", default=False,
        help="print debugging information to stderr")

    parser.add_option(
        "--dry-run", dest="dry_run", action="store_true", default=False,
        help="show available stations/channels and exit "
             "(do not download waveforms)")

    parser.add_option(
        "--continue", dest="continue_", action="store_true", default=False,
        help="continue download after an accident")

    parser.add_option(
        "--local-data", dest="local_data", action="append",
        help="add file/directory with local data")

    parser.add_option(
        "--local-stations", dest="local_stations", action="append",
        help="add local stations file")

    parser.add_option(
        "--local-responses-resp", dest="local_responses_resp",
        action="append",
        help="add file/directory with local responses in RESP format")

    parser.add_option(
        "--local-responses-pz", dest="local_responses_pz", action="append",
        help="add file/directory with local pole-zero responses")

    parser.add_option(
        "--local-responses-stationxml", dest="local_responses_stationxml",
        help="add file with local response information in StationXML format")

    parser.add_option(
        "--window", dest="window", default="full",
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
             "] (time format is YYYY-MM-DD HH:MM:SS)")

    parser.add_option(
        "--out-components", choices=["enu", "rtu"], dest="out_components",
        default="rtu",
        help="set output component orientations to radial-transverse-up "
             "[rtu] (default) or east-north-up [enu]")

    parser.add_option(
        "--padding-factor", type=float, default=3.0, dest="padding_factor",
        help="extend time window on either side, in multiples of 1/<fmin_hz> "
             "(default: %default)")

    parser.add_option(
        "--credentials", dest="user_credentials", action="append",
        default=[], metavar="SITE,USER,PASSWD",
        help="user credentials for specific site to access restricted data "
             "(this option can be repeated)")

    parser.add_option(
        "--token", dest="auth_tokens", metavar="SITE,FILENAME",
        action="append", default=[],
        help="user authentication token for specific site to access "
             "restricted data (this option can be repeated)")

    parser.add_option(
        "--sites", dest="sites", metavar="SITE1,SITE2,...",
        # default='bgr',
        default="http://ws.gpi.kit.edu,bgr,http://188.246.25.142:8080",
        help='sites to query (available: %s, default: "%%default")'
             % ", ".join(g_sites_available))

    parser.add_option(
        "--band-codes", dest="priority_band_code",
        metavar="V,L,M,B,H,S,E,...", default="V,L,M,B,H,E",
        help="select and prioritize band codes (default: %default)")

    parser.add_option(
        "--instrument-codes", dest="priority_instrument_code",
        metavar="H,L,G,...", default="H,L,O,",
        help="select and prioritize instrument codes (default: %default)")

    parser.add_option(
        "--radius-min", dest="radius_min", metavar="VALUE", default=0.0,
        type=float, help="minimum radius [km]")

    parser.add_option(
        "--tinc", dest="tinc", metavar="VALUE", default=3600.0 * 12.0,
        type=float, help="length of separate saved files in s")

    parser.add_option(
        "--nstations-wanted", dest="nstations_wanted", metavar="N",
        type=int, help="number of stations to select initially")

    (options, args) = parser.parse_args(sys.argv[1:])

    if len(args) not in (9, 6, 5):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, "debug")
    else:
        util.setup_logging(program_name, "info")

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical("cannot use local responses in PZ and RESP "
                        "format at the same time")
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (
            options.local_responses_pz,
            options.local_responses_resp,
            options.local_responses_stationxml):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical("can only handle local responses from either PZ or "
                        "RESP or StationXML. Cannot yet merge different "
                        "response formats.")
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical("--local-responses-resp can only be used "
                        "when --stations is also given.")
        sys.exit(1)

    try:
        ename = ""
        magnitude = None
        mt = None
        if len(args) == 9:
            time = util.str_to_time(args[0] + " " + args[1])
            lat = float(args[2])
            lon = float(args[3])
            depth = float(args[4]) * km
            iarg = 5

        elif len(args) == 6:
            if args[1].find(":") == -1:
                sname_or_date = None
                lat = float(args[0])
                lon = float(args[1])
                event = None
                time = None
            else:
                sname_or_date = args[0] + " " + args[1]

            iarg = 2

        elif len(args) == 5:
            sname_or_date = args[0]
            iarg = 1

        if len(args) in (6, 5) and sname_or_date is not None:
            events = get_events_by_name_or_date(
                [sname_or_date], catalog=geofon)
            if len(events) == 0:
                logger.critical("no event found")
                sys.exit(1)
            elif len(events) > 1:
                logger.critical("more than one event found")
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg + 1])
        sample_rate = float(args[iarg + 2])
        eventname = args[iarg + 3]
        event_dir = op.join("data", "events", eventname)
        output_dir = op.join(event_dir, "waveforms")

    except Exception:
        parser.print_help()
        sys.exit(1)

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force '
            "option" % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(
            time=time, lat=lat, lon=lon, depth=depth, name=ename,
            magnitude=magnitude, moment_tensor=mt)

    if options.window == "full":
        if event is None:
            logger.critical("need event for --window=full")
            sys.exit(1)

        low_velocity = 1500.0
        timewindow = VelocityWindow(
            low_velocity, tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == "p":
        if event is None:
            logger.critical("need event for --window=p")
            sys.exit(1)

        phases = list(map(cake.PhaseDef, "P p".split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error("required phase arrival not found")
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(",")
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"'
                            % options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tlen = tmax - tmin
    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad
    tmax += tpad

    priority_band_code = options.priority_band_code.split(",")
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical("invalid band code: %s" % s)

    priority_instrument_code = options.priority_instrument_code.split(",")
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical("invalid instrument code: %s" % s)

    station_query_conf = dict(
        latitude=lat,
        longitude=lon,
        minradius=options.radius_min * km * cake.m2d,
        maxradius=radius * cake.m2d,
        channel=",".join("?%s?" % s for s in priority_band_code))

    target_sample_rate = sample_rate
    fmax = target_sample_rate

    # target_sample_rate = None
    # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S']

    priority_units = ["M/S", "M", "M/S**2"]
    output_units = "M"

    sites = [x.strip() for x in options.sites.split(",") if x.strip()]
    tinc = options.tinc

    # for site in sites:
    #     if site not in g_sites_available:
    #         logger.critical('unknown FDSN site: %s' % site)
    #         sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(",")
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(",")
            with open(token_filename, "r") as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical("cannot get token from file: %s"
                            % token_filename)
            sys.exit(1)

    fn_template0 = (
        "data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s"
        ".mseed")

    fn_template_raw = op.join(output_dir, "raw", fn_template0)
    fn_template_raw_folder = op.join(output_dir, "raw/", "traces.mseed")
    fn_stations_raw = op.join(output_dir, "stations.raw.txt")
    fn_template_rest = op.join(output_dir, "rest", fn_template0)
    fn_commandline = op.join(output_dir, "seigerdown.command")

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                "iris": dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == "geonet":
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(
                    startbefore=tmax, endafter=tmin,
                    includerestricted=(
                        site in g_user_credentials
                        or site in g_auth_tokens))

            logger.info("downloading channel information (%s)" % site)
            sx = fdsn.station(
                site=site, format="text", level="channel", **extra_args)

        except fdsn.EmptyResult:
            logger.error("No stations matching given criteria. (%s)" % site)
            sx = None

        if sx is not None:
            sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}
    for sx, site in zip(sxs, sites):
        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()
            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                nsl_to_station[nsl] = s  # using first site with this station

    logger.info("number of stations found: %i" % len(nsl_to_station))

    # station weeding

    nsls_selected = None
    if options.nstations_wanted:
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(
            stations_all, options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info("number of stations selected: %i" % len(nsls_selected))

    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    it = 0

    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info("time window %i/%i (%s - %s)"
                    % (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)

        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win))

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None \
                        and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon

                    dist = orthodrome.distance_accurate50m_numpy(
                        lat_, lon_,
                        channel.latitude.value, channel.longitude.value)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None

                    tmin_, tmax_ = timewindow(time_, dist, depth_)

                    tmin_this = tmin_ - tpad
                    tmax_this = tmax_ + tpad

                    tmin_req = max(tmin_win, tmin_this)
                    tmax_req = min(tmax_win, tmax_this)

                    if channel.sample_rate:
                        deltat = 1.0 / channel.sample_rate.value
                    else:
                        deltat = 1.0

                    if tmin_req < tmax_req:
                        # extend time window by some samples because
                        # otherwise sometimes gaps are produced
                        selection.append(
                            nslc + (tmin_req - deltat * 10.0,
                                    tmax_req + deltat * 10.0))

            if options.dry_run:
                # loop variables renamed to avoid clobbering the outer
                # tmin/tmax used by subsequent time windows
                for (net, sta, loc, cha, tmin_x, tmax_x) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i + neach]
                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ""
                        if nbatches > 1:
                            sbatch = " (batch %i/%i)" % (
                                (i // neach) + 1, nbatches)

                        logger.info("downloading data (%s)%s"
                                    % (site, sbatch))
                        data = fdsn.dataselect(
                            site=site, selection=selection_now,
                            **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            if tr.station == "7869":
                                tr.station = "MOER"
                                tr.network = "LE"
                                tr.location = ""
                            try:
                                tr.chop(tmin_win, tmax_win)
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        io.save(trs, fn_template_raw_folder)
                        for fn in fns2:
                            if fn in fns:
                                logger.warning("overwriting file %s", fn)
                        fns.extend(fns2)

                    except fdsn.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warning(
                            "an error occurred while downloading data "
                            "for channels \n  %s"
                            % "\n  ".join(".".join(x[:4])
                                          for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return "" if x == 1 else "s"

        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info("selected: %s.%s.%s.%s from site%s %s"
                        % (nslc + (plural_s(len(sites)), "+".join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(all_channels.keys(),
                            key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info("selected (%s): %i channel%s (%i station%s)"
                        % ("+".join(sites), nchannels, plural_s(nchannels),
                           nstations, plural_s(nstations)))

        logger.info("selected total: %i channel%s (%i station%s)"
                    % (nchannels_all, plural_s(nchannels_all),
                       nstations_all, plural_s(nstations_all)))

        logger.info("dry run done.")
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data
        # came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info("downloading response information (%s)" % site)
            sxs[site] = fdsn.station(
                site=site, level="response", selection=selection)

            sited = site
            if site == "http://192.168.11.220:8080":
                sited = "bgr_internal"
            elif site == "http://ws.gpi.kit.edu":
                sited = "kit"
            if site == "http://188.246.25.142:8080":
                sited = "moer"

            sxs[site].dump_xml(
                filename=op.join(output_dir, "stations.%s.xml" % sited))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site["local"] = set()
        plocal = pile.make_pile(options.local_data, fileformat="detect")
        for traces in plocal.chopper_grouped(
                gather=lambda tr: tr.nslc_id, tmin=tmin, tmax=tmax,
                tinc=tinc):

            for tr in traces:
                if tr.station == "7869":
                    tr.station = "MOER"
                    tr.network = "LE"
                    tr.location = ""
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site["local"].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append("local")

    if options.local_responses_pz:
        sxs["local"] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs["local"] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs["local"] = stationxml.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [nsl_to_station[nsl_]
                for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error("no data available")
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)
    p.get_deltatmin()
    otinc = None
    if otinc is None:
        otinc = nice_seconds_floor(p.get_deltatmin() * 500000.0)
    otinc = 3600.0
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
    for traces_a in p.chopper_grouped(
            gather=lambda tr: tr.nslc_id, tmin=otmin, tmax=otmax,
            tinc=otinc, tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            if tr.station == "7869":
                tr.station = "MOER"
                tr.network = "LE"
                tr.location = ""

            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b,
                                      fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=output_units)

                    break

                except stationxml.NoResponseInformation:
                    failure.append("%s: no response information" % site)

                except stationxml.MultipleResponseInformation:
                    failure.append(
                        "%s: multiple response information" % site)

            if response is None:
                failure = ", ".join(failure)

            else:
                failure = ""
                try:
                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData):
                    failure = "trace too short"

            if failure:
                logger.warning(
                    "failed to restitute trace %s.%s.%s.%s (%s)"
                    % (tr.nslc_id + (failure,)))

        if rest_traces_b:
            rest_traces = trace.degapper(
                rest_traces_b + rest_traces_a, deoverlap="crossfade_cos")

            fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
            rest_traces_a = []
            if win_a:
                for tr in rest_traces:
                    if tr.station == "7869":
                        tr.station = "MOER"
                        tr.network = "LE"
                        tr.location = ""
                    try:
                        rest_traces_a.append(
                            tr.chop(win_a[0], win_a[1] + otpad,
                                    inplace=False))
                    except trace.NoData:
                        pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = "DISPL.%(network)s.%(station)s.%(location)s.%(channel)s"

    fn_waveforms = op.join(output_dir, "prepared", fn_template1)
    fn_stations = op.join(output_dir, "stations.prepared.txt")
    fn_event = op.join(event_dir, "event.txt")

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)

        # note: downsample_to operates in place, so `traces` below carries
        # the downsampled data; `keep` only tracks successful decimations
        keep = []
        for tr in traces:
            if deltat is not None:
                try:
                    tr.downsample_to(deltat, snap=True,
                                     allow_upsample_max=5)
                    keep.append(tr)
                except util.UnavailableDecimation as e:
                    logger.warning("Cannot downsample %s.%s.%s.%s: %s"
                                   % (tr.nslc_id + (e,)))
                    continue

        if options.out_components == "rtu":
            pios = s.guess_projections_to_rtu(out_channels=("R", "T", "Z"))
        elif options.out_components == "enu":
            pios = s.guess_projections_to_enu(out_channels=("E", "N", "Z"))
        else:
            assert False

        for (proj, in_channels, out_channels) in pios:
            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)

            if proc:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

    stations = list(used_stations)
    util.ensuredirs(fn_stations)
    model.dump_stations(stations, fn_stations)
    model.dump_events([event], fn_event)
    logger.info("prepared waveforms from %i stations" % len(stations))
def search(config, override_tmin=None, override_tmax=None, show_detections=False, show_movie=False, show_window_traces=False, force=False, stop_after_first=False, nparallel=6, save_imax=False, bark=False): fp = config.expand_path run_path = fp(config.run_path) # if op.exists(run_path): # if force: # shutil.rmtree(run_path) # else: # raise common.LassieError( # 'run directory already exists: %s' % # run_path) util.ensuredir(run_path) write_config(config, op.join(run_path, 'config.yaml')) ifm_path_template = config.get_ifm_path_template() detections_path = config.get_detections_path() events_path = config.get_events_path() figures_path_template = config.get_figures_path_template() config.setup_image_function_contributions() ifcs = config.image_function_contributions grid = config.get_grid() receivers = config.get_receivers() norm_map = gridmod.geometrical_normalization(grid, receivers) data_paths = fp(config.data_paths) for data_path in fp(data_paths): if not op.exists(data_path): pass p = pile.make_pile(data_paths, fileformat='detect') if p.is_empty(): raise common.LassieError('no usable waveforms found') for ifc in ifcs: ifc.prescan(p) shift_tables = [] tshift_minmaxs = [] for ifc in ifcs: shift_tables.append(ifc.get_table(grid, receivers)) tshift_minmaxs.append(num.nanmin(shift_tables[-1])) tshift_minmaxs.append(num.nanmax(shift_tables[-1])) fsmooth_min = min(ifc.get_fsmooth() for ifc in ifcs) tshift_min = min(tshift_minmaxs) tshift_max = max(tshift_minmaxs) if config.detector_tpeaksearch is not None: tpeaksearch = config.detector_tpeaksearch else: tpeaksearch = (tshift_max - tshift_min) + 1.0 / fsmooth_min tpad = max(ifc.get_tpad() for ifc in ifcs) + \ (tshift_max - tshift_min) + tpeaksearch tinc = (tshift_max - tshift_min) * 10. + 3.0 * tpad tavail = p.tmax - p.tmin tinc = min(tinc, tavail - 2.0 * tpad) if tinc <= 0: raise common.LassieError('available waveforms too short \n' 'required: %g s\n' 'available: %g s\n' % (2. 
* tpad, tavail))

blacklist = set(tuple(s.split('.')) for s in config.blacklist)
whitelist = set(tuple(s.split('.')) for s in config.whitelist)

distances = grid.distances(receivers)
distances_to_grid = num.min(distances, axis=0)
distance_min = num.min(distances)
distance_max = num.max(distances)

station_index = dict(
    (rec.codes, i) for (i, rec) in enumerate(receivers)
    if rec.codes not in blacklist and (
        not whitelist or rec.codes in whitelist) and (
        config.distance_max is None
        or distances_to_grid[i] <= config.distance_max))

check_data_consistency(p, config)

deltat_cf = max(p.deltats.keys())
assert deltat_cf > 0.0

while True:
    if not all(ifc.deltat_cf_is_available(deltat_cf * 2) for ifc in ifcs):
        break

    deltat_cf *= 2

logger.info('CF lassie sampling interval (rate): %g s (%g Hz)' % (
    deltat_cf, 1.0 / deltat_cf))

ngridpoints = grid.size()
logger.info('number of grid points: %i' % ngridpoints)
logger.info('minimum source-receiver distance: %g m' % distance_min)
logger.info('maximum source-receiver distance: %g m' % distance_max)
logger.info('minimum travel-time: %g s' % tshift_min)
logger.info('maximum travel-time: %g s' % tshift_max)

idetection = 0
tmin = override_tmin or config.tmin or p.tmin + tpad
tmax = override_tmax or config.tmax or p.tmax - tpad

events = config.get_events()
twindows = []
if events is not None:
    for ev in events:
        if tmin <= ev.time <= tmax:
            twindows.append((
                ev.time + tshift_min - (tshift_max - tshift_min)
                * config.event_time_window_factor,
                ev.time + tshift_min + (tshift_max - tshift_min)
                * config.event_time_window_factor))
else:
    twindows.append((tmin, tmax))

for iwindow_group, (tmin_win, tmax_win) in enumerate(twindows):
    nwin = int(math.ceil((tmax_win - tmin_win) / tinc))

    logger.info('start processing time window group %i/%i: %s - %s' % (
        iwindow_group + 1, len(twindows),
        util.time_to_str(tmin_win), util.time_to_str(tmax_win)))

    logger.info('number of time windows: %i' % nwin)
    logger.info('time window length: %g s' % (tinc + 2.0 * tpad))
    logger.info('time window payload: %g s' % tinc)
    logger.info('time window padding: 2 x %g s' % tpad)
    logger.info('time window overlap: %g%%' % (
        100.0 * 2.0 * tpad / (tinc + 2.0 * tpad)))

    iwin = -1
    for trs in p.chopper(
            tmin=tmin_win, tmax=tmax_win, tinc=tinc, tpad=tpad,
            want_incomplete=config.fill_incomplete_with_zeros,
            trace_selector=lambda tr: tr.nslc_id[:3] in station_index):

        iwin += 1
        trs_ok = []
        for tr in trs:
            if tr.ydata.size == 0:
                logger.warning(
                    'skipping empty trace: %s.%s.%s.%s' % tr.nslc_id)
                continue

            if not num.all(num.isfinite(tr.ydata)):
                logger.warning(
                    'skipping trace because of invalid values: '
                    '%s.%s.%s.%s' % tr.nslc_id)
                continue

            trs_ok.append(tr)

        trs = trs_ok
        if not trs:
            continue

        logger.info('processing time window %i/%i: %s - %s' % (
            iwin + 1, nwin,
            util.time_to_str(trs[0].wmin),
            util.time_to_str(trs[0].wmax)))

        wmin = trs[0].wmin
        wmax = trs[0].wmax

        if config.fill_incomplete_with_zeros:
            trs = zero_fill(trs, wmin - tpad, wmax + tpad)

        t0 = math.floor(wmin / deltat_cf) * deltat_cf
        iwmin = int(round((wmin - tpeaksearch - t0) / deltat_cf))
        iwmax = int(round((wmax + tpeaksearch - t0) / deltat_cf))
        lengthout = iwmax - iwmin + 1

        pdata = []
        trs_debug = []
        parstack_params = []
        for iifc, ifc in enumerate(ifcs):
            dataset = ifc.preprocess(
                trs, wmin - tpeaksearch, wmax + tpeaksearch,
                tshift_max - tshift_min, deltat_cf)

            if not dataset:
                continue

            nstations_selected = len(dataset)
            nsls_selected, trs_selected = zip(*dataset)
            for tr in trs_selected:
                tr.meta = {'tabu': True}

            trs_debug.extend(trs + list(trs_selected))

            istations_selected = num.array(
                [station_index[nsl] for nsl in nsls_selected],
                dtype=int)

            arrays = [tr.ydata.astype(float) for tr in trs_selected]

            offsets = num.array(
                [int(round((tr.tmin - t0) / deltat_cf))
                 for tr in trs_selected], dtype=num.int32)

            w = ifc.get_weights(nsls_selected)

            weights = num.ones((ngridpoints, nstations_selected))
            weights *= w[num.newaxis, :]
            weights *= ifc.weight

            shift_table = shift_tables[iifc]

            ok = num.isfinite(shift_table[:, istations_selected])
            bad = num.logical_not(ok)

            shifts = -num.round(
                shift_table[:, istations_selected]
                / deltat_cf).astype(num.int32)

            weights[bad] = 0.0
            shifts[bad] = num.max(shifts[ok])

            pdata.append((list(trs_selected), shift_table, ifc))
            parstack_params.append((arrays, offsets, shifts, weights))

        if config.stacking_blocksize is not None:
            ipstep = config.stacking_blocksize
            frames = None
        else:
            ipstep = lengthout
            frames = num.zeros((ngridpoints, lengthout))

        twall_start = time.time()
        frame_maxs = num.zeros(lengthout)
        frame_argmaxs = num.zeros(lengthout, dtype=int)
        ipmin = iwmin
        while ipmin < iwmin + lengthout:
            ipsize = min(ipstep, iwmin + lengthout - ipmin)
            if ipstep == lengthout:
                frames_p = frames
            else:
                frames_p = num.zeros((ngridpoints, ipsize))

            for (arrays, offsets, shifts, weights) in parstack_params:
                frames_p, _ = parstack(
                    arrays, offsets, shifts, weights, 0,
                    offsetout=ipmin,
                    lengthout=ipsize,
                    result=frames_p,
                    nparallel=nparallel,
                    impl='openmp')

            if config.sharpness_normalization:
                frame_p_maxs = frames_p.max(axis=0)
                frame_p_means = num.abs(frames_p).mean(axis=0)
                frames_p *= (frame_p_maxs / frame_p_means)[num.newaxis, :]
                frames_p *= norm_map[:, num.newaxis]

            if config.ifc_count_normalization:
                frames_p *= 1.0 / len(ifcs)

            frame_maxs[ipmin-iwmin:ipmin-iwmin+ipsize] = \
                frames_p.max(axis=0)

            frame_argmaxs[ipmin-iwmin:ipmin-iwmin+ipsize] = \
                pargmax(frames_p)

            ipmin += ipstep
            del frames_p

        twall_end = time.time()
        logger.info('wallclock time for stacking: %g s' % (
            twall_end - twall_start))

        tmin_frames = t0 + iwmin * deltat_cf

        tr_stackmax = trace.Trace(
            '', 'SMAX', '', '',
            tmin=tmin_frames, deltat=deltat_cf, ydata=frame_maxs)

        tr_stackmax.meta = {'tabu': True}

        trs_debug.append(tr_stackmax)

        if show_window_traces:
            trace.snuffle(trs_debug)

        ydata_window = tr_stackmax.chop(
            wmin, wmax, inplace=False).get_ydata()

        logger.info('CF stats: min %g, max %g, median %g' % (
            num.min(ydata_window),
            num.max(ydata_window),
            num.median(ydata_window)))

        # 17 is the full complement of seiger stations; 4 is a mean
        # noise baseline subtracted per missing station.
        detector_threshold_seiger = config.detector_threshold - (
            (17 - nstations_selected) * 4)

        if nstations_selected != 17:
            logger.warning(
                'Station outage detected! Number of operable stations: '
                '%s, threshold now: %s' % (
                    nstations_selected, detector_threshold_seiger))

        tpeaks, apeaks = list(zip(*[
            (tpeak, apeak) for (tpeak, apeak) in zip(
                *tr_stackmax.peaks(
                    detector_threshold_seiger, tpeaksearch))
            if wmin <= tpeak < wmax])) or ([], [])

        tr_stackmax_indx = tr_stackmax.copy(data=False)
        tr_stackmax_indx.set_ydata(frame_argmaxs.astype(num.int32))
        tr_stackmax_indx.set_location('i')

        for (tpeak, apeak) in zip(tpeaks, apeaks):
            iframe = int(round((tpeak - tmin_frames) / deltat_cf))
            imax = frame_argmaxs[iframe]

            latpeak, lonpeak, xpeak, ypeak, zpeak = \
                grid.index_to_location(imax)

            idetection += 1
            detection = Detection(
                id='%06i' % idetection,
                time=tpeak,
                location=geo.Point(
                    lat=float(latpeak),
                    lon=float(lonpeak),
                    x=float(xpeak),
                    y=float(ypeak),
                    z=float(zpeak)),
                ifm=float(apeak))

            if bark:
                common.bark()

            logger.info('detection found: %s' % str(detection))

            with open(detections_path, 'a') as f:
                f.write('%06i %s %g %g %g %g %g %g\n' % (
                    idetection,
                    util.time_to_str(
                        tpeak, format='%Y-%m-%d %H:%M:%S.6FRAC'),
                    apeak, latpeak, lonpeak, xpeak, ypeak, zpeak))

            ev = detection.get_event()

            with open(events_path, 'a') as f:
                model.dump_events([ev], stream=f)

            if show_detections or config.save_figures:
                fmin = min(ifc.fmin for ifc in ifcs)
                fmax = max(ifc.fmax for ifc in ifcs)  # widest band

                fn = figures_path_template % {
                    'id': util.tts(t0).replace(' ', 'T'),
                    'format': 'png'}

                util.ensuredirs(fn)

                if frames is not None:
                    frames_p = frames
                    tmin_frames_p = tmin_frames
                    iframe_p = iframe
                else:
                    iframe_min = max(
                        0, int(round(iframe - tpeaksearch / deltat_cf)))
                    iframe_max = min(
                        lengthout - 1,
                        int(round(iframe + tpeaksearch / deltat_cf)))

                    ipsize = iframe_max - iframe_min + 1
                    frames_p = num.zeros((ngridpoints, ipsize))
                    tmin_frames_p = tmin_frames + iframe_min * deltat_cf
                    iframe_p = iframe - iframe_min

                    for (arrays, offsets, shifts, weights) \
                            in parstack_params:

                        frames_p, _ = parstack(
                            arrays, offsets, shifts, weights, 0,
                            offsetout=iwmin + iframe_min,
                            lengthout=ipsize,
                            result=frames_p,
                            nparallel=nparallel,
                            impl='openmp')

                    if config.sharpness_normalization:
                        frame_p_maxs = frames_p.max(axis=0)
                        frame_p_means = num.abs(frames_p).mean(axis=0)
                        frames_p *= (
                            frame_p_maxs / frame_p_means)[num.newaxis, :]
                        frames_p *= norm_map[:, num.newaxis]

                    if config.ifc_count_normalization:
                        frames_p *= 1.0 / len(ifcs)

                try:
                    plot.plot_detection(
                        grid, receivers, frames_p, tmin_frames_p,
                        deltat_cf, imax, iframe_p, xpeak, ypeak, zpeak,
                        tr_stackmax, tpeaks, apeaks,
                        detector_threshold_seiger, wmin, wmax,
                        pdata, trs, fmin, fmax, idetection, tpeaksearch,
                        movie=show_movie,
                        show=show_detections,
                        save_filename=fn,
                        event=ev)

                except Exception as e:
                    logger.warning('plotting of detection failed: %s' % e)

                del frames_p

            if stop_after_first:
                return

        tr_stackmax.chop(wmin, wmax)
        tr_stackmax_indx.chop(wmin, wmax)

        if save_imax:
            io.save([tr_stackmax, tr_stackmax_indx], ifm_path_template)

        del frames

    logger.info('end processing time window group: %s - %s' % (
        util.time_to_str(tmin_win), util.time_to_str(tmax_win)))

cat = Catalog()
files = glob('%s/../figures/*qml*' % run_path)
files.sort(key=os.path.getmtime)
for fname in files:
    cat_read = read_events(fname)
    for event in cat_read:
        cat.append(event)

cat.write('%s/../all_events_stacking.qml' % run_path, format='QUAKEML')
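# The core of the detector above is a delay-and-stack: for every grid
# point, each station's characteristic function is shifted by the
# negated travel time (in samples) and summed with weights, and peaks
# of the per-sample maximum over grid points become detections. What
# follows is a minimal pure-numpy sketch of that idea with hypothetical
# toy arrays; the real work is done by the OpenMP-parallel parstack
# from pyrocko, which this does not replace.

import numpy as num

def naive_parstack(arrays, offsets, shifts, weights, lengthout):
    # frames[igrid, j] accumulates weighted, shifted CF samples.
    ngrid, nstations = shifts.shape
    frames = num.zeros((ngrid, lengthout))
    for igrid in range(ngrid):
        for ista, ydata in enumerate(arrays):
            ishift = offsets[ista] + shifts[igrid, ista]
            for i, v in enumerate(ydata):
                j = ishift + i
                if 0 <= j < lengthout:
                    frames[igrid, j] += weights[igrid, ista] * v
    return frames

# Two stations with a CF peak at sample 1; three grid points with
# different hypothetical moveouts:
arrays = [num.array([0., 1., 0.]), num.array([0., 1., 0.])]
offsets = num.array([0, 0], dtype=num.int32)
shifts = num.array([[0, 0], [1, 0], [0, 2]], dtype=num.int32)
weights = num.ones((3, 2))
frames = naive_parstack(arrays, offsets, shifts, weights, 5)
print(frames.max(axis=0))   # [0. 2. 1. 1. 0.]: grid point 0 aligns both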
def call(self):
    '''Main work routine of the snuffling.'''
    self.cleanup()
    viewer = self.get_viewer()
    figs = {}
    fig_width_inch = viewer.width()
    npixel_hori = float(fig_width_inch * 50)
    xminutes = int(self.xminutes)
    xseconds = xminutes * 60
    self.nhours = 24
    ynormalizations = {}
    lines_data = {}
    for traces in self.chopper_selected_traces(tinc=60 * 60,
                                               fallback=True):
        for tr in traces:
            t0 = util.day_start(tr.tmin)
            key = (tr.nslc_id, t0)
            if key not in figs:
                fig = self.pylab(get='figure')
                ax = fig.add_subplot(111)
                figs[key] = (fig, ax)
                ynormalizations[key] = 0
                lines_data[key] = []

            tr = tr.copy(data=True)
            ndecimate = int((xseconds / tr.deltat) / npixel_hori)
            if ndecimate > 1:
                tr.downsample(ndecimate)

            if self.prescale == 'max':
                ynormalizations[key] = max(
                    num.max(tr.ydata), ynormalizations[key])
            else:
                ynormalizations[key] = max(
                    num.std(tr.ydata), ynormalizations[key])

            if viewer.highpass:
                tr.highpass(4, viewer.highpass)
            if viewer.lowpass and 1. / tr.deltat > 2. * viewer.lowpass:
                tr.lowpass(4, viewer.lowpass)

            t = tr.get_xdata() - t0
            y = num.asarray(tr.get_ydata(), dtype=float)
            nskip = t / 3600.              # vertical offset in hours
            x = t % xseconds               # position within the row

            # Split the line wherever x wraps to a new row or the data
            # has a gap, so rows are not connected to each other.
            xdiff = num.diff(x)
            itmp = num.where(num.logical_or(
                xdiff < 0, num.abs(xdiff - tr.deltat) > 1E-4))[0]
            indices = num.zeros(len(itmp) + 2, dtype=int)
            indices[1:-1] = itmp
            indices[-1] = len(y) - 1

            for i in range(len(indices) - 1):
                istart = indices[i] + 1
                istop = indices[i + 1]
                lines_data[key].append(
                    (t0, x[istart:istop], y[istart:istop],
                     nskip[istart:istop]))

    ynorm = None
    if self.scale_global:
        ynorm = max(ynormalizations.values())

    for key, lines in lines_data.items():
        if not self.scale_global:
            ynorm = float(ynormalizations.get(key, 1.))

        for (t0, x, y, shifts) in lines:
            fig, ax = figs[key]
            ax.plot(x / 60., y / (ynorm / self.yscale) + shifts,
                    color='black')
            ax.set_title(util.tts(t0, format='%Y-%m-%d'))

    yticks = range(0, self.nhours + 2, 2)
    xticks = range(0, xminutes + 1, 1)
    for key, (fig, ax) in figs.items():
        ax.set_xlim(0, xminutes)
        ax.set_ylabel('Hour')
        ax.set_xlabel('Minute')
        ax.yaxis.set_ticks(yticks)
        ax.xaxis.set_ticks(xticks)
        ax.set_ylim(-0.1, 24.1)
        fig.canvas.draw()
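# The day-plot trick above folds one day of data into minute-rows:
# times relative to the day start are wrapped modulo the row length,
# and each sample is offset vertically by its elapsed hours. A small
# standalone sketch of just that transform, with hypothetical numbers:

import numpy as num

deltat = 1.0                        # assumed 1 Hz sampling
t = num.arange(0., 7200., deltat)   # two hours since day start [s]
xseconds = 10 * 60                  # 10-minute rows

x = t % xseconds                    # position within the row [s]
nskip = t / 3600.                   # vertical offset [hours]

# Row boundaries show up as backward jumps in x; the snuffling splits
# the plotted line there so consecutive rows are not connected:
ibreaks = num.where(num.diff(x) < 0)[0]
print(len(ibreaks))                 # 11 wraps for 12 ten-minute rows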
def call(self):
    self.cleanup()
    viewer = self.get_viewer()
    vtmin, vtmax = viewer.get_time_range()
    pile = self.get_pile()
    traces = [
        tr for tr in pile.chopper(
            tmin=vtmin, tmax=vtmax,
            trace_selector=viewer.trace_selector)]
    event, stations = self.get_active_event_and_stations()
    traces = [tr for trs in traces for tr in trs]
    stations_by_nsl = {s.nsl(): s for s in self.get_stations()}
    stations = [
        stations_by_nsl.get(station_key(tr), None) for tr in traces]

    # Keep stations and distances aligned: drop traces without station
    # meta-information before zipping the two lists together.
    stations = [s for s in stations if s is not None]
    distances = [
        ortho.distance_accurate50m(event, s) / 1000. for s in stations]
    maxd = max(distances)
    mind = min(distances)
    distances = dict(zip([s.nsl() for s in stations], distances))
    matching_traces = [
        x for x in traces if util.match_nslc(
            self.get_station_patterns(stations), x.nslc_id)]

    if self.add_markers:
        markers = self.get_markers()
        markers = [
            m for m in markers
            if m.tmax <= vtmax and m.tmin >= vtmin and m.selected]
        markers = dict(
            zip([tuple(m.nslc_ids) for m in markers], markers))

    if self.fig is None or self.fframe.closed or not self._live_update:
        self.fframe = self.pylab(get='figure_frame')
        self.fig = self.fframe.gcf()

    if self._live_update:
        self.fig.clf()

    ymin = mind - 0.06 * (maxd - mind)
    ymax = maxd + 0.06 * (maxd - mind)
    ax = self.fig.add_subplot(111)
    xmin = 9E9
    xmax = -xmin
    texts = []
    manual_scale = 0.1 * (maxd - mind) * self.yscale

    if self.ampl_scaler == 'total min/max':
        max_trace = max(
            matching_traces, key=lambda x: max(abs(x.get_ydata())))
        tr_maxy = max(abs(max_trace.get_ydata()))
        ampl_scale = float(tr_maxy)

    for tr in matching_traces:
        if viewer.highpass:
            tr.highpass(4, viewer.highpass)
        if viewer.lowpass:
            tr.lowpass(4, viewer.lowpass)

        if tr.nslc_id[:3] not in distances:
            continue

        if self.t_red:
            red = distances[tr.nslc_id[:3]] / self.t_red
        else:
            red = 0.

        y_pos = distances[tr.nslc_id[:3]]
        xdata = tr.get_xdata() - red - event.time
        xmin = min(xmin, min(xdata))
        xmax = max(xmax, max(xdata))
        tr_ydata = tr.get_ydata()
        if self.ampl_scaler == 'trace min/max':
            ampl_scale = float(max(abs(tr_ydata)))
        elif self.ampl_scaler == 'standard deviation':
            ampl_scale = float(num.std(tr_ydata))
        ydata = (tr_ydata / ampl_scale * manual_scale) + y_pos
        ax.plot(xdata, ydata, c='black', linewidth=0.2)

        if self.fill_between:
            ax.fill_between(
                xdata, y_pos, ydata, where=ydata > y_pos,
                color='black', alpha=0.5)

        texts.append(
            ax.text(
                xmax, y_pos, '%s.%s.%s.%s' % tr.nslc_id,
                horizontalalignment='right', fontsize=6.))

        if self.add_markers:
            for ids, m in markers.items():
                if m.match_nslc(tr.nslc_id) or ids == ():
                    c = m.select_color(m.color_b)
                    c = [ci / 255. for ci in c]
                    t = m.tmin
                    x = [t - red - event.time, t - red - event.time]
                    y = [y_pos - (maxd - mind) * 0.025,
                         y_pos + (maxd - mind) * 0.025]
                    ax.plot(x, y, linewidth=1, color=c)
                    label = m.get_label() or ''
                    ax.text(
                        x[1] - x[1] * 0.005, y[1], label, color=c,
                        fontsize=6, verticalalignment='top',
                        horizontalalignment='right')

    for txt in texts:
        txt.set_x(xmax)

    vred_str = '= ' + str(round(self.t_red, 2)) + ' km/s' \
        if self.t_red else 'off'
    ax.text(
        0.5, 0.01, 'time window: %s - %s | Reduction velocity %s' % (
            util.tts(vtmin), util.tts(vtmax), vred_str),
        verticalalignment='bottom', horizontalalignment='center',
        transform=self.fig.transFigure)
    ax.set_ylim([ymin, ymax])
    ax.set_xlim([xmin, xmax])
    ax.set_ylabel('Distance [km]')
    ax.set_xlabel('(red.) Time [s]')
    self.fig.canvas.draw()
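# The record-section snuffling above shifts every trace left by
# distance / t_red (the reduction velocity), so phases travelling at
# exactly t_red plot as a vertical line. A worked example of the same
# arithmetic with hypothetical numbers:

distance_km = 120.0                 # epicentral distance [km]
t_red = 6.0                         # reduction velocity [km/s]
event_time = 0.0                    # origin time [s]

red = distance_km / t_red if t_red else 0.0
arrival_abs = event_time + distance_km / 6.0    # a 6 km/s phase
arrival_reduced = arrival_abs - red - event_time
print(arrival_reduced)              # 0.0: lands on the vertical axis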
def tts(t):
    if t is None:
        return '?'
    else:
        return util.tts(t, format='%Y-%m-%d')
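# Usage sketch for the tts() wrapper defined above, showing the round
# trip between epoch floats and strings. The printed strings assume
# pyrocko's default '%Y-%m-%d %H:%M:%S.3FRAC' formatting:

from pyrocko import util

t = util.stt('2021-03-20 20:10:10')        # str -> epoch seconds (UTC)
print(util.tts(t))                         # '2021-03-20 20:10:10.000'
print(util.tts(t, format='%Y-%m-%d'))      # day-only, as in tts() above
print(tts(None))                           # '?' via the wrapper above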
def load_data_archieve(validation_data, gf_freq, duration=4,
                       wanted_start=None, wanted_end=None):
    folder = validation_data
    pathlist = Path(folder).glob('day*')
    waveforms = []
    stations = []
    if wanted_start is not None:
        try:
            wanted_start = util.stt(wanted_start)
            wanted_end = util.stt(wanted_end)
        except Exception:
            pass  # already given as epoch floats

    from pyrocko import pile

    paths = []
    safecon = 0
    for path in sorted(pathlist):
        path = str(path)

        # Day folders encode their time span at fixed positions in the
        # name; recover the start (d1) and end (d2) epochs from there.
        d2 = float(path[-12:])
        d1 = float(path[-25:-13])

        if wanted_start is not None:
            do_safety_files = False
            if (d1 >= wanted_start and d2 <= wanted_end) or (
                    0. < d2 - wanted_end < 86400. and safecon == 0):

                st = model.load_stations(
                    path + '/waveforms/stations.raw.txt')

                pathlist_waveform_files = Path(
                    path + '/waveforms/rest/').glob('*.mseed')

                # Minute-of-hour of the wanted start; close to a full
                # hour, files from the adjacent hour are needed too.
                wanted_start_str = util.tts(wanted_start)[14:16]
                diff_to_full = float(wanted_start_str)
                max_diff = 55.
                min_diff = 5.
                if diff_to_full > max_diff or diff_to_full < min_diff:
                    do_safety_files = True

                for path_wave in sorted(pathlist_waveform_files):
                    path_wave = str(path_wave)
                    p1 = path_wave[-25:-15]
                    p2 = path_wave[-14:-12]
                    p3 = path_wave[-11:-9]
                    p4 = path_wave[-8:-6]
                    try:
                        # Parse only to validate the file name; files
                        # with unexpected names are skipped.
                        util.stt(p1 + ' ' + p2 + ':' + p3 + ':' + p4)

                        wanted_hour = float(
                            util.tts(wanted_start)[11:13])

                        if do_safety_files:
                            if float(p2) - wanted_hour == 0:
                                paths.append(path_wave)
                            if diff_to_full > max_diff and \
                                    float(p2) - wanted_hour == 1.:
                                paths.append(path_wave)
                            if diff_to_full < min_diff and \
                                    float(p2) - wanted_hour == -1.:
                                paths.append(path_wave)
                        else:
                            if float(p2) - wanted_hour == 0:
                                paths.append(path_wave)
                    except Exception:
                        pass

                safecon += 1

    p = pile.make_pile(paths)
    for traces in p.chopper(tmin=wanted_start, tinc=duration):
        if traces:
            if traces[0].tmax < wanted_end:
                waveforms.append(traces)
                # st holds the stations of the last matching day folder.
                stations.append(st)

    return waveforms, stations
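# The archive loader above recovers timestamps from fixed character
# positions in file names, which only works if every file follows the
# same naming convention. A sketch of that slicing with a hypothetical
# archive path:

from pyrocko import util

path_wave = '/data/archive/waveforms/rest/2021-03-20_20-10-10.mseed'
p1 = path_wave[-25:-15]    # '2021-03-20'
p2 = path_wave[-14:-12]    # '20' (hour)
p3 = path_wave[-11:-9]     # '10' (minute)
p4 = path_wave[-8:-6]      # '10' (second)
file_time = util.stt('%s %s:%s:%s' % (p1, p2, p3, p4))
print(util.tts(file_time))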