def test_conversions(self):
    from pyrocko import model
    from pyrocko.fdsn import station, resp, enhanced_sacpz

    t = util.str_to_time('2014-01-01 00:00:00')
    codes = 'GE', 'EIL', '', 'BHZ'

    resp_fpath = common.test_data_file('test1.resp')
    stations = [model.Station(
        *codes[:3],
        lat=29.669901,
        lon=34.951199,
        elevation=210.0,
        depth=0.0)]

    sx_resp = resp.make_stationxml(
        stations, resp.iload_filename(resp_fpath))

    pr_sx_resp = sx_resp.get_pyrocko_response(
        codes, time=t, fake_input_units='M/S')

    pr_evresp = trace.Evalresp(
        resp_fpath, nslc_id=codes, target='vel', time=t)

    sacpz_fpath = common.test_data_file('test1.sacpz')
    sx_sacpz = enhanced_sacpz.make_stationxml(
        enhanced_sacpz.iload_filename(sacpz_fpath))

    pr_sx_sacpz = sx_sacpz.get_pyrocko_response(
        codes, time=t, fake_input_units='M/S')

    pr_sacpz = trace.PoleZeroResponse(*pz.read_sac_zpk(sacpz_fpath))
    try:
        pr_sacpz.zeros.remove(0.0j)
    except ValueError:
        pr_sacpz.poles.append(0.0j)

    sxml_geofon_fpath = common.test_data_file('test1.stationxml')
    sx_geofon = station.load_xml(filename=sxml_geofon_fpath)

    pr_sx_geofon = sx_geofon.get_pyrocko_response(
        codes, time=t, fake_input_units='M/S')

    sxml_iris_fpath = common.test_data_file('test2.stationxml')
    sx_iris = station.load_xml(filename=sxml_iris_fpath)

    pr_sx_iris = sx_iris.get_pyrocko_response(
        codes, time=t, fake_input_units='M/S')

    freqs = num.logspace(num.log10(0.001), num.log10(1.0), num=1000)
    tf_ref = pr_evresp.evaluate(freqs)
    for pr in [pr_sx_resp, pr_sx_sacpz, pr_sacpz, pr_sx_geofon, pr_sx_iris]:
        tf = pr.evaluate(freqs)
        # plot_tfs(freqs, [tf_ref, tf])
        assert cnumeqrel(tf_ref, tf, 0.01)

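# The assertions above use a complex relative-comparison helper named
# cnumeqrel. A minimal sketch of what such a helper could look like
# (hypothetical implementation, not necessarily the project's own):

import numpy as num

def cnumeqrel(a, b, eps):
    # True if the complex arrays a and b agree to within relative
    # tolerance eps, measured against the magnitude of b
    return num.all(num.abs(a - b) <= eps * num.abs(b))
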
def station(
        url=g_url, site=g_default_site, majorversion=1, parsed=True,
        **kwargs):

    url = fillurl(url, site, 'station', majorversion)

    params = fix_params(kwargs)

    if parsed:
        from pyrocko.fdsn import station
        format = params.get('format', 'xml')
        if format == 'text':
            if params.get('level', 'station') == 'channel':
                return station.load_channel_table(
                    stream=_request(url, **params))
            else:
                raise InvalidRequest('if format="text" shall be parsed, '
                                     'level="channel" is required')

        elif format == 'xml':
            return station.load_xml(stream=_request(url, **params))
        else:
            raise InvalidRequest('format must be "xml" or "text"')
    else:
        return _request(url, **params)

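# Usage sketch for station() above (assumes network access to the GEOFON
# FDSN web service; the NSLC codes are examples only):
#
# sx = station(site='geofon', network='GE', station='EIL', level='channel')
# for st in sx.get_pyrocko_stations():
#     print('.'.join(st.nsl()))
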
def test_read_samples(self):
    ok = False
    for fn in ['geeil.iris.xml', 'geeil.geofon.xml']:
        fpath = common.test_data_file(fn)
        x = fdsn_station.load_xml(filename=fpath)
        for network in x.network_list:
            assert network.code == 'GE'
            for station in network.station_list:
                assert station.code == 'EIL'
                for channel in station.channel_list:
                    assert channel.code[:2] == 'BH'
                    for stage in channel.response.stage_list:
                        ok = True

        assert ok

        pstations = x.get_pyrocko_stations()
        assert len(pstations) in (3, 4)
        for s in x.get_pyrocko_stations():
            assert len(s.get_channels()) == 3

        assert len(
            x.get_pyrocko_stations(time=stt('2010-01-15 10:00:00'))) == 1

        new = fdsn_station.FDSNStationXML.from_pyrocko_stations(pstations)
        assert len(new.get_pyrocko_stations()) in (3, 4)
        for s in new.get_pyrocko_stations():
            assert len(s.get_channels()) == 3

def add_stations(
        self,
        stations=None,
        pyrocko_stations_filename=None,
        stationxml_filenames=None):

    if stations is not None:
        for station in stations:
            self.stations[station.nsl()] = station

    if pyrocko_stations_filename is not None:
        logger.debug(
            'loading stations from file %s' % pyrocko_stations_filename)

        for station in model.load_stations(pyrocko_stations_filename):
            self.stations[station.nsl()] = station

    if stationxml_filenames is not None and len(stationxml_filenames) > 0:
        for stationxml_filename in stationxml_filenames:
            logger.debug(
                'loading stations from StationXML file %s' %
                stationxml_filename)

            sx = fs.load_xml(filename=stationxml_filename)
            for station in sx.get_pyrocko_stations():
                channels = station.get_channels()
                if len(channels) == 1 and channels[0].name.endswith('Z'):
                    logger.warning(
                        'Station %s has vertical component'
                        ' information only, adding mocked channels.'
                        % station.nsl_string())
                    station.add_channel(model.Channel('N'))
                    station.add_channel(model.Channel('E'))

                self.stations[station.nsl()] = station

def add_responses(self, sacpz_dirname=None, stationxml_filenames=None):
    if sacpz_dirname:
        logger.debug('Loading SAC PZ responses from %s' % sacpz_dirname)
        for x in enhanced_sacpz.iload_dirname(sacpz_dirname):
            self.responses[x.codes].append(x)

    if stationxml_filenames:
        for stationxml_filename in stationxml_filenames:
            logger.debug(
                'Loading StationXML responses from %s' %
                stationxml_filename)

            self.responses_stationxml.append(
                fs.load_xml(filename=stationxml_filename))

def add_stations(
        self,
        stations=None,
        pyrocko_stations_filename=None,
        stationxml_filenames=None):

    if stations is not None:
        for station in stations:
            self.stations[station.nsl()] = station

    if pyrocko_stations_filename is not None:
        logger.debug(
            'Loading stations from file "%s"...' %
            pyrocko_stations_filename)

        for station in model.load_stations(pyrocko_stations_filename):
            self.stations[station.nsl()] = station

    if stationxml_filenames is not None and len(stationxml_filenames) > 0:
        for stationxml_filename in stationxml_filenames:
            if not op.exists(stationxml_filename):
                continue

            logger.debug(
                'Loading stations from StationXML file "%s"...' %
                stationxml_filename)

            sx = fs.load_xml(filename=stationxml_filename)
            ev = self.get_event()
            stations = sx.get_pyrocko_stations(time=ev.time)
            if len(stations) == 0:
                logger.warning(
                    'No stations found for time %s in file "%s".' % (
                        util.time_to_str(ev.time), stationxml_filename))

            for station in stations:
                logger.debug('Adding station: %s.%s.%s' % station.nsl())
                channels = station.get_channels()
                if len(channels) == 1 and channels[0].name.endswith('Z'):
                    logger.warning(
                        'Station "%s" has vertical component'
                        ' information only, adding mocked channels.'
                        % station.nsl_string())
                    station.add_channel(
                        model.Channel(channels[0].name[:-1] + 'N'))
                    station.add_channel(
                        model.Channel(channels[0].name[:-1] + 'E'))

                self.stations[station.nsl()] = station

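# Note on the mocked channels above: pyrocko's model.Channel guesses
# azimuth and dip from the channel name when they are not passed
# explicitly, so the fabricated horizontal components get sensible
# orientations. A minimal check (sketch, assuming that default behavior):
#
# from pyrocko import model
# ch = model.Channel('BHN')
# print(ch.azimuth, ch.dip)  # expected: 0.0, 0.0 for a north component
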
def gen_real_stations(tmin=util.stt('2014-01-01 16:10:00.000'),
                      tmax=util.stt('2014-01-01 16:39:59.000')):

    stations = []
    selection = [
        ('*', '*', '*', 'BH*', tmin, tmax),
    ]

    request_response = ws.station(
        site='iris', selection=selection, level='response')

    request_response.dump_xml(filename='stations.xml')
    sx = fs.load_xml(filename='stations.xml')

    for station in sx.get_pyrocko_stations():
        stations.append(station)

    return stations

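# Usage sketch (assumes network access to the IRIS FDSN service; note that
# gen_real_stations() writes 'stations.xml' into the current working
# directory as a side effect):
#
# stations = gen_real_stations()
# print('got %i stations' % len(stations))
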
def station(
        url=g_url, site=g_default_site, majorversion=1, parsed=True,
        selection=None, **kwargs):

    url = fillurl(url, site, 'station', majorversion)

    params = fix_params(kwargs)

    if selection:
        lines = []
        for k, v in params.items():
            lines.append('%s=%s' % (k, v))

        for (network, station, location, channel, tmin, tmax) in selection:
            if location == '':
                location = '--'

            lines.append(' '.join((
                network, station, location, channel,
                sdatetime(tmin), sdatetime(tmax))))

        params = dict(post='\n'.join(lines))

    if parsed:
        from pyrocko.fdsn import station
        format = params.get('format', 'xml')
        if format == 'text':
            if params.get('level', 'station') == 'channel':
                return station.load_channel_table(
                    stream=_request(url, **params))
            else:
                raise InvalidRequest('if format="text" shall be parsed, '
                                     'level="channel" is required')

        elif format == 'xml':
            return station.load_xml(stream=_request(url, **params))
        else:
            raise InvalidRequest('format must be "xml" or "text"')
    else:
        return _request(url, **params)

def test_read_samples(self):
    ok = False
    for fn in ["geeil.iris.xml", "geeil.geofon.xml"]:
        fpath = common.test_data_file(fn)
        x = fdsn_station.load_xml(filename=fpath)
        for network in x.network_list:
            assert network.code == "GE"
            for station in network.station_list:
                assert station.code == "EIL"
                for channel in station.channel_list:
                    assert channel.code[:2] == "BH"
                    for stage in channel.response.stage_list:
                        ok = True

        assert ok

        assert len(x.get_pyrocko_stations()) in (3, 4)
        for s in x.get_pyrocko_stations():
            assert len(s.get_channels()) == 3

        assert len(
            x.get_pyrocko_stations(time=stt("2010-01-15 10:00:00"))) == 1

def load_response_information(
        filename, format, nslc_patterns=None, fake_input_units=None):

    from pyrocko import pz, trace
    from pyrocko.fdsn import resp as fresp

    resps = []
    labels = []
    if format == 'sacpz':
        if fake_input_units is not None:
            raise Exception(
                'cannot guess true input units from plain SAC PZ files')

        zeros, poles, constant = pz.read_sac_zpk(filename)
        resp = trace.PoleZeroResponse(
            zeros=zeros, poles=poles, constant=constant)

        resps.append(resp)
        labels.append(filename)

    elif format == 'resp':
        for resp in list(fresp.iload_filename(filename)):
            if nslc_patterns is not None and not util.match_nslc(
                    nslc_patterns, resp.codes):
                continue

            units = ''
            if resp.response.instrument_sensitivity:
                s = resp.response.instrument_sensitivity
                if s.input_units and s.output_units:
                    units = ', %s -> %s' % (
                        fake_input_units or s.input_units.name,
                        s.output_units.name)

            resps.append(resp.response.get_pyrocko_response(
                resp.codes, fake_input_units=fake_input_units))

            labels.append('%s (%s.%s.%s.%s, %s - %s%s)' % (
                (filename, ) + resp.codes +
                (tts(resp.start_date), tts(resp.end_date), units)))

    elif format == 'stationxml':
        from pyrocko.fdsn import station as fs

        sx = fs.load_xml(filename=filename)
        for network in sx.network_list:
            for station in network.station_list:
                for channel in station.channel_list:
                    nslc = (
                        network.code,
                        station.code,
                        channel.location_code,
                        channel.code)

                    if nslc_patterns is not None and not util.match_nslc(
                            nslc_patterns, nslc):
                        continue

                    units = ''
                    if channel.response.instrument_sensitivity:
                        s = channel.response.instrument_sensitivity
                        if s.input_units and s.output_units:
                            units = ', %s -> %s' % (
                                fake_input_units or s.input_units.name,
                                s.output_units.name)

                    resps.append(channel.response.get_pyrocko_response(
                        nslc, fake_input_units=fake_input_units))

                    labels.append('%s (%s.%s.%s.%s, %s - %s%s)' % (
                        (filename, ) + nslc +
                        (tts(channel.start_date), tts(channel.end_date),
                         units)))

    return resps, labels

def cached_load_stationxml(fn):
    if fn not in g_sx_cache:
        g_sx_cache[fn] = fs.load_xml(filename=fn)

    return g_sx_cache[fn]

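# cached_load_stationxml() memoizes parsed StationXML documents in a
# module-level dict; a minimal sketch of the setup it assumes (name taken
# from the function above):

g_sx_cache = {}
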
def main():
    parser = OptionParser(
        usage=usage,
        description=description)

    parser.add_option(
        '--force',
        dest='force',
        action='store_true',
        default=False,
        help='allow recreation of output <directory>')

    parser.add_option(
        '--debug',
        dest='debug',
        action='store_true',
        default=False,
        help='print debugging information to stderr')

    parser.add_option(
        '--dry-run',
        dest='dry_run',
        action='store_true',
        default=False,
        help='show available stations/channels and exit '
             '(do not download waveforms)')

    parser.add_option(
        '--continue',
        dest='continue_',
        action='store_true',
        default=False,
        help='continue download after an interruption')

    parser.add_option(
        '--local-data',
        dest='local_data',
        action='append',
        help='add file/directory with local data')

    parser.add_option(
        '--local-stations',
        dest='local_stations',
        action='append',
        help='add local stations file')

    parser.add_option(
        '--local-responses-resp',
        dest='local_responses_resp',
        action='append',
        help='add file/directory with local responses in RESP format')

    parser.add_option(
        '--local-responses-pz',
        dest='local_responses_pz',
        action='append',
        help='add file/directory with local pole-zero responses')

    parser.add_option(
        '--local-responses-stationxml',
        dest='local_responses_stationxml',
        help='add file with local response information in StationXML format')

    parser.add_option(
        '--window',
        dest='window',
        default='full',
        help='set time window to choose [full, p, "<time-start>,<time-end>"]'
             ' (time format is YYYY-MM-DD HH:MM:SS)')

    parser.add_option(
        '--out-components',
        choices=['enu', 'rtu'],
        dest='out_components',
        default='rtu',
        help='set output component orientations to radial-transverse-up '
             '[rtu] (default) or east-north-up [enu]')

    parser.add_option(
        '--padding-factor',
        type=float,
        default=6.0,
        dest='padding_factor',
        help='extend time window on either side, in multiples of 1/<fmin_hz> '
             '(default: %default)')

    parser.add_option(
        '--credentials',
        dest='user_credentials',
        action='append',
        default=[],
        metavar='SITE,USER,PASSWD',
        help='user credentials for specific site to access restricted data '
             '(this option can be repeated)')

    parser.add_option(
        '--token',
        dest='auth_tokens',
        metavar='SITE,FILENAME',
        action='append',
        default=[],
        help='user authentication token for specific site to access '
             'restricted data (this option can be repeated)')

    parser.add_option(
        '--sites',
        dest='sites',
        metavar='SITE1,SITE2,...',
        default='geofon,iris',
        help='sites to query (available: %s, default: "%%default")'
             % ', '.join(g_sites_available))

    parser.add_option(
        '--band-codes',
        dest='priority_band_code',
        metavar='V,L,M,B,H,S,E,...',
        default='B,H',
        help='select and prioritize band codes (default: %default)')

    parser.add_option(
        '--instrument-codes',
        dest='priority_instrument_code',
        metavar='H,L,G,...',
        default='H,L',
        help='select and prioritize instrument codes (default: %default)')

    parser.add_option(
        '--radius-min',
        dest='radius_min',
        metavar='VALUE',
        default=0.0,
        type=float,
        help='minimum radius [km]')

    parser.add_option(
        '--nstations-wanted',
        dest='nstations_wanted',
        metavar='N',
        type=int,
        help='number of stations to select initially')

    parser.add_option(
        '--magmin',
        dest='magmin',
        metavar='VALUE',
        default=6.0,
        type=float,
        help='minimum magnitude of events')

    parser.add_option(
        '--minlen',
        dest='minlen',
        metavar='VALUE',
        default=2100.0,
        type=float,
        help='minimum length of traces')

    parser.add_option(
        '--selection',
        dest='selection_file',
        action='append',
        help='add local stations file')

    (options, args) = parser.parse_args(sys.argv[1:])
    magmin = options.magmin
    minlen = options.minlen

    # the three supported call forms take 10, 7 or 6 positional arguments
    if len(args) not in (10, 7, 6):
        parser.print_help()
        sys.exit(1)
    if options.debug:
        util.setup_logging(program_name, 'debug')
    else:
        util.setup_logging(program_name, 'info')

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical('cannot use local responses in PZ and RESP '
                        'format at the same time')
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (
            options.local_responses_pz,
            options.local_responses_resp,
            options.local_responses_stationxml):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical('can only handle local responses from either PZ or '
                        'RESP or StationXML. Cannot yet merge different '
                        'response formats.')
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical('--local-responses-resp can only be used '
                        'when --stations is also given.')
        sys.exit(1)

    try:
        ename = ''
        magnitude = None
        mt = None
        if len(args) == 10:
            time = util.str_to_time(args[1] + ' ' + args[2])
            lat = float(args[3])
            lon = float(args[4])
            depth = float(args[5]) * km
            iarg = 6

        elif len(args) == 7:
            if args[2].find(':') == -1:
                sname_or_date = None
                lat = float(args[1])
                lon = float(args[2])
                event = None
                time = None
            else:
                sname_or_date = args[1] + ' ' + args[2]

            iarg = 3

        elif len(args) == 6:
            sname_or_date = args[1]
            iarg = 2

        if len(args) in (7, 6) and sname_or_date is not None:
            events = get_events_by_name_or_date(
                [sname_or_date], catalog=geofon, magmin=magmin)

            if len(events) == 0:
                logger.critical('no event found')
                sys.exit(1)
            elif len(events) > 1:
                logger.critical('more than one event found')
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg+1])
        sample_rate = float(args[iarg+2])
        eventname = args[iarg+3]
        cwd = str(sys.argv[1])
        event_dir = op.join('events', cwd, 'data')
        output_dir = op.join(event_dir)

    except Exception:
        parser.print_help()
        sys.exit(1)

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force '
            'option' % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(
            time=time,
            lat=lat,
            lon=lon,
            depth=depth,
            name=ename,
            magnitude=magnitude,
            moment_tensor=mt)

    if options.window == 'full':
        if event is None:
            logger.critical('need event for --window=full')
            sys.exit(1)

        low_velocity = 1500.
        timewindow = VelocityWindow(
            low_velocity, tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == 'p':
        if event is None:
            logger.critical('need event for --window=p')
            sys.exit(1)

        phases = list(map(cake.PhaseDef, 'P p'.split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error('required phase arrival not found')
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(',')
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"'
                            % options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tlen = tmax - tmin
    tfade = tfade_factor / fmin
    tpad = tfade

    tmin -= tpad + 1000.
    tmax += tpad + 1000.
    tinc = None

    priority_band_code = options.priority_band_code.split(',')
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical('invalid band code: %s' % s)

    priority_instrument_code = options.priority_instrument_code.split(',')
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical('invalid instrument code: %s' % s)

    station_query_conf = dict(
        latitude=lat,
        longitude=lon,
        minradius=options.radius_min * km * cake.m2d,
        maxradius=radius * cake.m2d,
        channel=','.join('?%s?' % s for s in priority_band_code))

    target_sample_rate = sample_rate
    fmax = target_sample_rate

    # target_sample_rate = None
    # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S']

    priority_units = ['M/S', 'M', 'M/S**2']
    output_units = 'M'

    sites = [x.strip() for x in options.sites.split(',') if x.strip()]

    for site in sites:
        if site not in g_sites_available:
            logger.critical('unknown FDSN site: %s' % site)
            sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(',')
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(',')
            with open(token_filename, 'r') as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical('cannot get token from file: %s' % token_filename)
            sys.exit(1)

    fn_template0 = \
        'data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed'

    fn_template_raw = op.join(output_dir, 'raw', fn_template0)
    fn_stations_raw = op.join(output_dir, 'stations.txt')
    fn_template_rest = op.join(output_dir, 'rest', fn_template0)
    fn_commandline = op.join(output_dir, 'palantiriown.command')

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                'iris': dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == 'geonet':
                extra_args.update(
                    starttime=tmin,
                    endtime=tmax)
            else:
                extra_args.update(
                    startbefore=tmax,
                    endafter=tmin,
                    includerestricted=(
                        site in g_user_credentials
                        or site in g_auth_tokens))

            logger.info('downloading channel information (%s)' % site)
            sx = ws.station(
                site=site, format='text', level='channel', **extra_args)

        except ws.EmptyResult:
            logger.error('No stations matching given criteria.')
            sx = None

        sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}

    if options.selection_file:
        logger.info('using stations from stations file!')
        stations = []
        for fn in options.selection_file:
            stations.extend(model.load_stations(fn))

        nsls_selected = set(s.nsl() for s in stations)
    else:
        nsls_selected = None

    for sx, site in zip(sxs, sites):
        if sx is None:
            # this site returned no channel metadata
            continue

        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()
            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                if nsls_selected:
                    if nsl in nsls_selected:
                        nsl_to_station[nsl] = s
                else:
                    # use the first site which has this station
                    nsl_to_station[nsl] = s

    logger.info('number of stations found: %i' % len(nsl_to_station))

    # station weeding

    nsls_selected = None
    if options.nstations_wanted:
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(
            stations_all, options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info('number of stations selected: %i' % len(nsls_selected))

    if tinc is None:
        tinc = 3600.
    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)

    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it+1) * tinc, tmax)
        logger.info('time window %i/%i (%s - %s)' % (
            it+1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)

        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win))

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None \
                        and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon

                    dist = orthodrome.distance_accurate50m_numpy(
                        lat_, lon_,
                        channel.latitude.value,
                        channel.longitude.value)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None

                    try:
                        tmin_, tmax_ = timewindow(time_, dist, depth_)

                        tmin_this = tmin_ - tpad
                        tmax_this = tmax_ + tpad

                        tmin_req = max(tmin_win, tmin_this)
                        tmax_req = min(tmax_win, tmax_this)

                        if channel.sample_rate:
                            deltat = 1.0 / channel.sample_rate.value
                        else:
                            deltat = 1.0

                        if tmin_req < tmax_req:
                            # extend time window by some samples, because
                            # otherwise gaps are sometimes produced
                            selection.append(
                                nslc + (
                                    tmin_req - deltat * 10.0,
                                    tmax_req + deltat * 10.0))

                    except Exception:
                        pass

            if options.dry_run:
                # underscore names avoid clobbering the outer tmin/tmax
                for (net, sta, loc, cha, tmin_, tmax_) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i+neach]
                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ''
                        if nbatches > 1:
                            sbatch = ' (batch %i/%i)' % (
                                (i // neach) + 1, nbatches)

                        logger.info('downloading data (%s)%s'
                                    % (site, sbatch))

                        data = ws.dataselect(
                            site=site, selection=selection_now,
                            **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            try:
                                tr.chop(tmin_win, tmax_win)
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        for fn in fns2:
                            if fn in fns:
                                logger.warn('overwriting file %s', fn)

                        fns.extend(fns2)

                    except ws.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warn(
                            'an error occurred while downloading data '
                            'for channels\n  %s' % '\n  '.join(
                                '.'.join(x[:4]) for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return '' if x == 1 else 's'

        for nslc in nslcs:
            sites_ = tuple(sorted(available_through[nslc]))
            logger.info('selected: %s.%s.%s.%s from site%s %s' % (
                nslc + (plural_s(len(sites_)), '+'.join(sites_))))

            all_channels[sites_].add(nslc)
            all_stations[sites_].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites_ in sorted(
                all_channels.keys(),
                key=lambda sites_: (-len(sites_), sites_)):

            nchannels = len(all_channels[sites_])
            nstations = len(all_stations[sites_])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info('selected (%s): %i channel%s (%i station%s)' % (
                '+'.join(sites_), nchannels, plural_s(nchannels),
                nstations, plural_s(nstations)))

        logger.info('selected total: %i channel%s (%i station%s)' % (
            nchannels_all, plural_s(nchannels_all),
            nstations_all, plural_s(nstations_all)))

        logger.info('dry run done.')
        sys.exit(0)
    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data came
        # from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info('downloading response information (%s)' % site)
            sxs[site] = ws.station(
                site=site, level='response', selection=selection)

            sxs[site].dump_xml(
                filename=op.join(output_dir, 'stations.%s.xml' % site))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site['local'] = set()
        plocal = pile.make_pile(options.local_data, fileformat='detect')
        for traces in plocal.chopper_grouped(
                gather=lambda tr: tr.nslc_id,
                tmin=tmin,
                tmax=tmax,
                tinc=tinc):

            for tr in traces:
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site['local'].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append('local')

    if options.local_responses_pz:
        sxs['local'] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs['local'] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs['local'] = station.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [
        nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error('no data available')
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)

    # otinc = nice_seconds_floor(p.get_deltatmin() * 500000.)
    otinc = 3600.
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
    for traces_a in p.chopper_grouped(
            gather=lambda tr: tr.nslc_id,
            tmin=otmin, tmax=otmax, tinc=otinc, tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(
                    rest_traces_b, win_b, fn_template_rest))

                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue

                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=output_units)

                    break

                except station.NoResponseInformation:
                    failure.append('%s: no response information' % site)

                except station.MultipleResponseInformation:
                    failure.append(
                        '%s: multiple response information' % site)

            if response is None:
                failure = ', '.join(failure)
            else:
                failure = ''
                try:
                    rest_tr = tr.transfer(
                        tfade, ftap, response, invert=True)

                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData,
                        trace.InfiniteResponse):
                    failure = 'trace too short'

            if failure:
                logger.warn('failed to restitute trace %s.%s.%s.%s (%s)'
                            % (tr.nslc_id + (failure,)))

        if rest_traces_b:
            try:
                rest_traces = trace.degapper(
                    rest_traces_b + rest_traces_a,
                    deoverlap='crossfade_cos')

                fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
                rest_traces_a = []
                if win_a:
                    for tr in rest_traces:
                        try:
                            rest_traces_a.append(
                                tr.chop(win_a[0], win_a[1] + otpad,
                                        inplace=False))
                        except trace.NoData:
                            pass

            except Exception:
                pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = \
        'DISPL.%(network)s.%(station)s.%(location)s.%(channel)s'

    fn_waveforms = op.join(output_dir, 'prepared', fn_template1)
    fn_stations_prep = op.join(output_dir, 'stations_disp.txt')
    fn_event = op.join(event_dir, 'event.txt')

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)
        traces = trace.degapper(traces, maxgap=50, maxlap=50)
        keep = []
        for tr in traces:
            if deltat is not None:
                try:
                    tr.downsample_to(
                        deltat, snap=True, allow_upsample_max=5)

                    keep.append(tr)
                except util.UnavailableDecimation as e:
                    logger.warn('Cannot downsample %s.%s.%s.%s: %s'
                                % (tr.nslc_id + (e,)))
                    continue

        if options.out_components == 'rtu':
            pios = s.guess_projections_to_rtu(out_channels=('R', 'T', 'Z'))
        elif options.out_components == 'enu':
            pios = s.guess_projections_to_enu(out_channels=('E', 'N', 'Z'))
        else:
            assert False

        trss = []
        for (proj, in_channels, out_channels) in pios:
            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)
                trss.append(tr)

            if proc:
                if tr.tmax - tr.tmin >= minlen:
                    io.save(proc, fn_waveforms)
                    used_stations.append(s)

    gaps = []
    prep_stations = list(used_stations)
    prep_stations_one = []
    cluster_stations_one = []
    prep_stations_cluster = prep_stations.copy()
    for st in prep_stations:
        for channel in ['BHE', 'BHN', 'BHZ', 'BH1', 'BH2']:
            try:
                st.remove_channel_by_name(channel)
            except Exception:
                pass

        prep_stations_one.append(st)
    for st in prep_stations_cluster:
        for channel in ['R', 'T']:
            try:
                st.remove_channel_by_name(channel)
            except Exception:
                pass

        cluster_stations_one.append(st)

    util.ensuredirs(fn_stations_prep)
    model.dump_events([event], fn_event)

    from subprocess import call
    script = 'cat %s/rest/*.mseed > %s/traces.mseed' % (
        output_dir, output_dir)
    call(script, shell=True)

    script = 'cat %s/prepared/*..* > %s/traces_rotated.mseed' % (
        output_dir, output_dir)
    call(script, shell=True)

    traces = io.load(output_dir + '/traces_rotated.mseed')
    for tr in traces:
        tr.ydata = num.diff(tr.ydata)

    io.save(traces, output_dir + '/traces_velocity.mseed')

    cluster_stations_ones = []
    for st in cluster_stations_one:
        add = 0
        for tr in traces:
            if st.station == tr.station:
                add = 1

        for stx in cluster_stations_ones:
            if stx.station == st.station:
                add = 0

        if add == 1:
            cluster_stations_ones.append(st)

    prep_stations_ones = []
    for st in prep_stations_one:
        add = 0
        for tr in traces:
            if st.station == tr.station:
                add = 1

        for stx in prep_stations_ones:
            if stx.station == st.station:
                add = 0

        if add == 1:
            prep_stations_ones.append(st)

    gaps = []
    for tr in traces:
        for st in cluster_stations_ones:
            for channel in st.channels:
                if tr.station == st.station \
                        and tr.location == st.location \
                        and tr.network == st.network \
                        and channel.name == tr.channel:
                    gaps.append(st.station)

    remove = [x for x in gaps if gaps.count(x) > 1]
    for station_code in remove:
        for st in cluster_stations_ones:
            if st.station == station_code:
                try:
                    cluster_stations_ones.remove(st)
                except Exception:
                    pass

    gaps = []
    for tr in traces:
        for st in prep_stations_ones:
            for channel in st.channels:
                if tr.station == st.station \
                        and tr.location == st.location \
                        and tr.network == st.network \
                        and channel.name == tr.channel:
                    gaps.append(st.station)

    remove = [x for x in gaps if gaps.count(x) > 1]
    for station_code in remove:
        for st in prep_stations_ones:
            if st.station == station_code:
                try:
                    prep_stations_ones.remove(st)
                except Exception:
                    pass

    fn_stations_cluster = op.join(output_dir, 'stations_cluster.txt')

    model.dump_stations(prep_stations_ones, fn_stations_prep)
    model.dump_stations(cluster_stations_ones, fn_stations_cluster)

    logger.info('prepared waveforms from %i stations' % len(stations))

def test_read_big(self):
    for site in ['iris']:
        fpath = common.test_data_file('%s_1014-01-01_all.xml' % site)
        fdsn_station.load_xml(filename=fpath)

sites.append('local')

if options.local_responses_pz:
    sxs['local'] = epz.make_stationxml(
        epz.iload(options.local_responses_pz))

if options.local_responses_resp:
    local_stations = []
    for fn in options.local_stations:
        local_stations.extend(model.load_stations(fn))

    sxs['local'] = resp.make_stationxml(
        local_stations, resp.iload(options.local_responses_resp))

if options.local_responses_stationxml:
    sxs['local'] = station.load_xml(
        filename=options.local_responses_stationxml)

# chapter 1.6: dump raw data stations file

nsl_to_station = {}
for site in sites:
    if site in sxs:
        stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
        for s in stations:
            nsl = s.nsl()
            if nsl not in nsl_to_station:
                nsl_to_station[nsl] = s

stations = [
    nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

util.ensuredirs(fn_stations_raw)

def snuffler_from_commandline(args=sys.argv):
    usage = '''usage: %prog [options] waveforms ...'''
    parser = OptionParser(usage=usage)

    parser.add_option(
        '--format',
        dest='format',
        default='detect',
        choices=pyrocko.io.allowed_formats('load'),
        help='assume input files are of given FORMAT. Choices: %s' %
             pyrocko.io.allowed_formats('load', 'cli_help', 'detect'))

    parser.add_option(
        '--pattern',
        dest='regex',
        metavar='REGEX',
        help='only include files whose paths match REGEX')

    parser.add_option(
        '--stations',
        dest='station_fns',
        action='append',
        default=[],
        metavar='STATIONS',
        help='read station information from file STATIONS')

    parser.add_option(
        '--stationxml',
        dest='stationxml_fns',
        action='append',
        default=[],
        metavar='STATIONSXML',
        help='read station information from XML file STATIONSXML')

    parser.add_option(
        '--event', '--events',
        dest='event_fns',
        action='append',
        default=[],
        metavar='EVENT',
        help='read event information from file EVENT')

    parser.add_option(
        '--markers',
        dest='marker_fns',
        action='append',
        default=[],
        metavar='MARKERS',
        help='read marker information file MARKERS')

    parser.add_option(
        '--follow',
        type='float',
        dest='follow',
        metavar='N',
        help='follow real time with a window of N seconds')

    parser.add_option(
        '--cache',
        dest='cache_dir',
        default=pyrocko.config.config().cache_dir,
        metavar='DIR',
        help='use directory DIR to cache trace metadata '
             '(default=\'%default\')')

    parser.add_option(
        '--force-cache',
        dest='force_cache',
        action='store_true',
        default=False,
        help='use the cache even when trace attribute spoofing is active '
             '(may have silly consequences)')

    parser.add_option(
        '--store-path',
        dest='store_path',
        metavar='PATH_TEMPLATE',
        help='store data received through streams to PATH_TEMPLATE')

    parser.add_option(
        '--store-interval',
        type='float',
        dest='store_interval',
        default=600,
        metavar='N',
        help='dump stream data to file every N seconds [default: %default]')

    parser.add_option(
        '--ntracks',
        type='int',
        dest='ntracks',
        default=24,
        metavar='N',
        help='initially use N waveform tracks in viewer [default: %default]')

    parser.add_option(
        '--opengl',
        dest='opengl',
        action='store_true',
        default=False,
        help='use OpenGL for drawing')

    parser.add_option(
        '--debug',
        dest='debug',
        action='store_true',
        default=False,
        help='print debugging information to stderr')

    options, args = parser.parse_args(list(args[1:]))

    if options.debug:
        pyrocko.util.setup_logging('snuffler', 'debug')
    else:
        pyrocko.util.setup_logging('snuffler', 'warning')

    pile = pyrocko.pile.Pile()
    stations = []
    for stations_fn in options.station_fns:
        stations.extend(pyrocko.model.load_stations(stations_fn))

    for stationxml_fn in options.stationxml_fns:
        stations.extend(
            fdsn_station.load_xml(
                filename=stationxml_fn).get_pyrocko_stations())

    events = []
    for event_fn in options.event_fns:
        events.extend(pyrocko.model.Event.load_catalog(event_fn))

    markers = []
    for marker_fn in options.marker_fns:
        markers.extend(pyrocko.pile_viewer.Marker.load_markers(marker_fn))

    return snuffle(
        pile,
        stations=stations,
        events=events,
        markers=markers,
        ntracks=options.ntracks,
        follow=options.follow,
        controls=True,
        opengl=options.opengl,
        paths=args,
        cache_dir=options.cache_dir,
        regex=options.regex,
        format=options.format,
        force_cache=options.force_cache,
        store_path=options.store_path,
        store_interval=options.store_interval)

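# Invocation sketch for the wrapper above; the filenames are hypothetical
# placeholders, and the call assumes this module's snuffle() is importable:
#
# snuffler_from_commandline(
#     ['snuffler', '--stationxml=stations.xml', 'data/traces.mseed'])
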
def load_response_information(
        filename, format, nslc_patterns=None, fake_input_units=None):

    from pyrocko import pz, trace
    from pyrocko.io import resp as fresp

    resps = []
    labels = []
    if format == 'sacpz':
        if fake_input_units is not None:
            raise Exception(
                'cannot guess true input units from plain SAC PZ files')

        zeros, poles, constant = pz.read_sac_zpk(filename)
        resp = trace.PoleZeroResponse(
            zeros=zeros, poles=poles, constant=constant)

        resps.append(resp)
        labels.append(filename)

    elif format == 'pf':
        if fake_input_units is not None:
            raise Exception(
                'cannot guess true input units from plain response files')

        resp = guts.load(filename=filename)
        resps.append(resp)
        labels.append(filename)

    elif format == 'resp':
        for resp in list(fresp.iload_filename(filename)):
            if nslc_patterns is not None and not util.match_nslc(
                    nslc_patterns, resp.codes):
                continue

            units = ''
            if resp.response.instrument_sensitivity:
                s = resp.response.instrument_sensitivity
                if s.input_units and s.output_units:
                    units = ', %s -> %s' % (
                        fake_input_units or s.input_units.name,
                        s.output_units.name)

            resps.append(resp.response.get_pyrocko_response(
                resp.codes, fake_input_units=fake_input_units))

            labels.append('%s (%s.%s.%s.%s, %s - %s%s)' % (
                (filename, ) + resp.codes +
                (tts(resp.start_date), tts(resp.end_date), units)))

    elif format == 'stationxml':
        from pyrocko.fdsn import station as fs

        sx = fs.load_xml(filename=filename)
        for network in sx.network_list:
            for station in network.station_list:
                for channel in station.channel_list:
                    nslc = (
                        network.code,
                        station.code,
                        channel.location_code,
                        channel.code)

                    if nslc_patterns is not None and not util.match_nslc(
                            nslc_patterns, nslc):
                        continue

                    if not channel.response:
                        logger.warn(
                            'no response for channel %s.%s.%s.%s given.'
                            % nslc)
                        continue

                    units = ''
                    if channel.response.instrument_sensitivity:
                        s = channel.response.instrument_sensitivity
                        if s.input_units and s.output_units:
                            units = ', %s -> %s' % (
                                fake_input_units or s.input_units.name,
                                s.output_units.name)

                    resps.append(channel.response.get_pyrocko_response(
                        nslc, fake_input_units=fake_input_units))

                    labels.append('%s (%s.%s.%s.%s, %s - %s%s)' % (
                        (filename, ) + nslc +
                        (tts(channel.start_date), tts(channel.end_date),
                         units)))

    return resps, labels

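# Usage sketch for load_response_information(); the filename and NSLC
# pattern are hypothetical placeholders:
#
# import numpy as num
# resps, labels = load_response_information(
#     'responses.xml', 'stationxml', nslc_patterns=['GE.EIL..BHZ'])
# freqs = num.logspace(-3, 0, num=100)
# for resp_, label in zip(resps, labels):
#     tf = resp_.evaluate(freqs)  # complex transfer function samples
#     print(label, num.abs(tf).max())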