def grok_sacpz(data):
    """Parse concatenated SACPZ text into a dict of ``ChannelInfo`` objects.

    The input is plain SACPZ text where ``*``-prefixed comment lines carry
    metadata (``* NETWORK ...: GE`` style) and the remaining lines form the
    pole-zero sections.  Consecutive pole-zero sections are separated by
    comment blocks; each completed section is parsed with
    ``pz.read_sac_zpk`` and combined with the metadata collected so far.

    ``tdatetime``, ``ChannelInfo``, ``nslc``, ``pz`` and ``logger`` are
    module-level names defined elsewhere in this file.

    :param data: SACPZ file content as a single string.
    :returns: dict mapping ``nslc(ChannelInfo)`` keys to ``ChannelInfo``
        instances; responses with missing metadata entries are skipped
        (an error is logged).
    """
    pzlines = []    # accumulated non-comment lines of the current PZ section
    d = {}          # metadata collected for the current response
    responses = []  # completed (metadata + zpk) dicts

    # Metadata keys grouped by how their values are converted.
    float_keys = ('latitude', 'longitude', 'elevation', 'depth', 'dip',
                  'azimuth', 'sample')
    string_keys = ('input', 'output', 'network', 'station', 'location',
                   'channel')
    time_keys = ('start', 'end')

    for line in data.splitlines():
        line = line.strip()
        if line.startswith('*'):
            # A comment line ends any pole-zero section in progress:
            # flush it (if it has content) and start a fresh response.
            if pzlines:
                if any(pzlines):
                    d['zpk'] = pz.read_sac_zpk(string='\n'.join(pzlines))
                    responses.append(d)
                d = {}
                pzlines = []

            # Extract "* KEY ...: value" metadata from the comment line.
            m = re.match(r'^\* ([A-Z]+)[^:]*:(.*)$', line)
            if m:
                k, v = m.group(1).lower(), m.group(2).strip()
                if k in d:
                    # NOTE(review): assert is stripped under ``python -O``;
                    # duplicate keys would then silently overwrite.
                    assert False, 'duplicate entry? %s' % k

                if k in float_keys:
                    d[k] = float(v)
                elif k in string_keys:
                    d[k] = v
                elif k in time_keys:
                    d[k] = tdatetime(v)
        else:
            # Non-comment line: part of the current pole-zero section.
            pzlines.append(line)

    # Flush the trailing pole-zero section, if any.
    if pzlines and any(pzlines):
        d['zpk'] = pz.read_sac_zpk(string='\n'.join(pzlines))
        responses.append(d)

    cis = {}
    for kwargs in responses:
        try:
            # Require the full metadata set before building a ChannelInfo.
            for k in float_keys + string_keys + time_keys:
                if k not in kwargs:
                    logger.error('Missing entry: %s' % k)
                    raise Exception()

            ci = ChannelInfo(**kwargs)
            cis[nslc(ci)] = ci
        except Exception:
            # Incomplete/broken responses are skipped, not fatal.
            logger.error('Error while parsing SACPZ data')

    return cis
def grok_sacpz(data):
    """Parse concatenated SACPZ text into a dict of ``ChannelInfo`` objects.

    ``*``-prefixed comment lines carry metadata (``* NETWORK ...: GE``
    style); the remaining lines form pole-zero sections which are parsed
    with ``pz.read_sac_zpk``.

    ``tdatetime``, ``ChannelInfo``, ``nslc``, ``pz`` and ``logger`` are
    module-level names defined elsewhere in this file.

    :param data: SACPZ file content as a single string.
    :returns: dict mapping ``nslc(ChannelInfo)`` keys to ``ChannelInfo``
        instances; responses with missing metadata are skipped (logged).
    """
    pzlines = []    # accumulated non-comment lines of the current PZ section
    d = {}          # metadata collected for the current response
    responses = []  # completed (metadata + zpk) dicts

    # Metadata keys grouped by how their values are converted.
    float_keys = ('latitude', 'longitude', 'elevation', 'depth', 'dip',
                  'azimuth', 'sample')
    string_keys = ('input', 'output', 'network', 'station', 'location',
                   'channel')
    time_keys = ('start', 'end')

    for line in data.splitlines():
        line = line.strip()
        if line.startswith('*'):
            # A comment line ends any pole-zero section in progress:
            # flush it (if it has content) and start a fresh response.
            if pzlines:
                if any(pzlines):
                    d['zpk'] = pz.read_sac_zpk(string='\n'.join(pzlines))
                    responses.append(d)
                d = {}
                pzlines = []

            # Extract "* KEY ...: value" metadata from the comment line.
            m = re.match(r'^\* ([A-Z]+)[^:]*:(.*)$', line)
            if m:
                k, v = m.group(1).lower(), m.group(2).strip()
                if k in d:
                    assert False, 'duplicate entry? %s' % k

                if k in float_keys:
                    d[k] = float(v)
                elif k in string_keys:
                    d[k] = v
                elif k in time_keys:
                    d[k] = tdatetime(v)
        else:
            # Non-comment line: part of the current pole-zero section.
            pzlines.append(line)

    # Flush the trailing pole-zero section, if any.
    if pzlines and any(pzlines):
        d['zpk'] = pz.read_sac_zpk(string='\n'.join(pzlines))
        responses.append(d)

    cis = {}
    for kwargs in responses:
        try:
            # Require the full metadata set before building a ChannelInfo.
            for k in float_keys + string_keys + time_keys:
                if k not in kwargs:
                    logger.error('Missing entry: %s' % k)
                    raise Exception()

            ci = ChannelInfo(**kwargs)
            cis[nslc(ci)] = ci
        except Exception:
            # Was a bare ``except:`` which would also swallow SystemExit
            # and KeyboardInterrupt; narrowed to Exception to match the
            # sibling implementation of this function in this file.
            logger.error('Error while parsing SACPZ data')

    return cis
def read_enhanced_sac_pz(filename):
    """Read an "enhanced" SACPZ file into a :py:class:`Channel` object.

    Enhanced SACPZ files carry channel metadata (network, station,
    coordinates, orientation, units, validity span) in the comment lines
    preceding the pole-zero section.

    :param filename: path of the enhanced SACPZ file.
    :returns: ``Channel`` with metadata and a
        :py:class:`trace.PoleZeroResponse` attached.
    :raises EnhancedSacPzError: if any required metadata entry is missing
        or cannot be converted.
    """
    zeros, poles, constant, comments = pz.read_sac_zpk(
        filename=filename, get_comments=True)

    # Collect "label : value" metadata from the comment lines.
    d = {}
    for line in comments:
        toks = line.split(':', 1)
        if len(toks) == 2:
            temp = toks[0].strip('* \t')
            for k in ('network', 'station', 'location', 'channel', 'start',
                      'end', 'latitude', 'longitude', 'depth', 'elevation',
                      'dip', 'azimuth', 'input unit', 'output unit'):
                if temp.lower().startswith(k):
                    d[k] = toks[1].strip()

    response = trace.PoleZeroResponse(zeros, poles, constant)

    try:
        channel = Channel(
            nslc=(d['network'], d['station'], d['location'], d['channel']),
            tmin=util.str_to_time(d['start'], format='%Y-%m-%dT%H:%M:%S'),
            tmax=util.str_to_time(d['end'], format='%Y-%m-%dT%H:%M:%S'),
            lat=float(d['latitude']),
            lon=float(d['longitude']),
            elevation=float(d['elevation']),
            depth=float(d['depth']),
            dip=float(d['dip']),
            azimuth=float(d['azimuth']),
            input_unit=d['input unit'],
            output_unit=d['output unit'],
            response=response)
    except Exception as e:
        # Was a bare ``except:`` which discarded the cause and would also
        # swallow SystemExit/KeyboardInterrupt; chain the original error
        # so the missing/bad entry is visible to the caller.
        raise EnhancedSacPzError(
            'cannot get all required information from file %s'
            % filename) from e

    return channel
def test_evalresp(self, plot=False):
    """Evalresp output must match the equivalent SAC pole-zero response."""
    here = os.path.dirname(__file__)
    frequencies = num.logspace(num.log10(0.001), num.log10(10.), num=1000)

    # Reference transfer function computed by evalresp from a RESP file.
    tf_evalresp = evalresp.evalresp(
        sta_list='BSEG',
        cha_list='BHZ',
        net_code='GR',
        locid='',
        instant=util.str_to_time('2012-01-01 00:00:00'),
        freqs=frequencies,
        units='DIS',
        file=os.path.join(here, 'response', 'RESP.GR.BSEG..BHZ'),
        rtype='CS')[0][4]

    # Same instrument, expressed as a SAC pole-zero file.
    pzfn = 'SAC_PZs_GR_BSEG_BHZ__2008.254.00.00.00.0000_' \
        '2599.365.23.59.59.99999'
    zeros, poles, constant = pz.read_sac_zpk(
        filename=os.path.join(here, 'response', pzfn))
    pz_response = trace.PoleZeroResponse(zeros, poles, constant)
    tf_polezero = pz_response.evaluate(frequencies)

    if plot:
        import pylab as lab
        lab.plot(frequencies, num.imag(tf_evalresp))
        lab.plot(frequencies, num.imag(tf_polezero))
        lab.gca().loglog()
        lab.show()

    assert numeq(tf_evalresp, tf_polezero, 1e-4)
def iload_fh(f):
    """Yield an :py:class:`EnhancedSacPzResponse` parsed from file handle *f*.

    Channel metadata is read from the comment lines of the enhanced SACPZ
    file; the pole-zero section becomes a
    :py:class:`trace.PoleZeroResponse`.

    :param f: open file object containing one enhanced SACPZ response.
    :raises EnhancedSacPzError: if a required metadata entry is missing.
    """
    zeros, poles, constant, comments = pz.read_sac_zpk(
        file=f, get_comments=True)

    # Collect "label : value" metadata from the comment lines.
    d = {}
    for line in comments:
        toks = line.split(':', 1)
        if len(toks) == 2:
            temp = toks[0].strip('* \t')
            for k in ('network', 'station', 'location', 'channel', 'start',
                      'end', 'latitude', 'longitude', 'depth', 'elevation',
                      'dip', 'azimuth', 'input unit', 'output unit'):
                if temp.lower().startswith(k):
                    d[k] = toks[1].strip()

    response = trace.PoleZeroResponse(zeros, poles, constant)

    try:
        yield EnhancedSacPzResponse(
            codes=(d['network'], d['station'], d['location'], d['channel']),
            tmin=util.str_to_time(d['start'], format='%Y-%m-%dT%H:%M:%S'),
            tmax=dummy_aware_str_to_time(d['end']),
            lat=float(d['latitude']),
            lon=float(d['longitude']),
            elevation=float(d['elevation']),
            depth=float(d['depth']),
            dip=float(d['dip']),
            azimuth=float(d['azimuth']),
            input_unit=d['input unit'],
            output_unit=d['output unit'],
            response=response)
    except KeyError as e:
        # Was a bare ``except:`` hiding which entry was missing (and also
        # catching unrelated errors such as malformed floats); narrowed to
        # KeyError and report the missing key, matching the sibling
        # implementation of this function in this file.
        raise EnhancedSacPzError(
            'cannot get all required information "%s"' % e.args[0])
def test_evalresp(self, plot=False):
    """Evalresp output must match the equivalent SAC pole-zero response."""
    resp_path = common.test_data_file('test2.resp')
    frequencies = num.logspace(num.log10(0.001), num.log10(10.), num=1000)

    # Reference transfer function computed by evalresp from a RESP file.
    tf_evalresp = evalresp.evalresp(
        sta_list='BSEG',
        cha_list='BHZ',
        net_code='GR',
        locid='',
        instant=util.str_to_time('2012-01-01 00:00:00'),
        freqs=frequencies,
        units='DIS',
        file=resp_path,
        rtype='CS')[0][4]

    # Same instrument from the SACPZ test data file.
    sacpz_path = common.test_data_file('test2.sacpz')
    zeros, poles, constant = pz.read_sac_zpk(sacpz_path)
    pz_response = trace.PoleZeroResponse(zeros, poles, constant)
    tf_polezero = pz_response.evaluate(frequencies)

    if plot:
        plot_tfs(frequencies, [tf_evalresp, tf_polezero])

    assert numeq(tf_evalresp, tf_polezero, 1e-4)
def _get_polezero(self, tr):
    """Load the SAC pole-zero response file matching trace ``tr``.

    Builds a ``polezero-<...>.txt`` path from the free name ``st_nslc``
    (defined elsewhere in this file; presumably a placeholder template
    expanded by ``tr.fill_template`` -- TODO confirm) and reads it with
    ``pz.read_sac_zpk``.

    :param tr: trace object providing ``fill_template``.
    :returns: ``(zeros, poles, constant)`` as returned by
        ``pz.read_sac_zpk``.
    :raises FileNotFound: if the resolved file does not exist.
    """
    # Path template; trace attributes are substituted below.
    fnt = pjoin(self._dirpath, 'polezero-%s.txt' % st_nslc)
    fn = tr.fill_template(fnt)
    if os.path.exists(fn):
        return pz.read_sac_zpk(fn)
    else:
        raise FileNotFound(fn)
def iload_fh(f, time_format='%Y-%m-%dT%H:%M:%S'):
    """Yield an :py:class:`EnhancedSacPzResponse` parsed from file handle *f*.

    Channel metadata is taken from the comment lines of the enhanced SACPZ
    file; the pole-zero section becomes a
    :py:class:`trace.PoleZeroResponse`.

    :param f: open file object containing one enhanced SACPZ response.
    :param time_format: strptime-style format for the ``start`` entry.
    :raises EnhancedSacPzError: if a required metadata entry is missing.
    """
    zeros, poles, constant, comments = pz.read_sac_zpk(
        file=f, get_comments=True)

    known_keys = (
        'network', 'station', 'location', 'channel', 'start', 'end',
        'latitude', 'longitude', 'depth', 'elevation', 'dip', 'azimuth',
        'input unit', 'output unit')

    # Gather "label : value" metadata from the comment lines.
    info = {}
    for comment in comments:
        head, sep, tail = comment.partition(':')
        if not sep:
            continue

        label = head.strip('* \t').lower()
        for key in known_keys:
            if label.startswith(key):
                info[key] = tail.strip()

    response = trace.PoleZeroResponse(zeros, poles, constant)

    try:
        yield EnhancedSacPzResponse(
            codes=(info['network'], info['station'], info['location'],
                   info['channel']),
            tmin=util.str_to_time(info['start'], format=time_format),
            tmax=dummy_aware_str_to_time(info['end']),
            lat=float(info['latitude']),
            lon=float(info['longitude']),
            elevation=float(info['elevation']),
            depth=float(info['depth']),
            dip=float(info['dip']),
            azimuth=float(info['azimuth']),
            input_unit=info['input unit'],
            output_unit=info['output unit'],
            response=response)
    except KeyError as e:
        raise EnhancedSacPzError(
            'cannot get all required information "%s"' % e.args[0])
def test_conversions(self):
    """Cross-check instrument responses from RESP, SACPZ and StationXML.

    The same channel response (GE.EIL..BHZ) is loaded through several
    independent paths -- RESP converted to StationXML, evalresp directly,
    enhanced SACPZ converted to StationXML, plain SACPZ, and two
    StationXML variants -- and all evaluated transfer functions must agree
    with the evalresp reference within 1% relative error.
    """
    from pyrocko import model
    from pyrocko.fdsn import station, resp, enhanced_sacpz

    t = util.str_to_time('2014-01-01 00:00:00')
    codes = 'GE', 'EIL', '', 'BHZ'

    # Path 1: RESP file -> StationXML -> pyrocko response.
    resp_fpath = common.test_data_file('test1.resp')
    stations = [
        model.Station(
            *codes[:3], lat=29.669901, lon=34.951199, elevation=210.0,
            depth=0.0)
    ]
    sx_resp = resp.make_stationxml(
        stations, resp.iload_filename(resp_fpath))
    pr_sx_resp = sx_resp.get_pyrocko_response(
        codes, time=t, fake_input_units='M/S')

    # Path 2 (reference): same RESP file via evalresp.
    pr_evresp = trace.Evalresp(
        resp_fpath, nslc_id=codes, target='vel', time=t)

    # Path 3: enhanced SACPZ -> StationXML -> pyrocko response.
    sacpz_fpath = common.test_data_file('test1.sacpz')
    sx_sacpz = enhanced_sacpz.make_stationxml(
        enhanced_sacpz.iload_filename(sacpz_fpath))
    pr_sx_sacpz = sx_sacpz.get_pyrocko_response(
        codes, time=t, fake_input_units='M/S')

    # Path 4: plain SACPZ.  The reference is a velocity response, so
    # remove a zero at the origin (or, failing that, add a pole) to shift
    # the SACPZ displacement response to velocity.
    pr_sacpz = trace.PoleZeroResponse(*pz.read_sac_zpk(sacpz_fpath))
    try:
        pr_sacpz.zeros.remove(0.0j)
    except ValueError:
        pr_sacpz.poles.append(0.0j)

    # Paths 5 and 6: StationXML files as served by GEOFON and IRIS.
    sxml_geofon_fpath = common.test_data_file('test1.stationxml')
    sx_geofon = station.load_xml(filename=sxml_geofon_fpath)
    pr_sx_geofon = sx_geofon.get_pyrocko_response(
        codes, time=t, fake_input_units='M/S')

    sxml_iris_fpath = common.test_data_file('test2.stationxml')
    sx_iris = station.load_xml(filename=sxml_iris_fpath)
    pr_sx_iris = sx_iris.get_pyrocko_response(
        codes, time=t, fake_input_units='M/S')

    # Compare every transfer function against the evalresp reference.
    freqs = num.logspace(num.log10(0.001), num.log10(1.0), num=1000)
    tf_ref = pr_evresp.evaluate(freqs)
    for pr in [
            pr_sx_resp, pr_sx_sacpz, pr_sacpz, pr_sx_geofon, pr_sx_iris]:
        tf = pr.evaluate(freqs)
        # plot_tfs(freqs, [tf_ref, tf])
        assert cnumeqrel(tf_ref, tf, 0.01)
def test_conversions(self):
    """Cross-check instrument responses from RESP, SACPZ and StationXML.

    The same channel (GE.EIL..BHZ) is loaded through several independent
    paths and every evaluated transfer function must agree with the
    evalresp reference within 1% relative error.
    """
    from pyrocko import model
    from pyrocko.io import resp, enhanced_sacpz
    from pyrocko.io import stationxml

    instant = util.str_to_time('2014-01-01 00:00:00')
    nslc_codes = 'GE', 'EIL', '', 'BHZ'

    # RESP file -> StationXML -> pyrocko response.
    resp_path = common.test_data_file('test1.resp')
    station_objs = [model.Station(
        *nslc_codes[:3],
        lat=29.669901,
        lon=34.951199,
        elevation=210.0,
        depth=0.0)]

    sx_from_resp = resp.make_stationxml(
        station_objs, resp.iload_filename(resp_path))
    resp_via_sx = sx_from_resp.get_pyrocko_response(
        nslc_codes, time=instant, fake_input_units='M/S')

    # Reference: the same RESP file evaluated by evalresp.
    reference = trace.Evalresp(
        resp_path, nslc_id=nslc_codes, target='vel', time=instant)

    # Enhanced SACPZ -> StationXML -> pyrocko response.
    sacpz_path = common.test_data_file('test1.sacpz')
    sx_from_sacpz = enhanced_sacpz.make_stationxml(
        enhanced_sacpz.iload_filename(sacpz_path))
    sacpz_via_sx = sx_from_sacpz.get_pyrocko_response(
        nslc_codes, time=instant, fake_input_units='M/S')

    # Plain SACPZ; drop a zero at the origin (or add a pole) so the
    # displacement response matches the velocity reference.
    sacpz_direct = trace.PoleZeroResponse(*pz.read_sac_zpk(sacpz_path))
    try:
        sacpz_direct.zeros.remove(0.0j)
    except ValueError:
        sacpz_direct.poles.append(0.0j)

    # StationXML as served by GEOFON and by IRIS.
    geofon_path = common.test_data_file('test1.stationxml')
    sx_geofon = stationxml.load_xml(filename=geofon_path)
    geofon_via_sx = sx_geofon.get_pyrocko_response(
        nslc_codes, time=instant, fake_input_units='M/S')

    iris_path = common.test_data_file('test2.stationxml')
    sx_iris = stationxml.load_xml(filename=iris_path)
    iris_via_sx = sx_iris.get_pyrocko_response(
        nslc_codes, time=instant, fake_input_units='M/S')

    frequencies = num.logspace(num.log10(0.001), num.log10(1.0), num=1000)
    tf_reference = reference.evaluate(frequencies)
    candidates = [
        resp_via_sx, sacpz_via_sx, sacpz_direct, geofon_via_sx,
        iris_via_sx]

    for candidate in candidates:
        tf = candidate.evaluate(frequencies)
        # plot_tfs(frequencies, [tf_reference, tf])
        assert cnumeqrel(tf_reference, tf, 0.01)
def load_poles_zeros(super_dir='', fns=''):
    """Load SAC pole-zero files into :py:class:`trace.PoleZeroResponse` objects.

    :param super_dir: directory prefix joined in front of each file name.
    :param fns: iterable of pole-zero file names relative to ``super_dir``.
    :returns: dict mapping each file name to its ``PoleZeroResponse``.
    """
    pzs = {}
    for fn in fns:
        zeros, poles, constant = pz.read_sac_zpk(pjoin(super_dir, fn))
        # ``num.complex`` (the NumPy alias of the builtin) was removed in
        # NumPy 1.20; the builtin ``complex`` is the drop-in replacement.
        response = trace.PoleZeroResponse(zeros, poles, complex(constant))
        pzs[fn] = response

    return pzs
def restitute_pz(tr_fn):
    """Restitute all traces in file ``tr_fn`` to displacement and save them.

    Uses the module-level ``pole_zeros`` mapping (``'STA.CHA'`` -> SAC
    pole-zero file path), ``normalization_factors``, ``inputdir`` and
    ``outputdir`` -- all defined elsewhere in this file.  Traces without a
    pole-zero entry or too short for the taper are skipped.

    NOTE(review): this block arrived with all identifiers lower-cased
    (``keyerror``, ``true``, ``trace.polezeroresponse``, ...) and Python 2
    print statements -- it could not run at all.  Identifier casing and
    prints are restored here; the station-code string literals
    ``'nkc'``/``'zhc'`` are kept as found but were presumably upper-case
    originally -- TODO confirm against the data.

    :param tr_fn: input file path; the output path is derived by replacing
        ``inputdir`` with ``outputdir``.
    """
    traces = io.load(tr_fn)
    out_traces = []
    for tr in traces:
        try:
            try:
                zeros, poles, constant = pz.read_sac_zpk(
                    pole_zeros['%s.%s' % (tr.station, tr.channel)])
            except KeyError:
                # No pole-zero file known for this station/channel.
                print('skip ', '.'.join(tr.nslc_id[1:]))
                continue

            # Extra zero: velocity -> displacement response.
            zeros.append(0.0j)

            digitizer_gain = 1e6
            constant *= digitizer_gain

            # Conversion of Hz -> i*omega:
            nzeros = len(zeros)
            npoles = len(poles)
            constant *= (2*num.pi)**(npoles-nzeros)

            if tr.station == 'nkc' or tr.station == 'zhc':
                constant *= normalization_factors[tr.station]
                t_taper = 30
                f_taper = (0.05, 0.08, 50., 75.)  # freq. domain taper [Hz]
            else:
                t_taper = 5.
                f_taper = (0.3, 0.6, 50., 75.)  # freq. domain taper [Hz]

            print(tr.station, constant)

            pz_transfer = trace.PoleZeroResponse(zeros, poles, constant)
            displacement = tr.transfer(
                t_taper,   # rise and fall of time domain taper in [s]
                f_taper,   # was a 1-tuple unpacked with ``*``; passed
                           # directly now -- same single argument
                transfer_function=pz_transfer,
                invert=True)

        except trace.TraceTooShort:
            continue

        out_traces.append(displacement)

    tr_fn = tr_fn.replace(inputdir, outputdir)
    print(tr_fn)
    io.save(out_traces, tr_fn)
    del traces
def load_response_information(filename, format, nslc_patterns=None,
                              fake_input_units=None):
    """Load instrument responses from a file in one of several formats.

    :param filename: path of the response file.
    :param format: one of ``'sacpz'``, ``'resp'``, ``'stationxml'``.
    :param nslc_patterns: optional list of NSLC glob patterns; responses
        whose codes do not match are skipped (``'resp'`` and
        ``'stationxml'`` only -- plain SACPZ carries no codes).
    :param fake_input_units: override the input units reported by the
        file; not supported for ``'sacpz'``.
    :returns: tuple ``(resps, labels)`` of pyrocko response objects and
        human-readable label strings (filename plus, where available,
        NSLC codes, validity span and units).
    :raises Exception: if ``fake_input_units`` is given with ``'sacpz'``.
    """
    from pyrocko import pz, trace
    from pyrocko.fdsn import resp as fresp

    resps = []
    labels = []
    if format == 'sacpz':
        if fake_input_units is not None:
            raise Exception(
                'cannot guess true input units from plain SAC PZ files')

        zeros, poles, constant = pz.read_sac_zpk(filename)
        resp = trace.PoleZeroResponse(
            zeros=zeros, poles=poles, constant=constant)

        resps.append(resp)
        # Plain SACPZ has no channel codes; the label is just the path.
        labels.append(filename)

    elif format == 'resp':
        for resp in list(fresp.iload_filename(filename)):
            if nslc_patterns is not None and not util.match_nslc(
                    nslc_patterns, resp.codes):
                continue

            # Units string for the label, if sensitivity info is present.
            units = ''
            if resp.response.instrument_sensitivity:
                s = resp.response.instrument_sensitivity
                if s.input_units and s.output_units:
                    units = ', %s -> %s' % (
                        fake_input_units or s.input_units.name,
                        s.output_units.name)

            resps.append(resp.response.get_pyrocko_response(
                resp.codes, fake_input_units=fake_input_units))

            labels.append('%s (%s.%s.%s.%s, %s - %s%s)' % (
                (filename, ) + resp.codes +
                (tts(resp.start_date), tts(resp.end_date), units)))

    elif format == 'stationxml':
        from pyrocko.fdsn import station as fs

        sx = fs.load_xml(filename=filename)
        for network in sx.network_list:
            for station in network.station_list:
                for channel in station.channel_list:
                    nslc = (
                        network.code,
                        station.code,
                        channel.location_code,
                        channel.code)

                    if nslc_patterns is not None and not util.match_nslc(
                            nslc_patterns, nslc):
                        continue

                    # Units string for the label, if sensitivity info is
                    # present.
                    units = ''
                    if channel.response.instrument_sensitivity:
                        s = channel.response.instrument_sensitivity
                        if s.input_units and s.output_units:
                            units = ', %s -> %s' % (
                                fake_input_units or s.input_units.name,
                                s.output_units.name)

                    resps.append(channel.response.get_pyrocko_response(
                        nslc, fake_input_units=fake_input_units))

                    labels.append('%s (%s.%s.%s.%s, %s - %s%s)' % (
                        (filename, ) + nslc +
                        (tts(channel.start_date),
                         tts(channel.end_date),
                         units)))

    return resps, labels
from pyrocko import pz, io, trace

# Poles and zeros of the STS-2 instrument (velocity -> counts), read from a
# SAC format pole-zero file.
sac_zeros, sac_poles, sac_constant = pz.read_sac_zpk(
    'STS2-Generic.polezero.txt')

# An additional zero at the origin turns the velocity->counts response into
# a displacement->counts response.
sac_zeros.append(0.0j)

sts2_response = trace.PoleZeroResponse(
    zeros=sac_zeros,
    poles=sac_poles,
    constant=sac_constant)

raw_traces = io.load('test.mseed')
collected = list(raw_traces)
for raw in raw_traces:
    restituted = raw.transfer(
        1000.,                    # time-domain taper rise/fall [s]
        (0.001, 0.002, 5., 10.),  # frequency-domain taper [Hz]
        transfer_function=sts2_response,
        invert=True)              # invert: counts -> displacement

    # Rename the channel so restituted traces are distinguishable in a
    # trace viewer.
    restituted.set_codes(channel='D' + raw.channel[-1])
    collected.append(restituted)

io.save(collected, 'displacement.mseed')
def load_response_information(
        filename, format, nslc_patterns=None, fake_input_units=None):
    """Load instrument responses from a file in one of several formats.

    :param filename: path of the response file.
    :param format: one of ``'sacpz'``, ``'pf'``, ``'resp'``,
        ``'stationxml'``.
    :param nslc_patterns: optional list of NSLC glob patterns; responses
        whose codes do not match are skipped (``'resp'`` and
        ``'stationxml'`` only -- plain SACPZ/pf files carry no codes).
    :param fake_input_units: override the input units reported by the
        file; not supported for ``'sacpz'`` and ``'pf'``.
    :returns: tuple ``(resps, labels)`` of pyrocko response objects and
        human-readable label strings.
    :raises Exception: if ``fake_input_units`` is given with ``'sacpz'``
        or ``'pf'``.
    """
    from pyrocko import pz, trace
    from pyrocko.io import resp as fresp

    resps = []
    labels = []
    if format == 'sacpz':
        if fake_input_units is not None:
            raise Exception(
                'cannot guess true input units from plain SAC PZ files')

        zeros, poles, constant = pz.read_sac_zpk(filename)
        resp = trace.PoleZeroResponse(
            zeros=zeros, poles=poles, constant=constant)

        resps.append(resp)
        # Plain SACPZ has no channel codes; the label is just the path.
        labels.append(filename)

    elif format == 'pf':
        if fake_input_units is not None:
            raise Exception(
                'cannot guess true input units from plain response files')

        resp = guts.load(filename=filename)
        resps.append(resp)
        labels.append(filename)

    elif format == 'resp':
        for resp in list(fresp.iload_filename(filename)):
            if nslc_patterns is not None and not util.match_nslc(
                    nslc_patterns, resp.codes):
                continue

            # Units string for the label, if sensitivity info is present.
            units = ''
            if resp.response.instrument_sensitivity:
                s = resp.response.instrument_sensitivity
                if s.input_units and s.output_units:
                    units = ', %s -> %s' % (
                        fake_input_units or s.input_units.name,
                        s.output_units.name)

            resps.append(resp.response.get_pyrocko_response(
                resp.codes, fake_input_units=fake_input_units))

            labels.append('%s (%s.%s.%s.%s, %s - %s%s)' % (
                (filename, ) + resp.codes +
                (tts(resp.start_date), tts(resp.end_date), units)))

    elif format == 'stationxml':
        from pyrocko.fdsn import station as fs

        sx = fs.load_xml(filename=filename)
        for network in sx.network_list:
            for station in network.station_list:
                for channel in station.channel_list:
                    nslc = (
                        network.code,
                        station.code,
                        channel.location_code,
                        channel.code)

                    if nslc_patterns is not None and not util.match_nslc(
                            nslc_patterns, nslc):
                        continue

                    if not channel.response:
                        # ``logger.warn`` is a deprecated alias; use the
                        # documented ``warning`` method.
                        logger.warning(
                            'no response for channel %s.%s.%s.%s given.'
                            % nslc)
                        continue

                    # Units string for the label, if sensitivity info is
                    # present.
                    units = ''
                    if channel.response.instrument_sensitivity:
                        s = channel.response.instrument_sensitivity
                        if s.input_units and s.output_units:
                            units = ', %s -> %s' % (
                                fake_input_units or s.input_units.name,
                                s.output_units.name)

                    resps.append(channel.response.get_pyrocko_response(
                        nslc, fake_input_units=fake_input_units))

                    labels.append(
                        '%s (%s.%s.%s.%s, %s - %s%s)' % (
                            (filename, ) + nslc +
                            (tts(channel.start_date),
                             tts(channel.end_date),
                             units)))

    return resps, labels
from pyrocko import pz, io, trace

# Poles and zeros of the STS-2 instrument, read from a SAC format
# pole-zero file.
sac_zeros, sac_poles, sac_constant = pz.read_sac_zpk(
    'STS2-Generic.polezero.txt')
sac_zeros.append(0.0j)  # extra zero at the origin: displacement response

# Restitution needs the inverse response: poles and zeros from the file
# are swapped and the gain reciprocated.
inverse_sts2 = trace.PoleZeroResponse(sac_poles, sac_zeros, 1./sac_constant)

collected = []
for raw in io.load('test.mseed'):
    restituted = raw.transfer(
        1000.,                    # time-domain taper rise/fall [s]
        (0.001, 0.002, 5., 10.),  # frequency-domain taper [Hz]
        transfer_function=inverse_sts2)

    # Rename the channel so restituted traces are distinguishable in a
    # trace viewer.
    restituted.set_codes(channel='D' + raw.channel[-1])
    collected.append(restituted)

io.save(collected, 'displacement.mseed')
def test_conversions(self):
    """Cross-check instrument responses from RESP, SACPZ and StationXML.

    The same channel (GE.EIL..BHZ) is loaded through several independent
    paths; additionally the RESP -> StationXML conversion is validated
    (with and without channel orientation info) and a direct
    SACPZ -> StationXML response conversion is exercised.  Every evaluated
    transfer function must agree with the evalresp reference within 1%
    relative error.
    """
    from pyrocko import model
    from pyrocko.io import resp, enhanced_sacpz
    from pyrocko.io import stationxml

    t = util.str_to_time('2014-01-01 00:00:00')
    codes = 'GE', 'EIL', '', 'BHZ'

    # RESP file -> StationXML; without channel info the dip is unknown.
    resp_fpath = common.test_data_file('test1.resp')
    stations = [model.Station(
        *codes[:3],
        lat=29.669901,
        lon=34.951199,
        elevation=210.0,
        depth=0.0)]

    sx_resp = resp.make_stationxml(
        stations, resp.iload_filename(resp_fpath))
    sx_resp.validate()

    assert sx_resp.network_list[0].station_list[0].channel_list[0] \
        .dip is None

    # With named channels the conversion fills in the orientation.
    stations[0].set_channels_by_name('BHE', 'BHN', 'BHZ')

    sx_resp2 = resp.make_stationxml(
        stations, resp.iload_filename(resp_fpath))
    sx_resp2.validate()

    assert sx_resp2.network_list[0].station_list[0].channel_list[0] \
        .dip.value == -90.0

    pr_sx_resp = sx_resp.get_pyrocko_response(
        codes, time=t, fake_input_units='M/S')
    # Reference: the same RESP file evaluated by evalresp.
    pr_evresp = trace.Evalresp(
        resp_fpath, nslc_id=codes, target='vel', time=t)

    # Enhanced SACPZ -> StationXML -> pyrocko response.
    sacpz_fpath = common.test_data_file('test1.sacpz')
    sx_sacpz = enhanced_sacpz.make_stationxml(
        enhanced_sacpz.iload_filename(sacpz_fpath))
    pr_sx_sacpz = sx_sacpz.get_pyrocko_response(
        codes, time=t, fake_input_units='M/S')

    # Plain SACPZ; drop a zero at the origin (or add a pole) so the
    # displacement response matches the velocity reference.
    pr_sacpz = trace.PoleZeroResponse(*pz.read_sac_zpk(sacpz_fpath))
    try:
        pr_sacpz.zeros.remove(0.0j)
    except ValueError:
        pr_sacpz.poles.append(0.0j)

    # Direct SACPZ -> StationXML response conversion, reusing the units
    # discovered by the enhanced-SACPZ path above.
    sx_sacpz_resp = \
        sx_sacpz.network_list[0].station_list[0].channel_list[0].response
    sx_sacpz_resp2 = pz.read_to_stationxml_response(
        input_unit=sx_sacpz_resp.instrument_sensitivity.input_units.name,
        output_unit=sx_sacpz_resp.instrument_sensitivity.output_units.name,
        normalization_frequency=10.,
        filename=sacpz_fpath)
    pr_sx_sacpz2 = sx_sacpz_resp2.get_pyrocko_response(codes)
    # Same displacement -> velocity adjustment as for the plain SACPZ.
    try:
        pr_sx_sacpz2.responses[0].zeros.remove(0.0j)
    except ValueError:
        pr_sx_sacpz2.responses[0].poles.append(0.0j)

    # StationXML as served by GEOFON and by IRIS.
    sxml_geofon_fpath = common.test_data_file('test1.stationxml')
    sx_geofon = stationxml.load_xml(filename=sxml_geofon_fpath)
    pr_sx_geofon = sx_geofon.get_pyrocko_response(
        codes, time=t, fake_input_units='M/S')

    sxml_iris_fpath = common.test_data_file('test2.stationxml')
    sx_iris = stationxml.load_xml(filename=sxml_iris_fpath)
    pr_sx_iris = sx_iris.get_pyrocko_response(
        codes, time=t, fake_input_units='M/S')

    # Compare every transfer function against the evalresp reference.
    freqs = num.logspace(num.log10(0.001), num.log10(1.0), num=1000)
    tf_ref = pr_evresp.evaluate(freqs)
    for pr in [pr_sx_resp, pr_sx_sacpz, pr_sacpz, pr_sx_geofon,
               pr_sx_iris, pr_sx_sacpz2]:
        tf = pr.evaluate(freqs)
        # plot_tfs(freqs, [tf_ref, tf])
        assert cnumeqrel(tf_ref, tf, 0.01)