def test_setitem(self):
    """
    Tests __setitem__ method of AttribDict class.
    """
    # 1
    ad = AttribDict()
    ad['test'] = 'NEW'
    self.assertEqual(ad['test'], 'NEW')
    self.assertEqual(ad.test, 'NEW')
    self.assertEqual(ad.get('test'), 'NEW')
    self.assertEqual(ad.__getattr__('test'), 'NEW')
    self.assertEqual(ad.__getitem__('test'), 'NEW')
    self.assertEqual(ad.__dict__['test'], 'NEW')
    self.assertEqual(ad.__dict__.get('test'), 'NEW')
    self.assertIn('test', ad)
    self.assertIn('test', ad.__dict__)
    # 2
    ad = AttribDict()
    ad.__setitem__('test', 'NEW')
    self.assertEqual(ad['test'], 'NEW')
    self.assertEqual(ad.test, 'NEW')
    self.assertEqual(ad.get('test'), 'NEW')
    self.assertEqual(ad.__getattr__('test'), 'NEW')
    self.assertEqual(ad.__getitem__('test'), 'NEW')
    self.assertEqual(ad.__dict__['test'], 'NEW')
    self.assertEqual(ad.__dict__.get('test'), 'NEW')
    self.assertIn('test', ad)
    self.assertIn('test', ad.__dict__)
def test_setitem(self):
    """
    Tests __setitem__ method of AttribDict class.
    """
    # 1
    ad = AttribDict()
    ad["test"] = "NEW"
    self.assertEqual(ad["test"], "NEW")
    self.assertEqual(ad.test, "NEW")
    self.assertEqual(ad.get("test"), "NEW")
    self.assertEqual(ad.__getattr__("test"), "NEW")
    self.assertEqual(ad.__getitem__("test"), "NEW")
    self.assertEqual(ad.__dict__["test"], "NEW")
    self.assertEqual(ad.__dict__.get("test"), "NEW")
    self.assertTrue("test" in ad)
    self.assertTrue("test" in ad.__dict__)
    # 2
    ad = AttribDict()
    ad.__setitem__("test", "NEW")
    self.assertEqual(ad["test"], "NEW")
    self.assertEqual(ad.test, "NEW")
    self.assertEqual(ad.get("test"), "NEW")
    self.assertEqual(ad.__getattr__("test"), "NEW")
    self.assertEqual(ad.__getitem__("test"), "NEW")
    self.assertEqual(ad.__dict__["test"], "NEW")
    self.assertEqual(ad.__dict__.get("test"), "NEW")
    self.assertTrue("test" in ad)
    self.assertTrue("test" in ad.__dict__)
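A minimal, self-contained sketch of the behavior exercised by the two tests above (only assumes ObsPy is installed): dictionary-style and attribute-style access on AttribDict are interchangeable.

from obspy.core.util import AttribDict

ad = AttribDict()
ad['test'] = 'NEW'        # dict-style assignment ...
assert ad.test == 'NEW'   # ... is visible as an attribute
ad.other = 42             # attribute-style assignment ...
assert ad['other'] == 42  # ... is visible as a dict item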
def get_model_info(self, model_name):
    """
    Get some information about a particular model.

    .. rubric:: Example

    >>> from obspy.clients.syngine import Client
    >>> c = Client()
    >>> db_info = c.get_model_info(model_name="ak135f_5s")
    >>> print(db_info.period)
    5.125

    :param model_name: The name of the model. Case insensitive.
    :type model_name: str
    :returns: A dictionary with more information about any model.
    :rtype: :class:`obspy.core.util.attribdict.AttribDict`
    """
    model_name = model_name.strip().lower()
    r = self._download(self._get_url("info"),
                       params={"model": model_name})
    info = AttribDict(compatibility.get_json_from_response(r))
    # Convert slip and sliprate into numpy arrays for easier handling.
    info.slip = np.array(info.slip, dtype=np.float64)
    info.sliprate = np.array(info.sliprate, dtype=np.float64)
    return info
def get_model_info(self, model_name):
    """
    Get some information about a particular model.

    .. rubric:: Example

    >>> from obspy.clients.syngine import Client
    >>> c = Client()
    >>> db_info = c.get_model_info(model_name="ak135f_5s")
    >>> print(db_info.period)
    5.125

    :param model_name: The name of the model. Case insensitive.
    :type model_name: str
    :returns: A dictionary with more information about any model.
    :rtype: :class:`obspy.core.util.attribdict.AttribDict`
    """
    model_name = model_name.strip().lower()
    r = self._download(self._get_url("info"),
                       params={"model": model_name})
    info = AttribDict(get_json(r))
    # Convert slip and sliprate into numpy arrays for easier handling.
    info.slip = np.array(info.slip, dtype=np.float64)
    info.sliprate = np.array(info.sliprate, dtype=np.float64)
    return info
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    for key in self.extra_keys:
        if not hasattr(self, 'extra'):
            self.extra = AttribDict()
        self.extra[key] = {'value': 0, 'namespace': ns}
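The snippet above relies on extra_keys and ns being defined elsewhere in its class/module. A hedged, self-contained sketch of the same pattern, using a hypothetical namespace URL (not from the original code), shows how an 'extra' AttribDict carries custom, namespaced attributes on an ObsPy header:

from obspy import Trace
from obspy.core.util import AttribDict

ns = 'http://example.org/ns/1.0'  # hypothetical namespace
tr = Trace()
tr.stats.extra = AttribDict()
tr.stats.extra['my_key'] = {'value': 0, 'namespace': ns}
print(tr.stats.extra.my_key['value'])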
def __init__(self, stime=UTCDateTime('2002-12-22'),
             etime=UTCDateTime('2003-12-31'), min_mag=5.5, max_mag=9.0,
             search_center=(38.54, 28.63), min_search_radius=30,
             max_search_radius=95, min_dep=0., max_dep=800.):
    mag_type = 'Mw'
    self.magnitude_range = AttribDict({'mag_type': mag_type,
                                       'min_mag': min_mag,
                                       'max_mag': max_mag})
    self.time_range = AttribDict({'stime': stime, 'etime': etime})
    self.distance_range = AttribDict({'center_lat': search_center[0],
                                      'center_lon': search_center[1],
                                      'min_radius_in_deg': min_search_radius,
                                      'max_radius_in_deg': max_search_radius})
    self.depth_range = AttribDict({'min_depth': min_dep,
                                   'max_depth': max_dep})
def __init__(self, phase_selected='P', length_before_phase=50,
             length_after_phase=150, length_before_origin=100,
             length_after_origin=2000, model1d='ak135'):
    self.phase_window = AttribDict({'phase_selected': phase_selected,
                                    'length_before_phase': length_before_phase,
                                    'length_after_phase': length_after_phase})
    self.data_window = AttribDict({'length_before_origin': length_before_origin,
                                   'length_after_origin': length_after_origin})
    self.ref_model1d = model1d
    if self.phase_window['length_before_phase'] < 0:
        sys.exit('set window length_before_phase > 0')
    if phase_selected != 'P' and phase_selected != 'S':
        sys.exit('This is not a phase for receiver functions: ' + phase_selected)
def test_init(self):
    """
    Tests initialization of AttribDict class.
    """
    ad = AttribDict({'test': 'NEW'})
    self.assertEqual(ad['test'], 'NEW')
    self.assertEqual(ad.test, 'NEW')
    self.assertEqual(ad.get('test'), 'NEW')
    self.assertEqual(ad.__getattr__('test'), 'NEW')
    self.assertEqual(ad.__getitem__('test'), 'NEW')
    self.assertEqual(ad.__dict__['test'], 'NEW')
    self.assertEqual(ad.__dict__.get('test'), 'NEW')
    self.assertIn('test', ad)
    self.assertIn('test', ad.__dict__)
def test_init(self):
    """
    Tests initialization of AttribDict class.
    """
    ad = AttribDict({'test': 'NEW'})
    assert ad['test'] == 'NEW'
    assert ad.test == 'NEW'
    assert ad.get('test') == 'NEW'
    assert ad.__getattr__('test') == 'NEW'
    assert ad.__getitem__('test') == 'NEW'
    assert ad.__dict__['test'] == 'NEW'
    assert ad.__dict__.get('test') == 'NEW'
    assert 'test' in ad
    assert 'test' in ad.__dict__
def test_popitem(self):
    """
    Tests popitem method of AttribDict class.
    """
    ad = AttribDict()
    ad["test2"] = "test"
    # removing via popitem
    temp = ad.popitem()
    self.assertEqual(temp, ("test2", "test"))
    self.assertFalse("test2" in ad)
    self.assertFalse("test2" in ad.__dict__)
    self.assertFalse(hasattr(ad, "test2"))
    # popitem for empty AttribDict raises a KeyError
    self.assertRaises(KeyError, ad.popitem)
def test_init(self):
    """
    Tests initialization of AttribDict class.
    """
    ad = AttribDict({"test": "NEW"})
    self.assertEqual(ad["test"], "NEW")
    self.assertEqual(ad.test, "NEW")
    self.assertEqual(ad.get("test"), "NEW")
    self.assertEqual(ad.__getattr__("test"), "NEW")
    self.assertEqual(ad.__getitem__("test"), "NEW")
    self.assertEqual(ad.__dict__["test"], "NEW")
    self.assertEqual(ad.__dict__.get("test"), "NEW")
    self.assertTrue("test" in ad)
    self.assertTrue("test" in ad.__dict__)
def test_popitem(self):
    """
    Tests popitem method of AttribDict class.
    """
    ad = AttribDict()
    ad['test2'] = 'test'
    # removing via popitem
    temp = ad.popitem()
    self.assertEqual(temp, ('test2', 'test'))
    self.assertFalse('test2' in ad)
    self.assertFalse('test2' in ad.__dict__)
    self.assertFalse(hasattr(ad, 'test2'))
    # popitem for empty AttribDict raises a KeyError
    self.assertRaises(KeyError, ad.popitem)
def test_popitem(self):
    """
    Tests popitem method of AttribDict class.
    """
    ad = AttribDict()
    ad['test2'] = 'test'
    # removing via popitem
    temp = ad.popitem()
    assert temp == ('test2', 'test')
    assert 'test2' not in ad
    assert 'test2' not in ad.__dict__
    assert not hasattr(ad, 'test2')
    # popitem for empty AttribDict raises a KeyError
    with pytest.raises(KeyError):
        ad.popitem()
def test_get_paz(self):
    t = UTCDateTime('20090808')
    c = self.client
    # test the deprecated call too for one/two releases
    data = c.station.get_paz('BW.MANZ..EHZ', t)
    self.assertEqual(data['zeros'], [0j, 0j])
    self.assertEqual(data['sensitivity'], 2516800000.0)
    self.assertEqual(len(data['poles']), 5)
    self.assertEqual(data['poles'][0], (-0.037004 + 0.037016j))
    self.assertEqual(data['poles'][1], (-0.037004 - 0.037016j))
    self.assertEqual(data['poles'][2], (-251.33 + 0j))
    self.assertEqual(data['poles'][3],
                     (-131.03999999999999 - 467.29000000000002j))
    self.assertEqual(data['poles'][4],
                     (-131.03999999999999 + 467.29000000000002j))
    self.assertEqual(data['gain'], 60077000.0)
    # test some not allowed wildcards
    t = UTCDateTime('20120501')
    self.assertRaises(ValueError, c.station.get_paz, "BW.RLAS..BJ*", t)
    self.assertRaises(ValueError, c.station.get_paz, "BW.RLAS..*", t)
    self.assertRaises(ValueError, c.station.get_paz, "BW.RLAS..BJ?", t)
    self.assertRaises(ValueError, c.station.get_paz, "BW.R*..BJZ", t)
    # test with a XSEED file with a referenced PAZ response info (see #364)
    t = UTCDateTime("2012-05-10")
    result = AttribDict({'gain': 1.0, 'poles': [0j],
                         'sensitivity': 6319100000000.0,
                         'digitizer_gain': 1000000.0,
                         'seismometer_gain': 6319100.0, 'zeros': [0j]})
    data = c.station.get_paz("BW.RLAS..BJZ", t)
    self.assertEqual(data, result)
def sachdr2sitechan(header):
    """
    Provide a sac header dictionary, get a sitechan table dictionary.
    """
    sac_sitechan = [('kstnm', 'sta'), ('kcmpnm', 'chan'), ('cmpaz', 'hang'),
                    ('cmpinc', 'vang'), ('stdp', 'edepth')]
    sitechandict = AttribDict()
    for hdr, col in sac_sitechan:
        val = header.get(hdr, None)
        sitechandict[col] = val if val != SACDEFAULT[hdr] else None
    try:
        sitechandict['edepth'] /= 1000.0
    except (TypeError, KeyError):
        # edepth is None or missing
        pass
    sitechandict = _cast_float(sitechandict, ['hang', 'vang', 'edepth'])
    sitechandict = _clean_str(sitechandict, ['sta', 'chan'])
    sitechandict['sta'] = sitechandict['sta'].strip()[:6]
    return [sitechandict] or []
def export_sac(db, filename, pair, components, filterid, corr, ncorr=0,
               sac_format=None, maxlag=None, cc_sampling_rate=None):
    if sac_format is None:
        sac_format = get_config(db, "sac_format")
    if maxlag is None:
        maxlag = float(get_config(db, "maxlag"))
    if cc_sampling_rate is None:
        cc_sampling_rate = float(get_config(db, "cc_sampling_rate"))
    try:
        os.makedirs(os.path.split(filename)[0])
    except:
        pass
    filename += ".SAC"
    mytrace = Trace(data=corr)
    mytrace.stats['station'] = pair
    mytrace.stats['sampling_rate'] = cc_sampling_rate

    mytrace.stats.sac = AttribDict()
    mytrace.stats.sac.b = -maxlag
    mytrace.stats.sac.depmin = np.min(corr)
    mytrace.stats.sac.depmax = np.max(corr)
    mytrace.stats.sac.depmen = np.mean(corr)
    mytrace.stats.sac.scale = 1
    mytrace.stats.sac.npts = len(corr)

    st = Stream(traces=[mytrace, ])
    st.write(filename, format='SAC')
    del st
    return
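A hedged usage sketch for the exporter above. The database session and get_config lookups are bypassed by passing all optional arguments explicitly (so db is never queried), and the pair/filter values and output path are made-up placeholders; it assumes the function's own module-level imports (os, numpy, obspy) are available.

import numpy as np

corr = np.random.randn(2401).astype(np.float32)   # fake cross-correlation
export_sac(db=None, filename="output/ZZ/S1_S2", pair="S1_S2",
           components="ZZ", filterid=1, corr=corr,
           sac_format="sac", maxlag=120.0, cc_sampling_rate=20.0)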
def insert_coordinates(stream, coordFile):
    """
    Helper function to write coordinate details into an ObsPy Stream object's
    headers from a text file for array analysis.

    :type stream: :class:`~obspy.core.stream.Stream`
    :param stream: ObsPy Stream object containing data for each array station.
    :type coordFile: str
    :param coordFile: Tab-separated text file with columns trace.id,
        longitude, latitude and elevation.
    :return: Stream object, where each trace.stats contains an
        obspy.core.util.AttribDict with 'latitude', 'longitude' (in degrees)
        and 'elevation' (in km), or 'x', 'y', 'elevation' (in km)
        items/attributes.
    """
    with open(coordFile, 'r') as coordinates:
        for line in coordinates:
            c = line.strip('\n').split('\t')
            for tr in stream:
                if tr.id == c[0]:
                    tr.stats.coordinates = AttribDict({
                        'latitude': float(c[2]),
                        'elevation': float(c[3]),
                        'longitude': float(c[1])})
    return stream
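A hedged sketch of how the coordinate file is expected to look and how the helper is called; the trace IDs, coordinate values and file names are placeholders.

# coords.txt (tab-separated): trace.id <TAB> longitude <TAB> latitude <TAB> elevation
# BW.STA1..EHZ	11.58	48.14	0.52
# BW.STA2..EHZ	11.60	48.15	0.49

from obspy import read

st = read('array_data.mseed')             # hypothetical waveform file
st = insert_coordinates(st, 'coords.txt')
print(st[0].stats.coordinates.latitude)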
def write_sac(stream, sac_info, inv, suffix, debug):
    for tr in stream:
        chan_info = inv.get_channel_metadata(tr.id)
        tr.stats.sac = AttribDict()
        tr.stats.sac.stla = chan_info['latitude']
        tr.stats.sac.stlo = chan_info['longitude']
        tr.stats.sac.stel = chan_info['elevation']
        tr.stats.sac.stdp = chan_info['local_depth']
        tr.stats.sac.cmpaz = chan_info['azimuth']
        tr.stats.sac.cmpinc = chan_info['dip']
        # begin time: offset of the trace start relative to the origin time
        b_offset = float(tr.stats.starttime - sac_info['otime'])
        tr.stats.sac.b = b_offset
        tr.stats.sac.o = 0.0
        # tr.stats.sac.o = float(tr.stats.starttime - sac_info['otime'])
        tr.stats.sac.evlo = sac_info['evlo']
        tr.stats.sac.evla = sac_info['evla']
        tr.stats.sac.evdp = sac_info['evdp']
        tr.stats.sac.evel = sac_info['evel']
        tr.stats.sac.khole = tr.stats.location
        t = tr.stats.starttime.strftime('%Y%j%H%M%S')
        # outfile = sac_info['outdir'] + "/" + tr.id + "." + t + suffix
        # outfile = sac_info['outdir'] + "/" + sac_info['evid'] + "." + tr.id + suffix
        outfile = f"{sac_info['outdir']}/{sac_info['evid']}.{tr.id}{suffix}"
        outfile = Path(outfile)
        if debug > 0:
            print("*** writing files: ", outfile)
        tr.write(filename=outfile.as_posix(), format='SAC')
def update_stats(tr, stla, stlo, stel, cha):
    """
    Function to include SAC metadata to :class:`~obspy.core.Trace` objects

    Parameters
    ----------
    tr : :class:`~obspy.core.Trace` object
        Trace object to update
    stla : float
        Latitude of station
    stlo : float
        Longitude of station
    stel : float
        Elevation of station
    cha : str
        Channel for component

    Returns
    -------
    tr : :class:`~obspy.core.Trace` object
        Updated trace object

    """
    tr.stats.sac = AttribDict()
    tr.stats.sac.stla = stla
    tr.stats.sac.stlo = stlo
    tr.stats.sac.stel = stel
    tr.stats.channel = cha
    return tr
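A hedged usage sketch for update_stats; the coordinates and channel code below are placeholders, and only ObsPy and NumPy are assumed to be installed.

import numpy as np
from obspy import Trace

tr = Trace(data=np.zeros(100))
tr = update_stats(tr, stla=48.14, stlo=11.58, stel=565.0, cha='BHZ')
print(tr.stats.sac.stla, tr.stats.channel)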
def info(self):
    try:
        return self.__cached_info
    except AttributeError:
        # not cached yet
        pass
    self.__cached_info = AttribDict(self._get_info())
    return self.__cached_info
def _convert_to_stream(receiver, components, data, dt_out, starttime,
                       add_band_code=True):
    # Convert to an ObsPy Stream object.
    st = Stream()
    band_code = get_band_code(dt_out)
    instaseis_header = AttribDict(mu=data["mu"])
    for comp in components:
        tr = Trace(
            data=data[comp],
            header={
                "delta": dt_out,
                "starttime": starttime,
                "station": receiver.station,
                "network": receiver.network,
                "location": receiver.location,
                "channel": add_band_code * (band_code + "X") + comp,
                "instaseis": instaseis_header,
            },
        )
        st += tr
    return st
def read(cls, filename):
    """
    Read station file and return instance of Stations.

    Format has to be like in Stations.example.
    """
    with open(filename, 'r') as file_:
        filedata = file_.read()
    # Create an iterator over matches in the Stations file
    st_matches = re.finditer(cls.regex, filedata, re.VERBOSE + re.MULTILINE)
    # Create a list of dictionaries of PDE data
    st_list = [i.groupdict() for i in st_matches]
    st_dic = {}
    for i in st_list:
        st_dic[i['name']] = AttribDict({
            'latitude': float(i['latitude']),
            'longitude': float(i['longitude']),
            'info': i['info']})
        try:
            st_dic[i['name']]['elevation'] = float(i['elevation'])
        except TypeError:
            pass
    log.info('Read station information of stations %s from file %s'
             % (' '.join(sorted(st_dic.keys())), filename))
    return cls(st_dic)
def parseFreeForm(self, free_form_str, attrib_dict):
    """
    Parse the free form section stored in free_form_str and save it in
    attrib_dict.
    """
    # Separate the strings.
    strings = free_form_str.split(self.string_terminator)
    # This is not fully according to the SEG-2 format specification (or
    # rather the specification only speaks about an offset of 2 bytes
    # between strings and a string_terminator between two free form
    # strings). The files at hand show the following separation between
    # two strings: 'random offset byte', 'string_terminator',
    # 'random offset byte'.
    # Therefore every string has to be at least 3 bytes wide to be
    # acceptable after being split at the string terminator.
    strings = [_i for _i in strings if len(_i) >= 3]
    # Every string has the structure OPTION<SPACE>VALUE. Write to
    # stream.stats attribute.
    for string in strings:
        string = string.strip()
        string = string.split(' ')
        key = string[0].strip()
        value = ' '.join(string[1:]).strip()
        setattr(attrib_dict, key, value)
    # Parse the notes string again.
    if hasattr(attrib_dict, 'NOTE'):
        notes = attrib_dict.NOTE.split(self.line_terminator)
        attrib_dict.NOTE = AttribDict()
        for note in notes:
            note = note.strip()
            note = note.split(' ')
            key = note[0].strip()
            value = ' '.join(note[1:]).strip()
            setattr(attrib_dict.NOTE, key, value)
def setUp(self):
    self.event = read_events()[0]
    self.station = AttribDict({
        'latitude': 41.818 - 66.7,
        'longitude': 79.689,
        'elevation': 365.4})
def on_station_view_itemClicked(self, item, column):
    t = item.type()

    def get_station(item):
        station = item.parent().text(0)
        if "." not in station:
            station = item.parent().parent().text(0) + "." + station
        return station

    if t == STATION_VIEW_ITEM_TYPES["NETWORK"]:
        pass
    elif t == STATION_VIEW_ITEM_TYPES["STATION"]:
        pass
    elif t == STATION_VIEW_ITEM_TYPES["STATIONXML"]:
        station = get_station(item)
        for v in self._open_files.values():
            if (station in v["contents"]
                    and v["contents"][station]["has_stationxml"]):
                try:
                    v["ds"].waveforms[station].StationXML.plot_response(0.001)
                except Exception:
                    continue
                break
        else:
            msg_box = QtGui.QMessageBox()
            msg_box.setText("Could not find StationXML document.")
            msg_box.exec_()
        return
    elif t == STATION_VIEW_ITEM_TYPES["WAVEFORM"]:
        station = get_station(item)
        tag = item.text(0)
        self._state["current_waveform_tag"] = tag
        self._state["current_station_objects"] = {}
        st = obspy.Stream()
        for filename, info in self._open_files.items():
            if station not in info["ds"].waveforms:
                continue
            _station = info["ds"].waveforms[station]
            self._state["current_station_objects"][filename] = _station
            if tag not in _station:
                continue
            # Store the color for each trace.
            _st = _station[tag]
            for tr in _st:
                tr.stats.__color = info["color"]
                tr.stats.sextant = AttribDict()
                tr.stats.sextant.filename = filename
            st += _st
        self.st = st
        self.update_waveform_plot()
    else:
        pass
def attach_paz(tr, paz_file):
    '''
    Attach tr.stats.paz AttribDict to trace from GSE2 paz_file

    This is experimental code, nevertheless it might be useful. It makes
    several assumptions on the gse2 paz format which are valid for the
    geophysical observatory in Fuerstenfeldbruck but might be wrong in
    other cases.

    Attaches to a trace a paz AttribDict containing poles, zeros and gain.
    The A0_normalization_factor is set to 1.0.

    :param tr: An ObsPy trace object containing the calib and gse2 calper
        attributes
    :param paz_file: path to pazfile or file pointer

    >>> from obspy.core import Trace
    >>> import io
    >>> tr = Trace(header={'calib': .094856, 'gse2': {'calper': 1}})
    >>> f = io.StringIO(
    ... """CAL1 RJOB LE-3D Z M24 PAZ 010824 0001
    ... 2
    ... -4.39823 4.48709
    ... -4.39823 -4.48709
    ... 3
    ... 0.0 0.0
    ... 0.0 0.0
    ... 0.0 0.0
    ... 0.4""")
    >>> attach_paz(tr, f)
    >>> print(round(tr.stats.paz.sensitivity / 10E3) * 10E3)
    671140000.0
    '''
    poles, zeros, seismometer_gain = read_paz(paz_file)
    # remove zero at 0,0j to undo integration in GSE PAZ
    for i, zero in enumerate(list(zeros)):
        if zero == complex(0, 0j):
            zeros.pop(i)
            break
    else:
        raise Exception("Could not remove (0,0j) zero to undo GSE integration")
    # ftp://www.orfeus-eu.org/pub/software/conversion/GSE_UTI/gse2001.pdf
    # page 3
    calibration = tr.stats.calib * 2 * np.pi / tr.stats.gse2.calper
    # fill up ObsPy Poles and Zeros AttribDict
    tr.stats.paz = AttribDict()
    # convert seismometer gain from [muVolt/nm/s] to [Volt/m/s]
    tr.stats.paz.seismometer_gain = seismometer_gain * 1e3
    # convert digitizer gain [count/muVolt] to [count/Volt]
    tr.stats.paz.digitizer_gain = 1e6 / calibration
    tr.stats.paz.poles = poles
    tr.stats.paz.zeros = zeros
    tr.stats.paz.sensitivity = tr.stats.paz.digitizer_gain * \
        tr.stats.paz.seismometer_gain
    # A0_normalization_factor convention for gse2 paz in Observatory in FFB
    tr.stats.paz.gain = 1.0
def test_in_place(self):
    self.sr = 10  # sampling rate
    st = AttribDict({'sampling_rate': self.sr})
    testtro = Trace(np.ones(1000), header=st)
    testtr = testtro.copy()
    self.assertEqual(testtr, testtro)
    pu.cos_taper(testtr, 5, False)
    self.assertNotEqual(testtr, testtro)
def test_compare_with_dict(self):
    """
    Checks if AttribDict is still comparable to a dict object.
    """
    adict = {'test': 1}
    ad = AttribDict(adict)
    self.assertEqual(ad, adict)
    self.assertEqual(adict, ad)
def test_init_argument(self):
    """
    Tests initialization of AttribDict with various arguments.
    """
    # one dict works as expected
    ad = AttribDict({'test': 1})
    assert ad.test == 1
    # multiple dicts result in a TypeError
    with pytest.raises(TypeError):
        AttribDict({}, {})
    with pytest.raises(TypeError):
        AttribDict({}, {}, blah=1)
    # non-dicts result in a TypeError
    with pytest.raises(TypeError):
        AttribDict(1)
    with pytest.raises(TypeError):
        AttribDict(object())
def test_compare_with_dict(self):
    """
    Checks if AttribDict is still comparable to a dict object.
    """
    adict = {'test': 1}
    ad = AttribDict(adict)
    assert ad == adict
    assert adict == ad
def sachdr2arrival(header, pickmap=None):
    """Similar to sachdr2assoc, but produces a list of up to 10 Arrival
    dictionaries.

    Same header->phase mapping applies, unless otherwise stated.

    """
    # puts t[0-9] times into arrival.time if they're not null
    # puts corresponding kt[0-9] phase name into arrival.iphase
    # if a kt[0-9] phase name is null and its t[0-9] value isn't,
    # phase names are pulled from the pick2phase dictionary
    pick2phase = {'t0': 'P', 't1': 'Pn', 't2': 'Pg', 't3': 'S',
                  't4': 'Sn', 't5': 'Sg', 't6': 'Lg', 't7': 'LR',
                  't8': 'Rg', 't9': 'pP'}
    if pickmap:
        pick2phase.update(pickmap)

    # simple translations
    arrivaldict = AttribDict()
    if header.get('kstnm', None) not in (SACDEFAULT['kstnm'], None):
        arrivaldict['sta'] = header['kstnm']
    if header.get('kcmpnm', None) not in (SACDEFAULT['kcmpnm'], None):
        arrivaldict['chan'] = header['kcmpnm']

    # phases and arrival times
    t0 = get_sac_reftime(header)
    arrivals = []
    for key in pick2phase:
        kkey = 'k' + key
        # if there's a value in t[0-9]
        if header.get(key, None) not in (SACDEFAULT[key], None):
            # pick times are stored relative to the SAC reference time
            itime = t0 + header[key]
            iarrival = {'time': itime.timestamp,
                        'jdate': int(itime.strftime('%Y%j'))}
            # if the phase name kt[0-9] is null
            if header[kkey] == SACDEFAULT[kkey]:
                # take it from the pick2phase map
                iarrival['iphase'] = pick2phase[key]
            else:
                # take it directly
                iarrival['iphase'] = header[kkey]
            iarrival.update(arrivaldict)
            arrivals.append(iarrival)

    return arrivals
def test_inside_geobounds(self):
    obj = AttribDict()
    obj.latitude = 48.8566
    obj.longitude = 2.3522
    ret = inside_geobounds(obj, minlatitude=48, maxlatitude=49,
                           minlongitude=2, maxlongitude=3)
    self.assertTrue(ret)
    ret = inside_geobounds(obj, latitude=48, longitude=2,
                           minradius=1, maxradius=2)
    self.assertFalse(ret)
    # Test for wrapping around longitude +/- 180°
    obj.latitude = -41.2865
    obj.longitude = 174.7762
    ret = inside_geobounds(obj, minlongitude=170, maxlongitude=-170)
    self.assertTrue(ret)
    obj.longitude = -175.
    ret = inside_geobounds(obj, minlongitude=170, maxlongitude=-170)
    self.assertTrue(ret)
    ret = inside_geobounds(obj, minlongitude=170, maxlongitude=190)
    self.assertTrue(ret)
def test_clear(self):
    """
    Tests clear method of AttribDict class.
    """
    ad = AttribDict()
    ad.test = 1
    ad['test2'] = 'test'
    # removing via clear
    ad.clear()
    self.assertFalse('test' in ad)
    self.assertFalse('test2' in ad)
    self.assertFalse('test' in ad.__dict__)
    self.assertFalse('test2' in ad.__dict__)
    self.assertFalse(hasattr(ad, 'test'))
    self.assertFalse(hasattr(ad, 'test2'))
    # class attributes should still be present
    self.assertTrue(hasattr(ad, 'readonly'))
    self.assertTrue(hasattr(ad, 'defaults'))
def test_pop(self):
    """
    Tests pop method of AttribDict class.
    """
    ad = AttribDict()
    ad.test = 1
    ad['test2'] = 'test'
    # removing via pop
    temp = ad.pop('test')
    self.assertEqual(temp, 1)
    self.assertFalse('test' in ad)
    self.assertIn('test2', ad)
    self.assertFalse('test' in ad.__dict__)
    self.assertIn('test2', ad.__dict__)
    self.assertFalse(hasattr(ad, 'test'))
    self.assertTrue(hasattr(ad, 'test2'))
    # using pop() for a non-existing element raises a KeyError
    self.assertRaises(KeyError, ad.pop, 'test')
def test_clear(self):
    """
    Tests clear method of AttribDict class.
    """
    ad = AttribDict()
    ad.test = 1
    ad["test2"] = "test"
    # removing via clear
    ad.clear()
    self.assertFalse("test" in ad)
    self.assertFalse("test2" in ad)
    self.assertFalse("test" in ad.__dict__)
    self.assertFalse("test2" in ad.__dict__)
    self.assertFalse(hasattr(ad, "test"))
    self.assertFalse(hasattr(ad, "test2"))
    # class attributes should still be present
    self.assertTrue(hasattr(ad, "readonly"))
    self.assertTrue(hasattr(ad, "defaults"))
def test_pop(self):
    """
    Tests pop method of AttribDict class.
    """
    ad = AttribDict()
    ad.test = 1
    ad["test2"] = "test"
    # removing via pop
    temp = ad.pop("test")
    self.assertEqual(temp, 1)
    self.assertFalse("test" in ad)
    self.assertTrue("test2" in ad)
    self.assertFalse("test" in ad.__dict__)
    self.assertTrue("test2" in ad.__dict__)
    self.assertFalse(hasattr(ad, "test"))
    self.assertTrue(hasattr(ad, "test2"))
    # using pop() for a non-existing element raises a KeyError
    self.assertRaises(KeyError, ad.pop, "test")
def _write_format_specific_header(self, format):
    st = self.stats
    format = format.lower()
    if format == 'q':
        format = 'sh'
    elif format == 'h5':
        return
    elif format == 'sac':
        # workaround for obspy issue 1285, fixed in obspy v1.0.1
        # and obspy issue 1457, fixed in obspy v1.0.2
        # https://github.com/obspy/obspy/pull/1285
        # https://github.com/obspy/obspy/pull/1457
        from obspy.io.sac.util import obspy_to_sac_header
        self.stats.pop('sac', None)
        self.stats.sac = obspy_to_sac_header(self.stats)
        # workaround for obspy issue 1507, introduced in obspy v1.0.2
        # and fixed in obspy v1.0.3
        self.stats.sac.lpspol = True
        self.stats.sac.lcalda = False
    try:
        header_map = zip(_HEADERS, _FORMATHEADERS[format])
    except KeyError:
        if format != 'h5':
            msg = ("rf in-/output of file format '%s' is not supported"
                   % format)
            warnings.warn(msg)
        return
    if format not in st:
        st[format] = AttribDict({})
    if format == 'sh':
        comment = {}
    for head, head_format in header_map:
        if format == 'sh' and head_format == 'COMMENT':
            try:
                comment[head] = st[head]
            except KeyError:
                pass
            continue
        try:
            val = st[head]
        except KeyError:
            continue
        try:
            convert = _HEADER_CONVERSIONS[format][head][1]
            val = convert(st, head)
        except KeyError:
            pass
        st[format][head_format] = val
    if format == 'sh' and len(comment) > 0:
        # workaround for obspy issue #1456, fixed in obspy v1.0.2
        # https://github.com/obspy/obspy/pull/1456
        for k, v in comment.items():
            try:
                comment[k] = np.asscalar(v)
            except AttributeError:
                pass
        st[format]['COMMENT'] = json.dumps(comment, separators=(',', ':'))
def test_delete(self):
    """
    Tests delete method of AttribDict class.
    """
    ad = AttribDict()
    ad.test = 1
    ad["test2"] = "test"
    # deleting test using dictionary
    del ad["test"]
    self.assertFalse("test" in ad)
    self.assertTrue("test2" in ad)
    self.assertFalse("test" in ad.__dict__)
    self.assertTrue("test2" in ad.__dict__)
    self.assertFalse(hasattr(ad, "test"))
    self.assertTrue(hasattr(ad, "test2"))
    # deleting test2 using attribute
    del ad.test2
    self.assertFalse("test2" in ad)
    self.assertFalse("test2" in ad.__dict__)
    self.assertFalse(hasattr(ad, "test2"))
def test_delete(self):
    """
    Tests delete method of AttribDict class.
    """
    ad = AttribDict()
    ad.test = 1
    ad['test2'] = 'test'
    # deleting test using dictionary
    del ad['test']
    self.assertFalse('test' in ad)
    self.assertIn('test2', ad)
    self.assertFalse('test' in ad.__dict__)
    self.assertIn('test2', ad.__dict__)
    self.assertFalse(hasattr(ad, 'test'))
    self.assertTrue(hasattr(ad, 'test2'))
    # deleting test2 using attribute
    del ad.test2
    self.assertFalse('test2' in ad)
    self.assertFalse('test2' in ad.__dict__)
    self.assertFalse(hasattr(ad, 'test2'))
def test_defaults(self):
    """
    Tests default of __getitem__/__getattr__ methods of AttribDict class.
    """
    # 1
    ad = AttribDict()
    ad['test'] = 'NEW'
    self.assertEqual(ad.__getitem__('test'), 'NEW')
    self.assertEqual(ad.__getitem__('xxx', 'blub'), 'blub')
    self.assertEqual(ad.__getitem__('test', 'blub'), 'NEW')
    self.assertEqual(ad.__getattr__('test'), 'NEW')
    self.assertEqual(ad.__getattr__('xxx', 'blub'), 'blub')
    self.assertEqual(ad.__getattr__('test', 'blub'), 'NEW')
    # should raise KeyError without default item
    self.assertRaises(KeyError, ad.__getitem__, 'xxx')
    self.assertRaises(AttributeError, ad.__getattr__, 'xxx')
    # 2
    ad2 = AttribDict(defaults={'test2': 'NEW'})
    self.assertEqual(ad2.__getitem__('test2'), 'NEW')
    self.assertRaises(KeyError, ad2.__getitem__, 'xxx')
def _read_SES3D(fh, headonly=False):
    """
    Internal SES3D parsing routine.
    """
    # Import here to avoid circular imports.
    from obspy.core import AttribDict, Trace, Stream

    # Read the header.
    component = fh.readline().split()[0].lower()
    npts = int(fh.readline().split()[-1])
    delta = float(fh.readline().split()[-1])
    # Skip receiver location line.
    fh.readline()
    rec_loc = fh.readline().split()
    rec_x, rec_y, rec_z = map(float, [rec_loc[1], rec_loc[3], rec_loc[5]])
    # Skip the source location line.
    fh.readline()
    src_loc = fh.readline().split()
    src_x, src_y, src_z = map(float, [src_loc[1], src_loc[3], src_loc[5]])

    # Read the data.
    if headonly is False:
        # Wrap the map iterator in list() so numpy builds a proper float
        # array (a bare map object would yield a 0-d object array).
        data = np.array(list(map(float, fh.readlines())), dtype="float32")
    else:
        data = np.array([])

    ses3d = AttribDict()
    ses3d.receiver_latitude = rotations.colat2lat(rec_x)
    ses3d.receiver_longitude = rec_y
    ses3d.receiver_depth_in_m = rec_z
    ses3d.source_latitude = rotations.colat2lat(src_x)
    ses3d.source_longitude = src_y
    ses3d.source_depth_in_m = src_z

    header = {"delta": delta, "channel": COMPONENT_MAP[component],
              "ses3d": ses3d, "npts": npts}

    # Set up the ObsPy Stream/Trace structure.
    tr = Trace(data=data, header=header)

    # Small check.
    if headonly is False and npts != tr.stats.npts:
        msg = ("The sample count specified in the header does not match "
               "the actual data count.")
        warnings.warn(msg)
    return Stream(traces=[tr])
def test_defaults(self):
    """
    Tests default of __getitem__/__getattr__ methods of AttribDict class.
    """
    # 1
    ad = AttribDict()
    ad["test"] = "NEW"
    self.assertEqual(ad.__getitem__("test"), "NEW")
    self.assertEqual(ad.__getitem__("xxx", "blub"), "blub")
    self.assertEqual(ad.__getitem__("test", "blub"), "NEW")
    self.assertEqual(ad.__getattr__("test"), "NEW")
    self.assertEqual(ad.__getattr__("xxx", "blub"), "blub")
    self.assertEqual(ad.__getattr__("test", "blub"), "NEW")
    # should raise KeyError without default item
    self.assertRaises(KeyError, ad.__getitem__, "xxx")
    self.assertRaises(KeyError, ad.__getattr__, "xxx")
def test_pretty_str(self):
    """
    Test _pretty_str method of AttribDict.
    """
    # 1
    ad = AttribDict({'test1': 1, 'test2': 2})
    out = ' test1: 1\n test2: 2'
    self.assertEqual(ad._pretty_str(), out)
    # 2
    ad = AttribDict({'test1': 1, 'test2': 2})
    out = ' test2: 2\n test1: 1'
    self.assertEqual(ad._pretty_str(priorized_keys=['test2']), out)
    # 3
    ad = AttribDict({'test1': 1, 'test2': 2})
    out = ' test1: 1\n test2: 2'
    self.assertEqual(ad._pretty_str(min_label_length=6), out)
def rfstats(obj=None, event=None, station=None, phase='P',
            dist_range='default', tt_model='iasp91',
            pp_depth=None, pp_phase=None, model='iasp91'):
    """
    Calculate ray specific values like slowness for given event and station.

    :param obj: `~obspy.core.trace.Stats` object with event and/or station
        attributes. Can be None if both event and station are given.
        It is possible to specify a stream object, too. Then, rfstats will be
        called for each Trace.stats object and traces outside dist_range will
        be discarded.
    :param event: ObsPy `~obspy.core.event.event.Event` object
    :param station: dictionary like object with items latitude, longitude and
        elevation
    :param phase: string with phase. Usually this will be 'P' or 'S' for
        P and S receiver functions, respectively.
    :type dist_range: tuple of length 2
    :param dist_range: if the epicentral distance of the event is not in this
        interval, None is returned by this function,\n
        if phase == 'P' defaults to (30, 90),\n
        if phase == 'S' defaults to (50, 85)
    :param tt_model: model for travel time calculation.
        (see the `obspy.taup` module, default: iasp91)
    :param pp_depth: Depth for piercing point calculation
        (in km, default: None -> No calculation)
    :param pp_phase: Phase for pp calculation (default: 'S' for P-receiver
        function and 'P' for S-receiver function)
    :param model: Path to model file for pp calculation
        (see `.SimpleModel`, default: iasp91)
    :return: `~obspy.core.trace.Stats` object with event and station
        attributes, distance, back_azimuth, inclination, onset and slowness
        or None if epicentral distance is not in the given interval.
        Stream instance if stream was specified instead of stats.
    """
    if isinstance(obj, (Stream, RFStream)):
        stream = obj
        kwargs = {'event': event, 'station': station, 'phase': phase,
                  'dist_range': dist_range, 'tt_model': tt_model,
                  'pp_depth': pp_depth, 'pp_phase': pp_phase, 'model': model}
        traces = []
        for tr in stream:
            if rfstats(tr.stats, **kwargs) is not None:
                traces.append(tr)
        stream.traces = traces
        return stream
    if dist_range == 'default' and phase.upper() in 'PS':
        dist_range = (30, 90) if phase.upper() == 'P' else (50, 85)
    stats = AttribDict({}) if obj is None else obj
    if event is not None and station is not None:
        stats.update(obj2stats(event=event, station=station))
    dist, baz, _ = gps2dist_azimuth(stats.station_latitude,
                                    stats.station_longitude,
                                    stats.event_latitude,
                                    stats.event_longitude)
    dist = dist / 1000 / DEG2KM
    if dist_range and not dist_range[0] <= dist <= dist_range[1]:
        return
    tt_model = TauPyModel(model=tt_model)
    arrivals = tt_model.get_travel_times(stats.event_depth, dist, (phase,))
    if len(arrivals) == 0:
        raise Exception('TauPy does not return phase %s at distance %s'
                        % (phase, dist))
    if len(arrivals) > 1:
        msg = ('TauPy returns more than one arrival for phase %s at '
               'distance %s -> take first arrival')
        warnings.warn(msg % (phase, dist))
    arrival = arrivals[0]
    onset = stats.event_time + arrival.time
    inc = arrival.incident_angle
    slowness = arrival.ray_param_sec_degree
    stats.update({'distance': dist, 'back_azimuth': baz, 'inclination': inc,
                  'onset': onset, 'slowness': slowness, 'phase': phase})
    if pp_depth is not None:
        model = load_model(model)
        if pp_phase is None:
            pp_phase = 'S' if phase.upper().endswith('P') else 'P'
        model.ppoint(stats, pp_depth, phase=pp_phase)
    return stats
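A hedged usage sketch for rfstats. It assumes the surrounding rf package helpers used above (obj2stats, load_model, RFStream) are importable, and the waveform, QuakeML and StationXML file names are placeholders.

from obspy import read, read_events, read_inventory

st = read('waveforms.mseed')            # hypothetical data file
event = read_events('event.xml')[0]     # hypothetical QuakeML file
inv = read_inventory('stations.xml')    # hypothetical StationXML file
coords = inv.get_coordinates(st[0].id)  # dict with latitude/longitude/elevation
stats = rfstats(st[0].stats, event=event, station=coords,
                phase='P', pp_depth=50.)
if stats is not None:
    print(stats.distance, stats.slowness, stats.onset)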
NET = "BW"
STATIONS = ("KW1", "KW2", "KW3")
CHANNEL = "EHZ"
# search from 2h before now to 1h before now
T1 = UTCDateTime() - (60 * 60 * 2)
T2 = T1 + (60 * 60 * 1) + 30

PAR = dict(LOW=10.0,          # bandpass low corner
           HIGH=20.0,         # bandpass high corner
           STA=0.5,           # length of sta in seconds
           LTA=10,            # length of lta in seconds
           ON=3.5,            # trigger on threshold
           OFF=1,             # trigger off threshold
           ALLOWANCE=1.2,     # time in seconds to extend trigger-off time
           MAXLEN=10,         # maximum trigger length in seconds
           MIN_STATIONS=3)    # minimum of coinciding stations for alert
PAR = AttribDict(PAR)

SUMMARY = "/scratch/kw_trigger/kw_trigger.txt"
PLOTDIR = "/scratch/kw_trigger/"
MAILTO = ["megies"]

client = Client("http://10.153.82.3:8080", timeout=60)

st = Stream()
num_stations = 0
exceptions = []
for station in STATIONS:
    try:
        # we request 60s more at start and end and cut them off later to avoid
        # a false trigger due to the tapering during instrument correction
        tmp = client.waveform.getWaveform(NET, station, "", CHANNEL,
                                          T1 - 180, T2 + 180, getPAZ=True,
def read_xh_0_98(filename, byte_order):
    """
    Reads the given file to an ObsPy Stream object for the XH format
    version 0.98.

    :param filename: The file to read.
    :type filename: str
    """
    st = obspy.Stream()
    with io.open(filename, "rb") as fh:
        while True:
            header = fh.read(1024)
            if len(header) < 1024:
                break
            header = np.frombuffer(
                header, dtype=header_0_98.get_header_dtype(byte_order))
            header = _record_array_to_dict(header)

            # Convert both times.
            ref_origin_time = obspy.UTCDateTime(
                header["ot_year"], header["ot_month"], header["ot_day"],
                header["ot_hour"], header["ot_minute"], header["ot_second"])
            starttime = obspy.UTCDateTime(
                header["tstart_year"], header["tstart_month"],
                header["tstart_day"], header["tstart_hour"],
                header["tstart_minute"], header["tstart_second"])

            data = np.frombuffer(fh.read(header["ndata"] * 4),
                                 dtype=byte_order + "f4")

            tr = obspy.Trace(data=data)
            tr.stats.network = header["netw"]
            tr.stats.station = header["stnm"]
            tr.stats.channel = header_0_98.CHANNEL_MAP[header["chid"]]
            tr.stats.location = header_0_98.LOCATION_MAP[header["locc"]]
            # The name 'delta' for the sampling rate is a bit odd but it
            # appears to be interpreted correctly here.
            tr.stats.sampling_rate = header["delta"]
            tr.stats.starttime = starttime

            # The reason why this is here is a bit complicated and has to do
            # with ObsPy interna. It can be anywhere if one uses a stable
            # ObsPy version or does not jump between ObsPy versions which
            # most users don't do.
            from obspy.station.response import (InstrumentSensitivity,
                                                Response,
                                                PolesZerosResponseStage)

            # Build the instrument response.
            # XXX: The instrument response definition in XH is very basic and
            # so the definition is not complete and might not work with ObsPy.
            response = Response()
            sensitivity = InstrumentSensitivity(
                # Random choice, but not really used by anything so it should
                # be ok.
                frequency=1.0,
                # Assume the DS field is the total sensitivity.
                value=header["DS"],
                # This is true for most commonly used instruments.
                input_units="M/S",
                input_units_description="Velocity in Meters Per Second",
                output_units="COUNTS",
                output_units_description="Digital Counts")
            paz = PolesZerosResponseStage(
                stage_sequence_number=1,
                # We assume DS is somehow the total sensitivity. As we have
                # only one stage we also make it the gain of this stage.
                stage_gain_frequency=1.0,
                stage_gain=header["DS"],
                input_units="M/S",
                output_units="V",
                pz_transfer_function_type="LAPLACE (RADIANS/SECOND)",
                # Arbitrary frequency. Usually 1 for most instruments. Must be
                # correct to assure everything works as expected!
                normalization_frequency=1.0,
                normalization_factor=header["A0"],
                zeros=[CustomComplex(_i) for _i in header["zero"]],
                poles=[CustomComplex(_i) for _i in header["pole"]])
            response.instrument_sensitivity = sensitivity
            response.response_stages.append(paz)
            # Attach to trace object.
            tr.stats.response = response

            # Assemble XH specific header.
            xh_header = AttribDict()
            xh_header.reference_time = ref_origin_time
            xh_header.source_latitude = header["elat"]
            xh_header.source_longitude = header["elon"]
            xh_header.source_depth_in_km = header["edep"]
            xh_header.source_body_wave_magnitude = header["Mb"]
            xh_header.source_surface_wave_magnitude = header["Ms"]
            xh_header.source_moment_magnitude = header["Mw"]
            xh_header.receiver_latitude = header["slat"]
            xh_header.receiver_longitude = header["slon"]
            xh_header.receiver_elevation_in_m = header["elev"]
            xh_header.sensor_azimuth = header["azim"]
            xh_header.sensor_inclination = header["incl"]
            xh_header.maximum_amplitude = header["maxamp"]
            xh_header.waveform_quality = header["qual"]
            # XXX: Should this be applied to the seismogram start time?
            xh_header.static_time_shift_in_sec = header["tshift"]
            xh_header.comment = header["rcomment"]
            xh_header.event_code = header["evtcd"]
            xh_header.cmt_code = header["cmtcd"]
            xh_header.phase_picks = header["tpcks"]
            xh_header.floats = header["flt"]
            xh_header.integers = header["intg"]
            xh_header.waveform_type = header["wavf"]

            tr.stats.xh = xh_header

            st.traces.append(tr)
    return st
def test_setdefault(self):
    """
    Tests setdefault method of AttribDict class.
    """
    ad = AttribDict()
    # 1
    default = ad.setdefault('test', 'NEW')
    self.assertEqual(default, 'NEW')
    self.assertEqual(ad['test'], 'NEW')
    self.assertEqual(ad.test, 'NEW')
    self.assertEqual(ad.get('test'), 'NEW')
    self.assertEqual(ad.__getattr__('test'), 'NEW')
    self.assertEqual(ad.__getitem__('test'), 'NEW')
    self.assertEqual(ad.__dict__['test'], 'NEW')
    self.assertEqual(ad.__dict__.get('test'), 'NEW')
    self.assertIn('test', ad)
    self.assertIn('test', ad.__dict__)
    # 2 - existing key should not be overwritten
    default = ad.setdefault('test', 'SOMETHINGDIFFERENT')
    self.assertEqual(default, 'NEW')
    self.assertEqual(ad['test'], 'NEW')
    self.assertEqual(ad.test, 'NEW')
    self.assertEqual(ad.get('test'), 'NEW')
    self.assertEqual(ad.__getattr__('test'), 'NEW')
    self.assertEqual(ad.__getitem__('test'), 'NEW')
    self.assertEqual(ad.__dict__['test'], 'NEW')
    self.assertEqual(ad.__dict__.get('test'), 'NEW')
    self.assertIn('test', ad)
    self.assertIn('test', ad.__dict__)
    # 3 - default value is None
    ad = AttribDict()
    default = ad.setdefault('test')
    self.assertEqual(default, None)
    self.assertEqual(ad['test'], None)
    self.assertEqual(ad.test, None)
    self.assertEqual(ad.get('test'), None)
    self.assertEqual(ad.__getattr__('test'), None)
    self.assertEqual(ad.__getitem__('test'), None)
    self.assertEqual(ad.__dict__['test'], None)
    self.assertEqual(ad.__dict__.get('test'), None)
    self.assertIn('test', ad)
    self.assertIn('test', ad.__dict__)
def test_setdefault(self):
    """
    Tests setdefault method of AttribDict class.
    """
    ad = AttribDict()
    # 1
    default = ad.setdefault("test", "NEW")
    self.assertEqual(default, "NEW")
    self.assertEqual(ad["test"], "NEW")
    self.assertEqual(ad.test, "NEW")
    self.assertEqual(ad.get("test"), "NEW")
    self.assertEqual(ad.__getattr__("test"), "NEW")
    self.assertEqual(ad.__getitem__("test"), "NEW")
    self.assertEqual(ad.__dict__["test"], "NEW")
    self.assertEqual(ad.__dict__.get("test"), "NEW")
    self.assertTrue("test" in ad)
    self.assertTrue("test" in ad.__dict__)
    # 2 - existing key should not be overwritten
    default = ad.setdefault("test", "SOMETHINGDIFFERENT")
    self.assertEqual(default, "NEW")
    self.assertEqual(ad["test"], "NEW")
    self.assertEqual(ad.test, "NEW")
    self.assertEqual(ad.get("test"), "NEW")
    self.assertEqual(ad.__getattr__("test"), "NEW")
    self.assertEqual(ad.__getitem__("test"), "NEW")
    self.assertEqual(ad.__dict__["test"], "NEW")
    self.assertEqual(ad.__dict__.get("test"), "NEW")
    self.assertTrue("test" in ad)
    self.assertTrue("test" in ad.__dict__)
    # 3 - default value is None
    ad = AttribDict()
    default = ad.setdefault("test")
    self.assertEqual(default, None)
    self.assertEqual(ad["test"], None)
    self.assertEqual(ad.test, None)
    self.assertEqual(ad.get("test"), None)
    self.assertEqual(ad.__getattr__("test"), None)
    self.assertEqual(ad.__getitem__("test"), None)
    self.assertEqual(ad.__dict__["test"], None)
    self.assertEqual(ad.__dict__.get("test"), None)
    self.assertTrue("test" in ad)
    self.assertTrue("test" in ad.__dict__)
def obspy_nsc2sacpz(net, sta, cha, input_unit=None):
    """
    Convert an individual ObsPy inventory channel to a sacpz object
    """
    if input_unit is not None:
        input_unit = rectify_unit(input_unit)

    pz = AttribDict(net=net.code, sta=sta.code, loc=cha.location_code,
                    cha=cha.code)
    pz.depth = cha.depth
    pz.start_date = str(cha.start_date)[:19]
    # if cha.end_date is not None:
    #     if cha.end_date.timestamp > 1 << 31:
    #         cha.end_date = None
    if cha.end_date is not None:
        pz["end_date"] = str(cha.end_date)[:19]
    else:
        pz["end_date"] = "2599-12-31T23:59:59"
    # pz["created"] = str(inventory.created)[:19]
    pz.description = cha.description
    pz.dip = cha.dip
    pz.azimuth = cha.azimuth
    pz.fsamp = cha.sample_rate
    pz.lat = cha.latitude
    pz.lon = cha.longitude
    pz.ele = cha.elevation

    pz_stage = None
    for stage in cha.response.response_stages:
        if type(stage) == PolesZerosResponseStage:
            if not stage.pz_transfer_function_type.upper().startswith("LAPLACE"):
                continue
            if len(stage.poles) > 0 or len(stage.zeros) > 0:
                # if pz_stage is None:
                pz_stage = stage
                # else:
                #     sys.stderr.write("%s: more than one PZ stage found - "
                #                      "skipping this one\n" % nslc(pz))
    if not pz_stage:
        return

    pz.a0 = pz_stage.normalization_factor

    if not cha.sensor.manufacturer and not cha.sensor.model:
        # sloppy XML produced by the IRIS fdsnws
        pz.sensor_type = cha.sensor.type
    else:
        # properly populated XML
        pz.sensor_type = "%s %s" % (cha.sensor.manufacturer, cha.sensor.model)

    pz.sni = cha.response.instrument_sensitivity.input_units.upper()
    pz.sno = cha.response.instrument_sensitivity.output_units.upper()
    if not pz.sno or pz.sno == "None":
        sys.stderr.write("Warning: %s: setting empty OutputUnits to "
                         "'COUNTS'\n" % nslc(pz))
        pz.sno = "COUNTS"
    pz.sensitivity_value = cha.response.instrument_sensitivity.value
    pz.sensitivity_frequency = cha.response.instrument_sensitivity.frequency
    pz.sgn = pz_stage.stage_gain
    pz.sgf = pz_stage.stage_gain_frequency
    pz.inu = "%s / %s" % (pz_stage.output_units, pz_stage.input_units)

    if pz_stage.pz_transfer_function_type == "LAPLACE (RADIANS/SECOND)":
        factor = 1
    elif pz_stage.pz_transfer_function_type == "LAPLACE (HERTZ)":
        factor = 6.283185307179586
    else:
        raise TypeError("%s: unknown transfer function type '%s'"
                        % (nslc(pz), pz_stage.pz_transfer_function_type))

    if input_unit is not None:
        if input_unit != pz_stage.input_units:
            dnz = valid_units[pz["sni"]] - valid_units[input_unit]
            if dnz == 0:
                pass  # nothing to do
            elif 0 < dnz <= 2:
                # add one or two zeros
                pz_stage.zeros.extend(dnz * [0.])
            else:
                raise NotImplementedError("removal of zeros not implemented")
        pz.snj = input_unit
    else:
        pz.snj = pz.sni

    zeros = []
    poles = []
    sacpz = "ZEROS %d\n" % len(pz_stage.zeros)
    for zero in pz_stage.zeros:
        zero = complex(zero) * factor
        pz.a0 /= factor
        sacpz += " %+.6e %+.6e\n" % (zero.real, zero.imag)
        zeros.append(zero)
    sacpz += "POLES %d\n" % len(pz_stage.poles)
    for pole in pz_stage.poles:
        pole = complex(pole) * factor
        pz.a0 *= factor
        sacpz += " %+.6e %+.6e\n" % (pole.real, pole.imag)
        poles.append(pole)
    sacpz += "CONSTANT %.6e\n" % (pz["a0"] * pz["sensitivity_value"])
    sacpz = pz_header_template % pz + sacpz

    pz.sacpz = sacpz
    pz.poles = poles
    pz.zeros = zeros
    return pz
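A hedged usage sketch for the converter above. It assumes the module-level helpers it relies on (rectify_unit, nslc, valid_units, pz_header_template) are defined alongside it, and the StationXML file name is a placeholder.

from obspy import read_inventory

inv = read_inventory("stations.xml")   # hypothetical StationXML file
for net in inv:
    for sta in net:
        for cha in sta:
            pz = obspy_nsc2sacpz(net, sta, cha)
            if pz is not None:
                print(pz.sacpz)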