def testEventExtras(self):
    '''Check validation and serialization behaviour of Event.extras.'''
    tempdir = self.make_tempdir()
    eextra = model.Event(lat=12., lon=12.)

    cases = [
        (dict(i=1, f=1.0, n=None, b=True, s='abc', e=eextra), None),
        ({1: 'abc'}, guts.ValidationError),
        ({'e': model.Event(lat=1, lon=1)}, guts.ValidationError)]

    for extras, expected_exc in cases:
        ev1 = model.Event(
            lat=10., lon=11., depth=4000., magnitude=5., extras=extras)

        # the text event format cannot carry extras, regardless of content
        fn = pjoin(tempdir, 'test.events')
        with self.assertRaises(model.EventExtrasDumpError):
            model.dump_events([ev1], fn)

        if expected_exc is None:
            ev1.validate()
            ev2 = guts.load(string=ev1.dump())
            for key in extras:
                assert isinstance(ev2.extras[key], type(extras[key]))
        else:
            with self.assertRaises(expected_exc):
                ev1.validate()
def to_pyrocko_events(catalog):
    '''
    Convert ObsPy catalog object to list of Pyrocko event objects.

    :param catalog: :py:class:`obspy.Catalog <obspy.core.event.Catalog>`
        object
    :returns: list of :py:class:`pyrocko.model.Event` objects or ``None`` if
        catalog is ``None``
    '''
    if catalog is None:
        return None

    from pyrocko import model

    events = []
    for obspy_event in catalog:
        for origin in obspy_event.origins:
            # resource_id is an obspy ResourceIdentifier, not a plain
            # string; convert explicitly so the concatenation cannot
            # raise TypeError.
            events.append(model.Event(
                name=str(obspy_event.resource_id) + str(origin.resource_id),
                time=origin.time.timestamp,
                lat=origin.latitude,
                lon=origin.longitude,
                depth=origin.depth,
                region=origin.region))

    return events
def testProjectionsZOnly(self):
    '''A vertical-only ENU projection must pass BHZ through to U unchanged.'''
    km = 1000.
    origin = model.Event(lat=-10, lon=150., depth=0.0)

    for azimuth in num.linspace(0., 360., 37):
        lat, lon = orthodrome.ne_to_latlon(
            origin.lat, origin.lon,
            10. * km * math.cos(azimuth),
            10. * km * math.sin(azimuth))

        station = model.Station(lat=lat, lon=lon)
        station.set_event_relative_data(origin)
        station.set_channels_by_name('BHZ', 'BHN', 'BHE')

        traces = [trace.Trace(channel='BHZ', ydata=num.array([1.0]))]

        projected = []
        for matrix, in_channels, out_channels \
                in station.guess_projections_to_enu():
            projected.extend(
                trace.project(traces, matrix, in_channels, out_channels))

        def pick(trs, channel):
            for tr in trs:
                if tr.channel == channel:
                    return tr

        z = pick(projected, 'U')
        assert near(z.ydata[0], 1.0, 0.001)
def _get_events_from_file(self):
    '''
    Parse events from the ``rdseed.events`` file in the temporary directory.

    :returns: list of :py:class:`pyrocko.model.Event` objects; empty list
        if the file does not exist.
    :raises Exception: if a line does not have the expected 9 fields.
    '''
    rdseed_event_file = os.path.join(self.tempdir, 'rdseed.events')
    if not os.path.isfile(rdseed_event_file):
        return []

    events = []
    # with-statement guarantees the file is closed even if parsing raises
    with open(rdseed_event_file, 'r') as f:
        for line in f:
            toks = line.split(', ')
            if len(toks) != 9:
                raise Exception('Event description in unrecognized format')

            # drop fractional seconds before parsing the timestamp
            datetime = toks[1].split('.')[0]
            fmt = '%Y/%m/%d %H:%M:%S'  # renamed from 'format' (builtin)
            secs = calendar.timegm(time.strptime(datetime, fmt))

            events.append(model.Event(
                lat=float(toks[2]),
                lon=float(toks[3]),
                depth=float(toks[4]) * 1000.,   # km -> m
                magnitude=float(toks[8]),
                time=secs))

    return events
def get_event(self, name):
    '''Return catalog event *name*, fetching and caching it if necessary.'''
    logger.debug('In Geofon.get_event("%s")' % name)

    if name not in self.events:
        url = 'http://geofon.gfz-potsdam.de/eqinfo/event.php?id=%s' % name
        logger.debug('Opening URL: %s' % url)
        page = urlopen(url).read()
        logger.debug('Received page (%i bytes)' % len(page))

        try:
            d = self._parse_event_page(page)
            event = model.Event(
                lat=d['epicenter'][0],
                lon=d['epicenter'][1],
                time=d['time'],
                name=name,
                depth=d['depth'],
                magnitude=d['magnitude'],
                region=d['region'],
                catalog='GEOFON')

            if d['have_moment_tensor']:
                # placeholder; resolved to a real MT below
                event.moment_tensor = True

            self.events[name] = event

        except NotFound:
            raise NotFound(url)  # reraise with url

    ev = self.events[name]

    if ev.moment_tensor is True:
        ev.moment_tensor = self.get_mt(ev)

    return ev
def gen_random_tectonic_event(
        scenario_id, magmin=-0.5, magmax=3., depmin=5, depmax=10,
        latmin=49.09586, latmax=49.25, lonmin=8.0578, lonmax=8.20578,
        timemin=util.str_to_time('2007-01-01 16:10:00.000'),
        timemax=util.str_to_time('2020-01-01 16:10:00.000')):
    '''Draw one random tectonic event within the given parameter bounds.'''
    return model.Event(
        name="scenario" + str(scenario_id),
        lat=randlat(latmin, latmax),
        lon=rand(lonmin, lonmax),
        magnitude=rand(magmin, magmax),
        depth=rand(depmin, depmax) * km,
        time=rand(timemin, timemax))
def test_writeread(self):
    '''Round-trip markers through save/load and verify their contents.'''
    nslc_ids = [('', 'STA', '', '*')]
    event = model.Event(lat=111., lon=111., depth=111., time=111.)

    _marker = marker.Marker(nslc_ids=nslc_ids, tmin=1., tmax=10.)
    emarker = marker.EventMarker(event=event)
    pmarker = marker.PhaseMarker(nslc_ids=nslc_ids, tmin=1., tmax=10.)
    pmarker.set_event(event)
    emarker.set_alerted(True)

    markers = [_marker, emarker, pmarker]

    # tempfile.mkstemp() leaks an open file descriptor; use
    # NamedTemporaryFile so the handle is closed deterministically.
    with tempfile.NamedTemporaryFile(delete=False) as f:
        fn = f.name

    marker.save_markers(markers, fn)

    in_markers = marker.load_markers(fn)
    in__marker, in_emarker, in_pmarker = in_markers

    for m in in_markers:
        if not isinstance(m, marker.EventMarker):
            assert (m.tmax - m.tmin) == 9.
        else:
            assert not m.is_alerted()

    marker.associate_phases_to_events([in_pmarker, in_emarker])

    in_event = in_pmarker.get_event()

    assert all((
        in_event.lat == 111.,
        in_event.lon == 111.,
        in_event.depth == 111.,
        in_event.time == 111.))

    assert in_pmarker.get_event_hash() == in_event.get_hash()
    assert in_pmarker.get_event_time() == 111.
def complete(data):
    '''
    Build a pyrocko Event (with moment tensor) from one parsed catalog
    record and append it to the enclosing *events* list.

    Records with missing attributes are skipped silently (best effort).
    '''
    try:
        t = calendar.timegm((
            data.year, data.month, data.day,
            data.hour, data.minute, data.seconds))

        # symmetric 3x3 moment tensor in up-south-east convention;
        # NOTE: num.float was removed in NumPy 1.24 -- use builtin float
        m = num.array([
            data.mrr, data.mrt, data.mrp,
            data.mrt, data.mtt, data.mtp,
            data.mrp, data.mtp, data.mpp],
            dtype=float).reshape(3, 3)

        # scale by 10**(exponent-7) (dyne-cm to Nm)
        m *= 10.0**(data.exponent - 7)

        mt = MomentTensor(m_up_south_east=m)
        ev = model.Event(
            lat=data.lat,
            lon=data.lon,
            time=t,
            name=data.eventname,
            depth=data.depth_km * 1000.,   # km -> m
            magnitude=float(mt.moment_magnitude()),
            duration=data.half_duration * 2.,
            region=data.region.rstrip(),
            catalog=data.catalog)

        ev.moment_tensor = mt
        events.append(ev)

    except AttributeError:
        # incomplete record: skip (deliberate best-effort behaviour)
        pass
def testIOEvent(self):
    '''Dump an event via guts and verify the reloaded copy matches.'''
    tempdir = tempfile.mkdtemp(prefix='pyrocko-model')
    fn = pjoin(tempdir, 'event.txt')

    e1 = model.Event(
        10., 20., 1234567890., 'bubu',
        depth=10.,
        region='taka tuka land',
        moment_tensor=moment_tensor.MomentTensor(strike=45., dip=90),
        magnitude=5.1,
        magnitude_type='Mw',
        tags=['cluster:-1', 'custom_magnitude:2.5'])

    guts.dump(e1, filename=fn)
    e2 = guts.load(filename=fn)

    for attr in ('region', 'name', 'lat', 'lon', 'time',
                 'magnitude', 'magnitude_type', 'tags'):
        assert getattr(e1, attr) == getattr(e2, attr)

    assert e1.get_hash() == e2.get_hash()

    fn2 = pjoin(tempdir, 'events.txt')
    guts.dump_all([e1, e2], filename=fn2)

    # loading a two-event file as a single event must fail
    with self.assertRaises(model.OneEventRequired):
        model.load_one_event(fn2)

    shutil.rmtree(tempdir)
def testIOEvent(self):
    '''Dump an event with Event.dump and reload it via the load kwarg.'''
    tempdir = tempfile.mkdtemp()
    fn = pjoin(tempdir, 'event.txt')

    e1 = model.Event(10., 20., 1234567890., 'bubu', region='taka tuka land')
    e1.dump(fn)

    e2 = model.Event(load=fn)
    for attr in ('region', 'name', 'lat', 'lon', 'time'):
        assert getattr(e1, attr) == getattr(e2, attr)

    shutil.rmtree(tempdir)
def pyrocko_event(self):
    '''Considers only the *preferred* origin and magnitude'''
    origin = self.preferred_origin
    lat, lon, depth = origin.position_values()
    return model.Event(
        name=self.public_id,
        lat=lat,
        lon=lon,
        time=origin.time.value,
        depth=depth,
        magnitude=self.preferred_magnitude.mag.value)
def get_event(self):
    '''Return this detection as a :py:class:`pyrocko.model.Event`.'''
    lat, lon = geo.point_coords(self.location, system='latlon')
    label = '%s (%g)' % (self.id, self.ifm)
    return model.Event(
        lat=lat,
        lon=lon,
        depth=self.location.z,
        time=self.time,
        name=label)
def _parse_events_page(self, page): logger.debug('In Geofon._parse_events_page(...)') # fix broken tags page = re.sub(br' ([^;])', b' \\1', page) page = re.sub(br'border=0', b'border="0"', page) page = re.sub(br'<(link|meta).*?>', b'', page, flags=re.DOTALL) page = re.sub(br'</html>.*', b'</html>', page, flags=re.DOTALL) doc = self.parse_xml(page) events = [] for tr in doc.getElementsByTagName("tr"): logger.debug('Found <tr> tag') tds = tr.getElementsByTagName("td") if len(tds) != 8: logger.debug('Does not contain 8 <td> tags.') continue elinks = tds[0].getElementsByTagName("a") if len(elinks) != 1 or not elinks[0].getAttribute('href'): logger.debug('Could not find link to event details page.') continue link = elinks[0].getAttribute('href').encode('ascii') m = re.search(br'\?id=(gfz[0-9]+[a-z]+)$', link) if not m: logger.debug('Could not find event id.') continue eid = m.group(1) vals = [getTextR(td) for td in tds] tevent = calendar.timegm( time.strptime(vals[0][:19], '%Y-%m-%d %H:%M:%S')) mag = float(vals[1]) epicenter = parse_location((vals[2]+' '+vals[3])) depth = float(vals[4])*1000. region = vals[7] ev = model.Event( lat=epicenter[0], lon=epicenter[1], time=tevent, name=str(eid.decode('ascii')), depth=depth, magnitude=mag, region=str(region), catalog='GEOFON') if vals[6] == 'MT': ev.moment_tensor = True logger.debug('Adding event from GEOFON catalog: %s' % ev) events.append(ev) return events
def test_click_non_dialogs(self):
    '''Click through many menu option combinations that do not require
    further interaction.

    Activate options in pairs of two.
    '''
    pv = self.pile_viewer

    non_dialog_actions = [
        'Indivdual Scale',
        'Common Scale',
        'Common Scale per Station',
        'Common Scale per Component',
        'Scaling based on Minimum and Maximum',
        'Scaling based on Mean +- 2 x Std. Deviation',
        'Scaling based on Mean +- 4 x Std. Deviation',
        'Sort by Names',
        'Sort by Distance',
        'Sort by Azimuth',
        'Sort by Distance in 12 Azimuthal Blocks',
        'Sort by Backazimuth',
        '3D distances',
        'Subsort by Network, Station, Location, Channel',
        'Subsort by Network, Station, Channel, Location',
        'Subsort by Station, Network, Channel, Location',
        'Subsort by Location, Network, Station, Channel',
        'Subsort by Channel, Network, Station, Location',
        'Subsort by Network, Station, Channel (Grouped by Location)',
        'Subsort by Station, Network, Channel (Grouped by Location)',
    ]

    options = [
        'Antialiasing',
        'Liberal Fetch Optimization',
        'Clip Traces',
        'Show Boxes',
        'Color Traces',
        'Show Scale Ranges',
        'Show Scale Axes',
        'Show Zero Lines',
        'Fix Scale Ranges',
        'Allow Downsampling',
        'Allow Degapping',
        'FFT Filtering',
        'Bandpass is Lowpass + Highpass',
        'Watch Files',
    ]

    # create an event marker and activate it
    self.add_one_pick()
    QTest.keyPress(self.pile_viewer, 'A')
    QTest.keyPress(self.pile_viewer, 'e')

    event = model.Event()
    markers = pv.viewer.get_markers()
    self.assertEqual(len(markers), 1)
    markers[0]._event = event

    right_click_menu = self.pile_viewer.viewer.menu
    for action_name in non_dialog_actions:
        # iterate over a snapshot: 'options' shrinks inside this loop, and
        # removing from a list while iterating it silently skips entries
        for oa in list(options):
            for ob in options:
                self.click_menu_item(right_click_menu, action_name)
                self.click_menu_item(right_click_menu, oa)
                self.click_menu_item(right_click_menu, ob)

            options.remove(oa)
def testIOEventOld(self):
    '''Round-trip an event through the legacy (olddump) text format.'''
    tempdir = tempfile.mkdtemp(prefix='pyrocko-model')
    fn = pjoin(tempdir, 'event.txt')

    e1 = model.Event(
        10., 20., 1234567890., 'bubu',
        region='taka tuka land',
        magnitude=5.1,
        magnitude_type='Mw')

    e1.olddump(fn)
    e2 = model.Event(load=fn)

    for attr in ('region', 'name', 'lat', 'lon', 'time',
                 'magnitude', 'magnitude_type'):
        assert getattr(e1, attr) == getattr(e2, attr)

    shutil.rmtree(tempdir)
def testIOEventOld(self):
    '''Round-trip an event with tags through the legacy text format.'''
    tempdir = self.make_tempdir()
    fn = pjoin(tempdir, 'event.txt')

    e1 = model.Event(
        10., 20., time=1234567890., name='bubu',
        region='taka tuka land',
        magnitude=5.1,
        magnitude_type='Mw',
        tags=['cluster:-1', 'custom_magnitude:2.5'])

    e1.olddump(fn)
    e2 = model.Event(load=fn)

    for attr in ('region', 'name', 'lat', 'lon', 'time',
                 'magnitude', 'magnitude_type', 'tags'):
        assert getattr(e1, attr) == getattr(e2, attr)
def retrieve(self):
    '''
    Fetch and parse the Saxony (Collm observatory) event list.

    Parses the fixed-column '<PRE>' section of the page and stores the
    resulting events in ``self._events``, keyed by event name.
    '''
    url = 'http://home.uni-leipzig.de/collm/auswertung_temp.html'

    f = ws_request(url)
    text = f.read()

    # sec: 0 = before '<PRE>', 1 = inside the event table, 2 = done
    sec = 0
    events = {}
    for line in text.splitlines():
        line = line.strip()
        if line == '<PRE>':
            sec += 1
            continue

        if sec == 1 and not line:
            # a blank line terminates the event table
            sec += 1
            continue

        if sec == 1:
            t = line.split(' ', 1)
            name = t[0]
            # fixed-column fields after the event name
            sdate = t[1][0:11]    # e.g. '01-Jan-2020'
            stime = t[1][12:22]
            sloc = t[1][23:36]    # 'lat;lon'
            sdepth = t[1][37:42]  # depth in km
            smag = t[1][51:55]
            region = t[1][60:]

            sday, smon, syear = sdate.split('-')
            # map German month abbreviations to month numbers
            smon = {
                'Jan': '01',
                'Feb': '02',
                'Mar': '03',
                'Apr': '04',
                'Mai': '05',
                'Jun': '06',
                'Jul': '07',
                'Aug': '08',
                'Sep': '09',
                'Okt': '10',
                'Nov': '11',
                'Dez': '12'}[smon]

            # NOTE(review): local name 'time' shadows the stdlib module if
            # it is imported at file level
            time = util.str_to_time(
                '%s-%s-%s %s' % (syear, smon, sday, stime))

            slat, slon = sloc.split(';')

            ev = model.Event(
                time=time,
                lat=float(slat),
                lon=float(slon),
                depth=float(sdepth) * 1000.,   # km -> m
                magnitude=float(smag),
                magnitude_type='Ml',
                name=name,
                region=region,
                catalog='Saxony')

            events[name] = ev

    self._events = events
def __call__(self):
    '''
    Compute synthetic seismograms for a fixed explosion-like source,
    save them as MiniSEED files and write a reference event marker.
    '''
    # Change strike within Snuffler with the added scroll bar.
    #strike = 0
    #dip = 90
    #rake = 0
    #moment = 7.00e20
    depth = 3000
    rise_time = 1

    # moment tensor components, all set to the same magnitude
    scale = 1E21
    mxx = 1. * scale
    mxy = 1. * scale
    myz = 1. * scale
    mxz = 1. * scale

    #explosion source
    # NOTE(review): the diagonal entries 'mxx', 'myy', 'mzz' all reuse the
    # value of mxx -- presumably intentional for an isotropic source
    source_params = dict(
        zip([
            'mxx', 'myy', 'mzz', 'mxy', 'mxz', 'myz',
            'depth', 'rise-time'
        ], [mxx, mxx, mxx, mxy, mxz, myz, depth, rise_time]))

    s = source.Source(
        sourcetype='moment_tensor', sourceparams=source_params)

    #strike dip rake
    #s = source.Source('bilateral',
    #sourceparams_str ='0 0 0 %g %g %g %g %g 0 0 0 0 1 %g' % (depth, moment, strike, dip, rake, rise_time))

    self.seis.set_source(s)
    recs = self.seis.get_receivers_snapshot(
        which_seismograms=('syn', ),
        which_spectra=(),
        which_processing='tapered')

    trs = []
    for rec in recs:
        for t in rec.get_traces():
            # shift by half the rise time -- presumably a source time
            # function centroid correction; confirm against the STF used
            t.shift(rise_time * 0.5)
            trs.append(t)

    # one file per channel, named from trace metadata
    io.save(
        trs,
        'mseeds/%(network)s_%(station)s_%(location)s_%(channel)s.mseed')

    # Create event:
    ref_event = model.Event(
        lat=self.olat,
        lon=self.olon,
        depth=depth,
        time=self.otime,
        name='Reference Event')

    synthetic_event_marker = gui_util.EventMarker(event=ref_event)
    gui_util.Marker.save_markers(
        [synthetic_event_marker], 'reference_marker.txt')
def from_attributes(vals):
    '''Reconstruct an EventMarker from a parsed attribute list.'''
    nslc_ids, tmin, tmax, kind = Marker.parse_attributes(
        vals[1:] + ['None'])

    lat, lon, depth, magnitude = (
        str_to_float_or_none(x) for x in vals[5:9])
    catalog, name, region = (
        str_to_str_or_none(x) for x in vals[9:])

    event = model.Event(
        lat, lon, tmin, name, depth, magnitude, region, catalog=catalog)
    return EventMarker(
        event, kind, event_hash=str_to_str_or_none(vals[4]))
def convert_to_event_marker(self, lat=0., lon=0.):
    '''
    Convert this marker in-place into an :py:class:`EventMarker`.

    No-op if the marker already is an EventMarker; a PhaseMarker is first
    downgraded to a plain Marker.

    :param lat: latitude of the newly created event (default 0.)
    :param lon: longitude of the newly created event (default 0.)
    '''
    if isinstance(self, EventMarker):
        return

    if isinstance(self, PhaseMarker):
        self.convert_to_marker()

    # in-place class change: this very object becomes an EventMarker
    self.__class__ = EventMarker
    self._event = model.Event(lat, lon, self.tmin, name='Event')
    self._event_hash = self._event.get_hash()
    self._active = False
    # event markers are zero-length in time and not tied to any channels
    self.tmax = self.tmin
    self.nslc_ids = []
def retrieve(self, **kwargs):
    '''
    Fetch events of this catalog from the kinherd web service.

    :returns: list of names of the retrieved events; the events themselves
        are stored in ``self.events``, keyed by name.
    '''
    import yaml

    kwargs['format'] = 'yaml'

    url = 'http://kinherd.org/quakes/%s' % self.catalog
    f = ws_request(url, **kwargs)

    names = []
    for eq in yaml.safe_load_all(f):
        # removed dead computation of the unused event reference time
        pset = eq['parametersets'][0]
        tref = calendar.timegm(pset['reference_time'].timetuple())
        tpost = calendar.timegm(pset['posted_time'].timetuple())
        params = pset['parameters']

        mt = MomentTensor(
            strike=params['strike'],
            dip=params['dip'],
            rake=params['slip_rake'],
            scalar_moment=params['moment'])

        event = model.Event(
            time=tref + params['time'],
            lat=params['latitude'],
            lon=params['longitude'],
            depth=params['depth'],
            magnitude=params['magnitude'],
            duration=params['rise_time'],
            name=eq['name'],
            catalog=self.catalog,
            moment_tensor=mt)

        # attach confidence intervals with pyrocko-style attribute names
        event.ext_confidence_intervals = {}
        trans = {'latitude': 'lat', 'longitude': 'lon'}
        for par in 'latitude longitude depth magnitude'.split():
            event.ext_confidence_intervals[trans.get(par, par)] = \
                (params[par+'_ci_low'], params[par+'_ci_high'])

        event.ext_posted_time = tpost

        name = eq['name']
        self.events[name] = event
        names.append(name)

    return names
def call(self):
    '''
    Scan the stack-max traces for peaks above the detector threshold and
    add one event marker per detection.
    '''
    self.mycleanup()
    self.detections = []
    i_detection = 0
    # fall-back location when no index trace is available
    zpeak = 0.
    lat = 0.
    lon = 0.
    for traces in self.chopper_selected_traces(
            mode='all',
            trace_selector=lambda x: x.station == "SMAX",
            fallback=True):

        # stack-max traces carry no location code; index traces use 'i'
        tr_smax = [tr for tr in traces if tr.location == '']
        tr_i = [tr for tr in traces if tr.location == 'i']
        if not tr_i:
            tr_i = [None] * len(tr_smax)

        # NOTE(review): the loop variable shadows the list 'tr_i' above
        for tr_i, tr_stackmax in zip(tr_i, tr_smax):
            tpeaks, apeaks = tr_stackmax.peaks(
                self.detector_threshold, self.tsearch)

            if self.level_trace:
                # visualize the detection threshold as a constant trace
                ltrace = tr_stackmax.copy(data=False)
                ltrace.set_ydata(
                    num.ones(tr_stackmax.data_len())
                    * self.detector_threshold)
                self.add_trace(ltrace)

            for t, a in zip(tpeaks, apeaks):
                if tr_i:
                    # NOTE(review): tr_i is called like a function here;
                    # presumably Trace.__call__ samples the grid index at
                    # time t -- confirm against the Trace API
                    lat, lon, xpeak, ypeak, zpeak = \
                        self.grid.index_to_location(tr_i(t)[1])
                    lat, lon = orthodrome.ne_to_latlon(
                        lat, lon, xpeak, ypeak)

                e = model.Event(
                    time=t,
                    name="%s-%s" % (i_detection, a),
                    lat=lat,
                    lon=lon,
                    depth=zpeak)
                self.detections.append(
                    gui_util.EventMarker(
                        event=e, kind=int(self.marker_kind[0])))
                i_detection += 1

    self.add_markers(self.detections)

    if self.hold_figure:
        self.show_comparison()
def test_drawing_optimization(self):
    '''Add many events and exercise the viewer's time-range updates.'''
    n = 505
    lats = num.random.uniform(-90., 90., n)
    lons = num.random.uniform(-180., 180., n)

    events = [
        model.Event(time=i, lat=lat, lon=lon, name='XXXX%s' % i)
        for i, (lat, lon) in enumerate(zip(lats, lons))]

    self.viewer.add_event(events[-1])
    assert len(self.viewer.markers) == 1

    self.viewer.add_events(events)
    assert len(self.viewer.markers) == n + 1

    self.viewer.set_time_range(-500., 5000)
    self.viewer.set_time_range(0., None)
    self.viewer.set_time_range(None, 0.)
def detect(self, tinc=None, detector_threshold=1.8):
    '''Detect events

    Summarizes the energy of layers in your network that have their
    *is_detector* flag set to *True*.

    :param tinc: time increment to step through the dataset.
    :param detector_threshold: triggers a detection when summed energy
        exceeds this value.
    '''
    tpeaksearch = 5.

    self.tinc_detect = tinc or 1.0
    fn_detector_trace = 'detector.mseed'
    fn_detections = 'detections.pf'

    with self.sess as default:
        self.est = tf.estimator.Estimator(
            model_fn=self.model, model_dir=self.get_outdir())

        # collect the summed detector level for each prediction window
        detector_level = [
            p['level'] for p in self.est.predict(
                input_fn=self.generate_detect_dataset,
                yield_single_examples=True)]

        # leftover debug print replaced by proper logging
        logger.debug(
            'tstart_data=%s tinc_detect=%s',
            self.config.prediction_data_generator.tstart_data,
            self.tinc_detect)

        tr = trace.Trace(
            tmin=self.config.prediction_data_generator.tstart_data,
            ydata=num.array(detector_level),
            deltat=self.tinc_detect)

        tpeaks, apeaks = tr.peaks(detector_threshold, tpeaksearch)
        # fixed typo in log message: 'Fount' -> 'Found'
        logger.info('Found %i detections' % len(tpeaks))

        markers = []
        for (tpeak, apeak) in zip(tpeaks, apeaks):
            markers.append(
                EventMarker(pmodel.Event(time=tpeak, name=str(apeak))))

        logger.info('Saving detections: %s' % fn_detections)
        Marker.save_markers(markers, fn_detections)

        logger.info('Saving detector level: %s' % fn_detector_trace)
        io.save([tr], fn_detector_trace)
def _parse_events_page(self, page):
    '''Parse a USGS GeoJSON feed page into pyrocko events.'''
    import json

    doc = json.loads(page)

    events = []
    for feat in doc['features']:
        props = feat['properties']
        geo = feat['geometry']
        lon, lat, depth = (float(x) for x in geo['coordinates'])

        # feed times are milliseconds since the epoch
        t = util.str_to_time('1970-01-01 00:00:00') + \
            props['time'] * 0.001

        mag = float(props['mag']) if props['mag'] is not None else None

        if props['place'] is not None:
            region = props['place'].encode('ascii', 'replace')
        else:
            region = None

        catalog = str(props['net'].upper())
        name = 'USGS-%s-' % catalog + util.time_to_str(
            t, format='%Y-%m-%d_%H-%M-%S.3FRAC')

        events.append(model.Event(
            lat=lat,
            lon=lon,
            time=t,
            name=name,
            depth=depth*1000.,
            magnitude=mag,
            region=region,
            catalog=catalog))

    return events
def _json_feature_to_event(self, feature):
    '''Convert one GeoJSON feature from the GEOFON feed into an Event.'''
    name = feature['id']
    lon, lat, depth = feature['geometry']['coordinates']
    depth *= 1000.  # km -> m

    properties = feature['properties']
    magnitude = properties['mag']
    magnitude_type = properties['magType']
    region = properties['place']
    tevent = util.str_to_time(properties['time'].replace('T', ' '))

    if self._get_moment_tensors and properties.get('hasMT') == 'yes':
        moment_tensor = True  # flag for caller to query MT
    else:
        moment_tensor = None

    status = properties['status'][:1]

    tags = []
    if status in 'AMC':
        tags.append('geofon_status:%s' % status)

    category = properties.get('evtype', '')
    if re.match(r'^[a-zA-Z0-9]+$', category):
        tags.append('geofon_category:%s' % category)

    return model.Event(
        lat=float(lat),
        lon=float(lon),
        time=tevent,
        name=name,
        depth=float(depth),
        magnitude=float(magnitude),
        magnitude_type=str(magnitude_type),
        region=str(region),
        moment_tensor=moment_tensor,
        catalog='GEOFON',
        tags=tags)
def detections_to_event_markers(fn_detections):
    '''Read a detections file and return the matching EventMarkers.'''
    markers = []
    if fn_detections:
        with open(fn_detections, 'r') as f:
            for line in f:
                (i, t_d, t_t, apeak, latpeak, lonpeak,
                 xpeak, ypeak, zpeak) = line.split()

                lat, lon = orthodrome.ne_to_latlon(
                    float(latpeak), float(lonpeak),
                    float(xpeak), float(ypeak))
                t = util.str_to_time("%s %s" % (t_d, t_t))
                label = "%s-%s" % (apeak, i)
                event = model.Event(
                    lat=lat, lon=lon, depth=float(zpeak),
                    name=label, time=t)
                markers.append(
                    gui_util.EventMarker(
                        event, kind=int(kind_default[0])))

    return markers
def testIOEvent(self):
    '''Serialize an event with moment tensor via guts and compare.'''
    tempdir = tempfile.mkdtemp()
    fn = pjoin(tempdir, 'event.txt')

    e1 = model.Event(
        10., 20., 1234567890., 'bubu',
        region='taka tuka land',
        moment_tensor=moment_tensor.MomentTensor(strike=45., dip=90),
        magnitude=5.1,
        magnitude_type='Mw')

    guts.dump(e1, filename=fn)
    e2 = guts.load(filename=fn)

    for attr in ('region', 'name', 'lat', 'lon', 'time',
                 'magnitude', 'magnitude_type'):
        assert getattr(e1, attr) == getattr(e2, attr)

    shutil.rmtree(tempdir)
def testProjections(self):
    '''R/T projection must recover unit radial/transverse amplitudes
    for all azimuths.'''
    km = 1000.
    origin = model.Event(lat=-10, lon=150., depth=0.0)

    for azimuth in num.linspace(0., 360., 37):
        lat, lon = orthodrome.ne_to_latlon(
            origin.lat, origin.lon,
            10. * km * math.cos(azimuth),
            10. * km * math.sin(azimuth))

        station = model.Station(lat=lat, lon=lon)
        station.set_event_relative_data(origin)
        station.set_channels_by_name('BHZ', 'BHE', 'BHN')

        # unit radial and transverse amplitudes rotated into E/N
        amp_r = 1.
        amp_t = 1.
        traces = [
            trace.Trace(channel='BHE', ydata=num.array(
                [math.sin(azimuth) * amp_r + math.cos(azimuth) * amp_t])),
            trace.Trace(channel='BHN', ydata=num.array(
                [math.cos(azimuth) * amp_r - math.sin(azimuth) * amp_t])),
        ]

        for matrix, in_channels, out_channels \
                in station.guess_projections_to_rtu():
            projected = trace.project(
                traces, matrix, in_channels, out_channels)

        def pick(trs, channel):
            for tr in trs:
                if tr.channel == channel:
                    return tr

        assert near(pick(projected, 'R').ydata[0], 1.0, 0.001)
        assert near(pick(projected, 'T').ydata[0], 1.0, 0.001)
def call(self):
    '''Main work routine of the snuffling.'''
    self.cleanup()

    # default origin when no active event is set in the viewer
    olat = 0.
    olon = 0.
    # presumably a single-force vector (none applied) -- verify against
    # the add_seismogram signature
    f = (0., 0., 0.)
    deltat = 1./self.fsampling

    # choose the source time function from the GUI setting
    if self.stf == 'Gauss':
        stf = Gauss(self.tau)
    elif self.stf == 'Impulse':
        stf = Impulse()

    viewer = self.get_viewer()
    event = viewer.get_active_event()
    if event:
        event, stations = self.get_active_event_and_stations(
            missing='warn')
    else:
        event = model.Event(lat=olat, lon=olon)
        stations = []

    if not stations:
        # fall back to a single dummy station at the default origin
        s = model.Station(lat=olat, lon=olon, station='AFG')
        stations = [s]
        viewer.add_stations(stations)

    source = gf.DCSource(
        time=event.time+self.time,
        lat=event.lat,
        lon=event.lon,
        north_shift=self.north_km*km,
        east_shift=self.east_km*km,
        depth=self.depth_km*km,
        magnitude=moment_tensor.moment_to_magnitude(self.moment),
        strike=self.strike,
        dip=self.dip,
        rake=self.rake)

    source.regularize()

    m = EventMarker(source.pyrocko_event())
    self.add_marker(m)

    targets = []  # NOTE(review): never used below -- kept as in original

    mt = moment_tensor.MomentTensor(
        strike=source.strike,
        dip=source.dip,
        rake=source.rake,
        moment=self.moment)

    traces = []
    for station in stations:
        xyz = (self.north_km*km, self.east_km*km, self.depth_km*km)
        # source-receiver distance from the shift vector
        r = num.sqrt(xyz[0]**2 + xyz[1]**2 + xyz[2]**2)
        # number of samples -- presumably sized to cover the S arrival
        ns = math.ceil(r/self.vs/1.6)*2
        outx = num.zeros(int(ns))
        outy = num.zeros(int(ns))
        outz = num.zeros(int(ns))
        nsl = station.nsl()  # NOTE(review): unused -- kept as in original
        quantity = self.quantity.split()[0].lower()
        add_seismogram(
            self.vp*km, self.vs*km, self.density, self.qp, self.qs,
            xyz, f, mt.m6(), quantity, deltat, 0.,
            outx, outy, outz,
            stf=stf,
            want_near=self.want_near,
            want_intermediate=self.want_intermediate,
            want_far=self.want_far)

        for channel, out in zip('NEZ', [outx, outy, outz]):
            tr = trace.Trace(
                '', station.station, '', channel,
                deltat=deltat, tmin=source.time, ydata=out)
            traces.append(tr)

    self.add_traces(traces)