def call(self):
    '''Main work routine of the snuffling.'''

    self.cleanup()
    view = self.get_viewer()
    pile = self.get_pile()
    tmin, tmax = view.get_time_range()

    if self.useevent:
        markers = view.selected_markers()
        if len(markers) != 1:
            self.fail('Exactly one marker must be selected.')

        marker = markers[0]
        if not isinstance(marker, EventMarker):
            self.fail('An event marker must be selected.')

        ev = marker.get_event()
        lat, lon = ev.lat, ev.lon
    else:
        lat, lon = self.lat, self.lon

    print lat, lon, self.minradius, self.maxradius, \
        util.time_to_str(tmin), util.time_to_str(tmax)

    data = iris_ws.ws_station(
        lat=lat, lon=lon, minradius=self.minradius,
        maxradius=self.maxradius, timewindow=(tmin, tmax), level='chan')

    stations = iris_ws.grok_station_xml(data, tmin, tmax)
    networks = set([s.network for s in stations])

    dir = self.tempdir()
    fns = []
    for net in networks:
        nstations = [s for s in stations if s.network == net]
        selection = sorted(iris_ws.data_selection(nstations, tmin, tmax))
        if selection:
            for x in selection:
                print x

            try:
                d = iris_ws.ws_bulkdataselect(selection)
                fn = pjoin(dir, 'data-%s.mseed' % net)
                f = open(fn, 'w')
                f.write(d)
                f.close()
                fns.append(fn)
            except urllib2.HTTPError:
                pass

    newstations = []
    for sta in stations:
        if not view.has_station(sta):
            print sta
            newstations.append(sta)

    view.add_stations(newstations)

    for fn in fns:
        traces = list(io.load(fn))
        self.add_traces(traces)
def __str__(self):
    s = self.reason
    if self.codes:
        s += ' (%s)' % '.'.join(self.codes)

    if self.time_range:
        s += ' (%s - %s)' % (
            util.time_to_str(self.time_range[0]),
            util.time_to_str(self.time_range[1]))

    return s
def __str__(self):
    s = 'SubPile\n'
    s += 'number of files: %i\n' % len(self.files)
    s += 'timerange: %s - %s\n' % (
        util.time_to_str(self.tmin), util.time_to_str(self.tmax))
    s += 'networks: %s\n' % ', '.join(sl(self.networks.keys()))
    s += 'stations: %s\n' % ', '.join(sl(self.stations.keys()))
    s += 'locations: %s\n' % ', '.join(sl(self.locations.keys()))
    s += 'channels: %s\n' % ', '.join(sl(self.channels.keys()))
    s += 'deltats: %s\n' % ', '.join(sl(self.deltats.keys()))
    return s
def __str__(self):
    s = 'TracesFile\n'
    s += 'abspath: %s\n' % self.abspath
    s += 'file mtime: %s\n' % util.time_to_str(self.mtime)
    s += 'number of traces: %i\n' % len(self.traces)
    s += 'timerange: %s - %s\n' % (
        util.time_to_str(self.tmin), util.time_to_str(self.tmax))
    s += 'networks: %s\n' % ', '.join(sl(self.networks.keys()))
    s += 'stations: %s\n' % ', '.join(sl(self.stations.keys()))
    s += 'locations: %s\n' % ', '.join(sl(self.locations.keys()))
    s += 'channels: %s\n' % ', '.join(sl(self.channels.keys()))
    s += 'deltats: %s\n' % ', '.join(sl(self.deltats.keys()))
    return s
def iter_event_names(
        self,
        time_range=None,
        magmin=0.,
        magmax=10.,
        latmin=-90.,
        latmax=90.,
        lonmin=-180.,
        lonmax=180.):

    yearbeg, monbeg, daybeg = time.gmtime(time_range[0])[:3]
    yearend, monend, dayend = time.gmtime(time_range[1])[:3]

    p = []
    a = p.append
    a('format=geojson')
    if self.catalog is not None:
        a('catalog=%s' % self.catalog.lower())

    a('starttime=%s' % util.time_to_str(
        time_range[0], format='%Y-%m-%dT%H:%M:%S'))

    a('endtime=%s' % util.time_to_str(
        time_range[1], format='%Y-%m-%dT%H:%M:%S'))

    if latmin != -90.:
        a('minlatitude=%g' % latmin)
    if latmax != 90.:
        a('maxlatitude=%g' % latmax)
    if lonmin != -180.:
        a('minlongitude=%g' % lonmin)
    if lonmax != 180.:
        a('maxlongitude=%g' % lonmax)
    if magmin != 0.:
        a('minmagnitude=%g' % magmin)
    if magmax != 10.:
        a('maxmagnitude=%g' % magmax)

    url = 'https://earthquake.usgs.gov/fdsnws/event/1/query?' + '&'.join(p)

    logger.debug('Opening URL: %s' % url)
    page = urlopen(url).read()
    logger.debug('Received page (%i bytes)' % len(page))

    events = self._parse_events_page(page)

    for ev in events:
        self.events[ev.name] = ev

    for ev in events:
        if time_range[0] <= ev.time and ev.time <= time_range[1]:
            yield ev.name
def iter_event_names(
        self,
        time_range=None,
        magmin=0.,
        magmax=10.,
        latmin=-90.,
        latmax=90.,
        lonmin=-180.,
        lonmax=180.):

    yearbeg, monbeg, daybeg = time.gmtime(time_range[0])[:3]
    yearend, monend, dayend = time.gmtime(time_range[1])[:3]

    p = []
    a = p.append
    a('format=geojson')
    if self.catalog is not None:
        a('catalog=%s' % self.catalog.lower())

    a('starttime=%s' % util.time_to_str(
        time_range[0], format='%Y-%m-%dT%H:%M:%S'))

    a('endtime=%s' % util.time_to_str(
        time_range[1], format='%Y-%m-%dT%H:%M:%S'))

    if latmin != -90.:
        a('minlatitude=%g' % latmin)
    if latmax != 90.:
        a('maxlatitude=%g' % latmax)
    if lonmin != -180.:
        a('minlongitude=%g' % lonmin)
    if lonmax != 180.:
        a('maxlongitude=%g' % lonmax)
    if magmin != 0.:
        a('minmagnitude=%g' % magmin)
    if magmax != 10.:
        a('maxmagnitude=%g' % magmax)

    url = 'http://earthquake.usgs.gov/fdsnws/event/1/query?' + '&'.join(p)

    logger.debug('Opening URL: %s' % url)
    page = urllib2.urlopen(url).read()
    logger.debug('Received page (%i bytes)' % len(page))

    events = self._parse_events_page(page)

    for ev in events:
        self.events[ev.name] = ev

    for ev in events:
        if time_range[0] <= ev.time and ev.time <= time_range[1]:
            yield ev.name
def get_hash(self):
    e = self
    if isinstance(e.time, util.hpfloat):
        stime = util.time_to_str(e.time, format='%Y-%m-%d %H:%M:%S.6FRAC')
    else:
        stime = util.time_to_str(e.time, format='%Y-%m-%d %H:%M:%S.3FRAC')

    s = float_or_none_to_str

    return ehash(', '.join((
        stime, s(e.lat), s(e.lon), s(e.depth), s(e.magnitude),
        str(e.catalog), str(e.name), str(e.region))))
def testIterTimes(self):
    tmin = util.str_to_time('1999-03-20 20:10:10')
    tmax = util.str_to_time('2001-05-20 10:00:05')

    ii = 0
    for ymin, ymax in util.iter_years(tmin, tmax):
        for mmin, mmax in util.iter_months(ymin, ymax):
            ii += 1
            s1 = util.time_to_str(mmin)
            s2 = util.time_to_str(mmax)

    assert ii == 12 * 3
    assert s1 == '2001-12-01 00:00:00.000'
    assert s2 == '2002-01-01 00:00:00.000'
def str_duration(t):
    s = ''
    if t < 0.:
        s = '-'

    t = abs(t)

    if t < 10.0:
        return s + '%.2g s' % t
    elif 10.0 <= t < 3600.:
        return s + util.time_to_str(t, format='%M:%S min')
    elif 3600. <= t < 24 * 3600.:
        return s + util.time_to_str(t, format='%H:%M h')
    else:
        return s + '%.1f d' % (t / (24. * 3600.))
def call(self):
    p = self.get_pile()
    try:
        markers = self.get_selected_event_markers()
    except NoViewerSet:
        markers = load_markers(self.markers_filename)

    try:
        out_filename = self.output_filename(
            'Template for output files', default_output_filename)
    except NoViewerSet:
        out_filename = self.out_filename

    for m in markers:
        event = m.get_event()
        eventname = event.name
        if not eventname:
            eventname = util.time_to_str(
                event.time, format='%Y-%m-%d_%H-%M-%S')

        traces = p.all(
            tmin=event.time + self.tbeg,
            tmax=event.time + self.tend)

        io.save(traces, out_filename, additional=dict(eventname=eventname))
def get_problem(self, event, target_groups, targets):
    if event.depth is None:
        event.depth = 0.

    base_source = gf.MTSource.from_pyrocko_event(event)

    stf = STFType.base_stf(self.stf_type)
    stf.duration = event.duration or 0.0

    base_source.stf = stf

    subs = dict(
        event_name=event.name,
        event_time=util.time_to_str(event.time))

    problem = CMTProblem(
        name=expand_template(self.name_template, subs),
        base_source=base_source,
        target_groups=target_groups,
        targets=targets,
        ranges=self.ranges,
        distance_min=self.distance_min,
        mt_type=self.mt_type,
        stf_type=self.stf_type,
        norm_exponent=self.norm_exponent,
        nthreads=self.nthreads)

    return problem
def read_file_header(f, npad=4):
    header_infos = []
    nlines = 12
    iline = 0
    while iline < nlines:
        f.read(npad)
        d = f.read(80)
        f.read(npad)
        if iline == 0:
            net_name, nchannels, ear, doy, mon, day, hr, min, secs, tlen = \
                unpack_fixed(
                    'x1,a29,i3,i3,x1,i3,x1,i2,x1,i2,x1,i2,x1,i2,x1,f6,x1,f9',
                    d)

            year = 1900 + ear
            tmin = calendar.timegm((year, mon, day, hr, min, secs))
            header_infos.append(
                (net_name, nchannels, util.time_to_str(tmin)))

            if nchannels > 24:
                nlines += (nchannels - 25) / 3 + 1

        if iline >= 2:
            for j in range(3):
                s = d[j * 26:(j + 1) * 26]
                if s.strip():
                    sta1, cha1, cha2, sta2, toffset, tlen = unpack_fixed(
                        'x1,a4,a2,x1,a1,a1,f7,x1,f8', s)
                    sta = sta1 + sta2
                    cha = cha1 + cha2
                    header_infos.append((sta, cha, toffset, tlen))

        iline += 1

    return header_infos
def __str__(self):
    try:
        stime = util.time_to_str(self.time)
    except GPSError:
        stime = '?'

    return '''%s %s %s %s''' % (
        stime, self.latitude, self.longitude, self.altitude)
def iter_event_names(self, time_range=None, **kwargs):
    qkwargs = {}
    for k in 'magmin magmax latmin latmax lonmin lonmax'.split():
        if k in kwargs and kwargs[k] is not None:
            qkwargs[k] = '%f' % kwargs[k]

    if time_range is not None:
        form = '%Y-%m-%d_%H-%M-%S'
        if time_range[0] is not None:
            qkwargs['tmin'] = util.time_to_str(time_range[0], form)
        if time_range[1] is not None:
            qkwargs['tmax'] = util.time_to_str(time_range[1], form)

    for name in self.retrieve(**qkwargs):
        yield name
def read_file_header(f, npad=4):
    header_infos = []
    nlines = 12
    iline = 0
    while iline < nlines:
        f.read(npad)
        d = f.read(80)
        f.read(npad)
        if iline == 0:
            net_name, nchannels, ear, doy, mon, day, hr, min, secs, tlen = \
                unpack_fixed(
                    'x1,a29,i3,i3,x1,i3,x1,i2,x1,i2,x1,i2,x1,i2,x1,f6,x1,f9',
                    d)

            year = 1900 + ear
            tmin = calendar.timegm((year, mon, day, hr, min, secs))
            header_infos.append(
                (net_name, nchannels, util.time_to_str(tmin)))

            if nchannels > 30:
                nlines += (nchannels - 31) / 3 + 1

        if iline >= 2:
            for j in range(3):
                s = d[j * 26:(j + 1) * 26]
                if s.strip():
                    sta1, cha1, cha2, sta2, toffset, tlen = unpack_fixed(
                        'x1,a4,a2,x1,a1,a1,f7,x1,f8', s)
                    sta = sta1 + sta2
                    cha = cha1 + cha2
                    header_infos.append((sta, cha, toffset, tlen))

        iline += 1

    return header_infos
def human_str(self):
    s = [
        'Latitude [deg]: %g' % self.lat,
        'Longitude [deg]: %g' % self.lon,
        'Time [UTC]: %s' % util.time_to_str(self.time)]

    if self.name:
        s.append('Name: %s' % self.name)

    if self.depth is not None:
        s.append('Depth [km]: %g' % (self.depth / 1000.))

    if self.magnitude is not None:
        s.append('Magnitude [%s]: %3.1f' % (
            self.magnitude_type or 'M?', self.magnitude))

    if self.region:
        s.append('Region: %s' % self.region)

    if self.catalog:
        s.append('Catalog: %s' % self.catalog)

    if self.moment_tensor:
        s.append(str(self.moment_tensor))

    return '\n'.join(s)
def call(self):
    fn = self.output_filename()
    out = open(fn, 'w')
    p = self.get_pile()

    # for test whether there is a gap or not
    tpad = 10

    key = lambda tr: (tr.station, tr.channel)

    all_ranges = {}
    gaps = {}
    cnt = 0

    # call chopper with load_data=False in order to speed up things,
    # loads just metadata of traces
    for traces in p.chopper(load_data=False, trace_selector=key):
        if traces:
            for tr in traces:
                cnt += 1
                mi, ma = tr.tmin, tr.tmax
                k = key(tr)
                if k not in all_ranges:
                    all_ranges[k] = [[mi, ma]]
                else:
                    time_list = all_ranges[k]
                    time_list.append([mi, ma])
                    all_ranges[k] = time_list

                if cnt % 100 == 0:
                    print "trace {} done".format(cnt)

    channels = all_ranges.keys()
    for i in range(len(channels)):
        if channels[i] not in all_ranges:
            pass
        else:
            time_list = all_ranges[channels[i]]
            time_list = sorted(time_list)
            for j in range(1, len(time_list)):
                if time_list[j][0] - time_list[j-1][1] > tpad:
                    if channels[i] not in gaps:
                        gaps[channels[i]] = [
                            [time_list[j-1][1], time_list[j][0]]]
                    else:
                        gap_list = gaps[channels[i]]
                        gap_list.append(
                            [time_list[j-1][1], time_list[j][0]])
                        gaps[channels[i]] = gap_list

    for i in range(len(channels)):
        if channels[i] not in gaps:
            pass
        else:
            gap_list = gaps[channels[i]]
            for j in range(len(gap_list)):
                line = '{} {} {} {}\n'.format(
                    channels[i][0], channels[i][1],
                    util.time_to_str(gap_list[j][0]),
                    util.time_to_str(gap_list[j][1]))
                out.write(line)

    out.close()
def olddumpf(self, file):
    file.write('name = %s\n' % self.name)
    file.write('time = %s\n' % util.time_to_str(self.time))

    if self.lat is not None:
        file.write('latitude = %.12g\n' % self.lat)
    if self.lon is not None:
        file.write('longitude = %.12g\n' % self.lon)

    if self.magnitude is not None:
        file.write('magnitude = %g\n' % self.magnitude)
        file.write('moment = %g\n' %
                   moment_tensor.magnitude_to_moment(self.magnitude))

    if self.magnitude_type is not None:
        file.write('magnitude_type = %s\n' % self.magnitude_type)

    if self.depth is not None:
        file.write('depth = %.10g\n' % self.depth)

    if self.region is not None:
        file.write('region = %s\n' % self.region)

    if self.catalog is not None:
        file.write('catalog = %s\n' % self.catalog)

    if self.moment_tensor is not None:
        m = self.moment_tensor.m()
        sdr1, sdr2 = self.moment_tensor.both_strike_dip_rake()
        file.write(
            ('mnn = %g\nmee = %g\nmdd = %g\nmne = %g\nmnd = %g\nmed = %g\n'
             'strike1 = %g\ndip1 = %g\nrake1 = %g\n'
             'strike2 = %g\ndip2 = %g\nrake2 = %g\n') % (
                (m[0, 0], m[1, 1], m[2, 2], m[0, 1], m[0, 2], m[1, 2])
                + sdr1 + sdr2))

    if self.duration is not None:
        file.write('duration = %g\n' % self.duration)
def __str__(self):
    if self.tmin is not None and self.tmax is not None:
        tmin = util.time_to_str(self.tmin)
        tmax = util.time_to_str(self.tmax)
        s = 'Pile\n'
        s += 'number of subpiles: %i\n' % len(self.subpiles)
        s += 'timerange: %s - %s\n' % (tmin, tmax)
        s += 'networks: %s\n' % ', '.join(sl(self.networks.keys()))
        s += 'stations: %s\n' % ', '.join(sl(self.stations.keys()))
        s += 'locations: %s\n' % ', '.join(sl(self.locations.keys()))
        s += 'channels: %s\n' % ', '.join(sl(self.channels.keys()))
        s += 'deltats: %s\n' % ', '.join(sl(self.deltats.keys()))
    else:
        s = 'empty Pile'

    return s
def append_time_params(self, a, time_range):
    date_start_s, tstart_s = util.time_to_str(
        time_range[0], format='%Y-%m-%d %H:%M:%S').split()
    date_end_s, tend_s = util.time_to_str(
        time_range[1], format='%Y-%m-%d %H:%M:%S').split()

    date_start_s = date_start_s.split('-')
    date_end_s = date_end_s.split('-')

    a('start_year=%s' % date_start_s[0])
    a('start_month=%s' % date_start_s[1])
    a('start_day=%s' % date_start_s[2])
    a('start_time=%s' % tstart_s)

    a('end_year=%s' % date_end_s[0])
    a('end_month=%s' % date_end_s[1])
    a('end_day=%s' % date_end_s[2])
    a('end_time=%s' % tend_s)
def get_arv_time_from_pyrocko(self):
    """
    Description:
    ------------
    Get arrival times from pyrocko format data.

    Parameters/Input:
    -----------------
    'tt_pyrocko.dat':
    stream (obspy stream): Input 3 component waveform data
    evid (int): Event id.

    Returns/Modifications:
    P_tt (dict): P travel time data
    S_tt (dict): S travel time data
    """
    evids = self.evlist.keys()

    markers = mk.load_markers(self.maindir + '/input/tt_pyrocko.dat')
    mk.associate_phases_to_events(markers)

    indexs = [
        i for i in range(len(markers))
        if isinstance(markers[i], mk.PhaseMarker)]

    evid_markers_index = [
        j for i in evids for j in indexs
        if markers[j]._event.name == i]

    P_tt, S_tt = {}, {}
    for i in evids:
        P_tt[i] = []
        S_tt[i] = []

    for j in evid_markers_index:
        if markers[j]._phasename == 'P':
            P_tt[markers[j]._event.name].append([
                UTCDateTime(util.time_to_str(markers[j].tmin)),
                markers[j].get_nslc_ids()[0][1]])

        if markers[j]._phasename == 'S':
            S_tt[markers[j]._event.name].append([
                UTCDateTime(util.time_to_str(markers[j].tmin)),
                markers[j].get_nslc_ids()[0][1]])

    self.P_tt = P_tt
    self.S_tt = S_tt

    return None
def plot_traces(self, nfigure):
    import matplotlib.dates as mdates
    from pyrocko import util
    import matplotlib.dates as dates
    import datetime
    import os

    for i in xrange(self.Nwav):
        manifold = self.seismo[i]
        fig, axes = plt.subplots(
            manifold.Npoints, squeeze=True, sharex=True,
            num=nfigure, figsize=(14, 6))

        nfigure += 1
        fig.subplots_adjust(hspace=0)
        plt.setp(
            [a.get_xticklabels() for a in fig.axes[:-1]], visible=False)

        for j in xrange(len(manifold.targets)):
            tr = manifold.traces[j]
            syn = manifold.syn[j]
            arrival = manifold.arrivals[j]
            # print arrival
            # print util.time_to_str(arrival)
            # print util.time_to_str(tr.get_xdata()[0])
            # sys.exit()
            target = manifold.targets[j]

            t_arr = dates.date2num(datetime.datetime.strptime(
                '{}'.format(util.time_to_str(arrival)),
                '%Y-%m-%d %H:%M:%S.%f'))

            time1 = [
                dates.date2num(datetime.datetime.strptime(
                    '{}'.format(d), '%Y-%m-%d %H:%M:%S.%f'))
                for d in map(util.time_to_str, tr.get_xdata())]

            time2 = [
                dates.date2num(datetime.datetime.strptime(
                    '{}'.format(d), '%Y-%m-%d %H:%M:%S.%f'))
                for d in map(util.time_to_str, syn.get_xdata())]

            s1 = axes[j].plot(time1, tr.ydata, color='b')
            s2 = axes[j].plot(time2, syn.ydata, color='r')
            s3 = axes[j].plot(
                [t_arr, t_arr], [np.min(tr.ydata), np.max(tr.ydata)],
                'k-', lw=2)

            axes[j].text(
                -.2, 0.5, str(target.codes), transform=axes[j].transAxes)
            axes[j].set_yticklabels([], visible=False)
            axes[j].xaxis.set_major_formatter(
                mdates.DateFormatter("%Y-%m-%d"))
            axes[j].xaxis.set_major_formatter(
                mdates.DateFormatter("%H:%M:%S"))
            axes[j].set_xlabel('Time [s]')

        # plt.suptitle('Waveform fits for' + ' ' + str(manifold.phase)
        #              + '-Phase and component' + ' '
        #              + str(manifold.component))

        lgd = plt.legend(
            (s1[0], s2[0], s3[0]),
            ('Data', 'Synthetic', str(manifold.phase) + '-onset'),
            loc='upper center', bbox_to_anchor=(0.5, -1.6),
            fancybox=True, shadow=True, ncol=5)

        time = util.time_to_str(manifold.base_source.time)
        plt.suptitle('Waveform fits for {} event '.format(time))
        fig.savefig(
            self.outdir + '/wave/'
            + os.path.splitext(manifold.event)[0] + '.eps',
            format='EPS')
def get_m_angle_all(cc_i_ev_vs_rota, catalog, st, ccmin):
    dict_ev_angle = {}

    for i_ev, ev in enumerate(catalog):
        maxcc_value = num.max(cc_i_ev_vs_rota[i_ev, :])
        if not num.isnan(maxcc_value):
            maxcc_angle = -180 + num.argmax(cc_i_ev_vs_rota[i_ev, :])
            if maxcc_value > ccmin:
                dict_ev_angle[util.time_to_str(ev.time)] = int(maxcc_angle)

    return dict_ev_angle
def convert_event_marker(marker):
    ev = marker.get_event()
    depth = ev.depth
    if depth is None:
        depth = 0.0

    ev_name = ev.name if ev.name else '(Event)'
    xmleventmarker = XMLEventMarker(
        eventname=ev_name,
        longitude=float(ev.lon),
        latitude=float(ev.lat),
        origintime=util.time_to_str(ev.time),
        depth=float(depth),
        magnitude=float(get_magnitude(ev)),
        active=['no', 'yes'][marker._active])

    return xmleventmarker
def testTime(self):
    for fmt, accu in zip([
            '%Y-%m-%d %H:%M:%S.3FRAC',
            '%Y-%m-%d %H:%M:%S.2FRAC',
            '%Y-%m-%d %H:%M:%S.1FRAC',
            '%Y-%m-%d %H:%M:%S'],
            [0.001, 0.01, 0.1, 1.]):

        ta = util.str_to_time('1960-01-01 10:10:10')
        tb = util.str_to_time('2020-01-01 10:10:10')

        for i in xrange(10000):
            t1 = ta + random() * (tb - ta)
            s = util.time_to_str(t1, format=fmt)
            t2 = util.str_to_time(s, format=fmt)
            assert abs(t1 - t2) < accu
def add_stations(
        self,
        stations=None,
        pyrocko_stations_filename=None,
        stationxml_filenames=None):

    if stations is not None:
        for station in stations:
            self.stations[station.nsl()] = station

    if pyrocko_stations_filename is not None:
        logger.debug(
            'Loading stations from file "%s"...'
            % pyrocko_stations_filename)

        for station in model.load_stations(pyrocko_stations_filename):
            self.stations[station.nsl()] = station

    if stationxml_filenames is not None and len(stationxml_filenames) > 0:
        for stationxml_filename in stationxml_filenames:
            if not op.exists(stationxml_filename):
                continue

            logger.debug(
                'Loading stations from StationXML file "%s"...'
                % stationxml_filename)

            sx = fs.load_xml(filename=stationxml_filename)
            ev = self.get_event()
            stations = sx.get_pyrocko_stations(time=ev.time)
            if len(stations) == 0:
                logger.warning(
                    'No stations found for time %s in file "%s".' % (
                        util.time_to_str(ev.time), stationxml_filename))

            for station in stations:
                logger.debug('Adding station: %s.%s.%s' % station.nsl())
                channels = station.get_channels()
                if len(channels) == 1 and channels[0].name.endswith('Z'):
                    logger.warning(
                        'Station "%s" has vertical component'
                        ' information only, adding mocked channels.'
                        % station.nsl_string())
                    station.add_channel(
                        model.Channel(channels[0].name[:-1] + 'N'))
                    station.add_channel(
                        model.Channel(channels[0].name[:-1] + 'E'))

                self.stations[station.nsl()] = station
def olddumpf(self, file):
    if self.extras:
        raise EventExtrasDumpError(
            'Event user-defined extras attributes cannot be dumped in the '
            '"basic" event file format. Use '
            'dump_events(..., format="yaml").')

    file.write('name = %s\n' % self.name)
    file.write('time = %s\n' % util.time_to_str(self.time))

    if self.lat != 0.0:
        file.write('latitude = %.12g\n' % self.lat)
    if self.lon != 0.0:
        file.write('longitude = %.12g\n' % self.lon)
    if self.north_shift != 0.0:
        file.write('north_shift = %.12g\n' % self.north_shift)
    if self.east_shift != 0.0:
        file.write('east_shift = %.12g\n' % self.east_shift)

    if self.magnitude is not None:
        file.write('magnitude = %g\n' % self.magnitude)
        file.write('moment = %g\n' %
                   moment_tensor.magnitude_to_moment(self.magnitude))

    if self.magnitude_type is not None:
        file.write('magnitude_type = %s\n' % self.magnitude_type)

    if self.depth is not None:
        file.write('depth = %.10g\n' % self.depth)

    if self.region is not None:
        file.write('region = %s\n' % self.region)

    if self.catalog is not None:
        file.write('catalog = %s\n' % self.catalog)

    if self.moment_tensor is not None:
        m = self.moment_tensor.m()
        sdr1, sdr2 = self.moment_tensor.both_strike_dip_rake()
        file.write(
            ('mnn = %g\nmee = %g\nmdd = %g\nmne = %g\nmnd = %g\nmed = %g\n'
             'strike1 = %g\ndip1 = %g\nrake1 = %g\n'
             'strike2 = %g\ndip2 = %g\nrake2 = %g\n') % (
                (m[0, 0], m[1, 1], m[2, 2], m[0, 1], m[0, 2], m[1, 2])
                + sdr1 + sdr2))

    if self.duration is not None:
        file.write('duration = %g\n' % self.duration)

    if self.tags:
        file.write('tags = %s\n' % ', '.join(self.tags))
def get_problem(self, event, target_groups, targets):
    base_source = gf.RectangularSource.from_pyrocko_event(
        event,
        anchor='top',
        decimation_factor=self.decimation_factor)

    subs = dict(
        event_name=event.name,
        event_time=util.time_to_str(event.time))

    problem = RectangularProblem(
        name=expand_template(self.name_template, subs),
        base_source=base_source,
        distance_min=self.distance_min,
        target_groups=target_groups,
        targets=targets,
        ranges=self.ranges,
        norm_exponent=self.norm_exponent)

    return problem
def save_single_events(self, fn, directory, plot=False):
    """
    Save table with all gains (for all events + stations)
    """
    if not self.method[0] == 'reference_nsl_med' \
            and not self.method == 'syn':
        indx = dict(
            zip(self.all_nslc_ids, num.arange(len(self.all_nslc_ids))))

        results_all = num.empty(
            (len(self.sections), len(self.all_nslc_ids)))
        results_all[:] = num.nan

        for i_ev, section in enumerate(self.sections):  # loop over events
            for nslc_id, scaling in section.iter_scalings():
                results_all[i_ev, indx[nslc_id]] = scaling
    else:
        results_all = self.results

    stats_list = self.all_nslc_ids
    if self.method == 'syn':
        stats_list = self.stations

    with open(os.path.join(directory, fn), 'w') as outfile:
        outfile.write(
            'Gain relative to station %s. \n' % (str(self.method[1])))
        outfile.write('Station:')
        for st in stats_list:
            if not self.method == 'syn':
                outfile.write(', ' + str(st[0]) + ' ' + str(st[1]))
            else:
                outfile.write(', ' + st)
        outfile.write('\n')

        for i_line, line in enumerate(results_all):
            if not self.method == 'syn':
                outfile.write(
                    util.time_to_str(self.sections[i_line].event.time))
            else:
                outfile.write(self.events[i_line])

            for item in line:
                outfile.write(', ' + str(item))
            outfile.write('\n')

    if plot:
        plot_allgains(self, results_all, stats_list, directory, fn)
def testTime(self):
    for fmt, accu in zip(
            ['%Y-%m-%d %H:%M:%S.3FRAC',
             '%Y-%m-%d %H:%M:%S.2FRAC',
             '%Y-%m-%d %H:%M:%S.1FRAC',
             '%Y-%m-%d %H:%M:%S',
             '%Y-%m-%d %H.%M.%S.3FRAC'],
            [0.001, 0.01, 0.1, 1., 0.001, 0.001]):

        ta = util.str_to_time('1960-01-01 10:10:10')
        tb = util.str_to_time('2020-01-01 10:10:10')

        for i in range(10000):
            t1 = ta + random() * (tb - ta)
            s = util.time_to_str(t1, format=fmt)
            t2 = util.str_to_time(s, format=fmt)
            assert abs(t1 - t2) < accu

            fmt_opt = re.sub(r'\.[0-9]FRAC$', '', fmt) + '.OPTFRAC'
            t3 = util.str_to_time(s, format=fmt_opt)
            assert abs(t1 - t3) < accu
def call(self):
    self.cleanup()
    viewer = self.get_viewer()

    traces = []
    for trs in self.chopper_selected_traces(fallback=True):
        for tr in trs:
            tr.lowpass(4, viewer.lowpass)
            tr.highpass(4, viewer.highpass)
            tr.downsample_to(self.target_deltat, snap=True, demean=False)

            time = tr.get_xdata()
            data = tr.get_ydata() * 1e-9
            data = detrend(data)
            # if self.start_zero:
            #     time -= min(time)
            time -= self.time_start

            nslct_min = [
                tr.network, tr.station, tr.location, tr.channel,
                util.time_to_str(tr.tmin).replace(" ", "")]

            ascii_trace = np.column_stack((time, data))
            traces.append(dict({'data': ascii_trace, 'nslc': nslct_min}))

    if not traces:
        self.fail('no traces selected')

    ''' Enter current channels and new channels below. '''
    current_channels = [
        'p0', 'p1', 'p2',
        'HHZ', 'HHN', 'HHE',
        'EHZ', 'EHN', 'EHE']
    new_channels = ['BHZ', 'BHN', 'BHE']

    channel_re_map = dict(zip(current_channels, new_channels * 3))

    # out_dir = self.output_filename(caption='Choose directory')
    for trs in traces:
        np.savetxt(
            'DISPL.%s.%s' % (
                trs['nslc'][1], channel_re_map[trs['nslc'][3]]),
            trs['data'], fmt='%1.8f')
def convert_event_marker(marker):
    ev = marker.get_event()
    depth = None
    if ev is not None:
        depth = ev.depth
    else:
        return None

    if depth is None:
        depth = 0.0

    ev_name = ev.name if ev.name else '(Event)'
    xmleventmarker = XMLEventMarker(
        eventname=ev_name,
        longitude=float(ev.lon),
        latitude=float(ev.lat),
        origintime=util.time_to_str(ev.time),
        depth=float(depth),
        magnitude=float(get_magnitude(ev)),
        active=['no', 'yes'][marker._active])

    return xmleventmarker
def get_problem(self, event, target_groups, targets):
    if event.depth is None:
        event.depth = 0.

    base_source = gf.VLVDSource.from_pyrocko_event(event)
    base_source.stf = gf.HalfSinusoidSTF(duration=event.duration or 0.0)

    subs = dict(
        event_name=event.name,
        event_time=util.time_to_str(event.time))

    problem = VLVDProblem(
        name=expand_template(self.name_template, subs),
        base_source=base_source,
        target_groups=target_groups,
        targets=targets,
        ranges=self.ranges,
        distance_min=self.distance_min,
        norm_exponent=self.norm_exponent,
        nthreads=self.nthreads)

    return problem
def _parse_events_page(self, page):
    import json
    doc = json.loads(page.decode('utf-8'))

    events = []
    for feat in doc['features']:
        props = feat['properties']
        geo = feat['geometry']
        lon, lat, depth = [float(x) for x in geo['coordinates']]
        t = util.str_to_time('1970-01-01 00:00:00') + \
            props['time'] * 0.001

        if props['mag'] is not None:
            mag = float(props['mag'])
        else:
            mag = None

        if props['place'] is not None:
            region = props['place'].encode('ascii', 'replace')
        else:
            region = None

        catalog = str(props['net'].upper())
        name = 'USGS-%s-' % catalog + util.time_to_str(
            t, format='%Y-%m-%d_%H-%M-%S.3FRAC')

        ev = model.Event(
            lat=lat,
            lon=lon,
            time=t,
            name=name,
            depth=depth*1000.,
            magnitude=mag,
            region=region,
            catalog=catalog)

        events.append(ev)

    return events
def _parse_events_page(self, page):
    import json
    doc = json.loads(page)

    events = []
    for feat in doc['features']:
        props = feat['properties']
        geo = feat['geometry']
        lon, lat, depth = [float(x) for x in geo['coordinates']]
        t = util.str_to_time('1970-01-01 00:00:00') + \
            props['time'] * 0.001

        if props['mag'] is not None:
            mag = float(props['mag'])
        else:
            mag = None

        if props['place'] is not None:
            region = props['place'].encode('ascii', 'replace')
        else:
            region = None

        catalog = str(props['net'].upper())
        name = 'USGS-%s-' % catalog + util.time_to_str(
            t, format='%Y-%m-%d_%H-%M-%S.3FRAC')

        ev = model.Event(
            lat=lat,
            lon=lon,
            time=t,
            name=name,
            depth=depth*1000.,
            magnitude=mag,
            region=region,
            catalog=catalog)

        events.append(ev)

    return events
def __str__(self):
    return '''--- Seisan Response File ---
station: %s
component: %s
start time: %s
latitude: %f
longitude: %f
elevation: %f
filetype: %s
comment: %s
sensor period: %g
sensor damping: %g
sensor sensitivity: %g
amplifier gain: %g
digitizer gain: %g
gain at 1 Hz: %g
filters: %s
''' % (self.station, self.component, util.time_to_str(self.tmin),
       self.latitude, self.longitude, self.elevation, self.filetype,
       self.comment, self.period, self.damping, self.sensor_sensitivity,
       self.amplifier_gain, self.digitizer_gain, self.gain_1hz,
       self.filters)
def save_data(self):
    by_nslc, times, tinc = self.extract()
    nslcs = sorted(by_nslc.keys())

    default_fn_template = \
        'spectrogram_%(network)s.%(station)s.%(location)s.%(channel)s.txt'

    fn_template = self.output_filename(
        'Template for output filenames', default_fn_template)

    for i, nslc in enumerate(nslcs):
        fn = fn_template % {
            'network': nslc[0],
            'station': nslc[1],
            'location': nslc[2],
            'channel': nslc[3]}

        with open(fn, 'w') as out:
            for tmid, f, a in by_nslc[nslc]:
                stmid = util.time_to_str(tmid)
                n = f.size
                for i in range(n):
                    out.write('%s %12.6e %12.6e\n' % (stmid, f[i], a[i]))
from pyrocko import pile, io, util
import time
import calendar

'''
Chop a pile of waveform traces into segments
'''

p = pile.make_pile(['test.mseed'])

# get timestamp for full hour before first data sample in all selected traces
tmin = calendar.timegm(time.gmtime(p.tmin)[:4] + (0, 0))

# iterate over the data, with a window length of one hour
for traces in p.chopper(tmin=tmin, tinc=3600):
    if traces:  # the list could be empty due to gaps
        window_start = traces[0].wmin
        timestring = util.time_to_str(window_start, format='%Y-%m-%d_%H')
        filepath = 'test_hourfiles/hourfile-%s.mseed' % timestring
        io.save(traces, filepath)
def get_hash(self):
    e = self
    return util.base36encode(abs(hash((
        util.time_to_str(e.time), str(e.lat), str(e.lon), str(e.depth),
        str(e.magnitude), e.catalog, e.name, e.region)))).lower()
def __str__(self):
    return '%s %s %s %g %g %s %s' % (
        self.name, util.time_to_str(self.time), self.magnitude,
        self.lat, self.lon, self.depth, self.region)
def sdatetime(t):
    return util.time_to_str(t, format='%Y-%m-%dT%H:%M:%S')
def call(self):
    '''Main work routine of the snuffling.'''

    by_nslc = {}
    tpad = self.twin * self.overlap/100. * 0.5
    tinc = self.twin - 2 * tpad
    times = []
    for traces in self.chopper_selected_traces(
            tinc=tinc, tpad=tpad, want_incomplete=False, fallback=True):

        for tr in traces:
            nslc = tr.nslc_id

            nwant = int(math.floor((tinc + 2*tpad) / tr.deltat))
            if nwant != tr.data_len():
                if tr.data_len() == nwant + 1:
                    tr.set_ydata(tr.get_ydata()[:-1])
                else:
                    continue

            tr.ydata = tr.ydata.astype(num.float)
            tr.ydata -= tr.ydata.mean()

            win = self.get_taper(self.taper_name, tr.data_len())
            tr.ydata *= win

            f, a = tr.spectrum(pad_to_pow2=True)
            df = f[1] - f[0]
            a = num.abs(a)**2
            a *= tr.deltat * 2. / (df*num.sum(win**2))
            a[0] /= 2.
            a[a.size/2] /= 2.

            if nslc not in by_nslc:
                by_nslc[nslc] = []

            tmid = 0.5*(tr.tmax + tr.tmin)
            by_nslc[nslc].append((tmid, f, a))
            times.append(tmid)

    if not by_nslc:
        self.fail('No complete data windows could be extracted for '
                  'given selection')

    fframe = self.figure_frame()
    fig = fframe.gcf()

    nslcs = sorted(by_nslc.keys())

    p = None
    ncols = len(nslcs) / 5 + 1
    nrows = (len(nslcs)-1) / ncols + 1

    tmin = min(times)
    tmax = max(times)
    nt = int(round((tmax - tmin) / tinc)) + 1
    t = num.linspace(tmin, tmax, nt)

    if (tmax - tmin) < 60:
        tref = util.day_start(tmin)
        tref += math.floor((tmin-tref) / 60.) * 60.
        t -= tref
        tunit = 's'
    elif (tmax - tmin) < 3600:
        tref = util.day_start(tmin)
        tref += math.floor((tmin-tref) / 3600.) * 3600.
        t -= tref
        t /= 60.
        tunit = 'min'
    else:
        tref = util.day_start(tmin)
        t -= tref
        t /= 3600.
        tunit = 'h'

    axes = []
    for i, nslc in enumerate(nslcs):
        p = fig.add_subplot(nrows, ncols, i+1, sharex=p, sharey=p)
        axes.append(p)

        group = by_nslc[nslc]

        f = group[0][1]
        nf = f.size

        a = num.zeros((nf, nt), dtype=num.float)
        a.fill(num.nan)
        for (t1, _, a1) in group:
            it = int(round((t1 - tmin) / tinc))
            if it < 0 or nt <= it:
                continue

            a[:, it] = a1

        if self.color_scale == 'log':
            a = num.log(a)
            label = 'log PSD'
        elif self.color_scale == 'sqrt':
            a = num.sqrt(a)
            label = 'sqrt PSD'
        else:
            label = 'PSD'

        a = num.ma.masked_invalid(a)

        min_a = num.min(a)
        max_a = num.max(a)
        mean_a = num.mean(a)
        std_a = num.std(a)

        zmin = max(min_a, mean_a - 3.0 * std_a)
        zmax = min(max_a, mean_a + 3.0 * std_a)

        pcm = p.pcolormesh(
            t, f, a, cmap=get_cmap(self.ctb_name), vmin=zmin, vmax=zmax)

        fmin = 2.0 / self.twin
        fmax = f[-1]

        p.set_title(
            '.'.join(x for x in nslc if x),
            ha='right', va='top', x=0.99, y=0.9)

        p.grid()
        p.set_yscale('log')

        divider = make_axes_locatable(p)
        cax = divider.append_axes('right', size='2%', pad=0.2)

        cbar = fig.colorbar(pcm, cax=cax)
        cbar.set_label(label)

        if i/ncols == (len(nslcs)-1)/ncols:
            p.set_xlabel('Time since %s [%s]' % (
                util.time_to_str(tref, format='%Y-%m-%d %H:%M'), tunit))

        if i % ncols == 0:
            p.set_ylabel('Frequency [Hz]')

        p.set_xlim(t[0], t[-1])
        p.set_ylim(fmin, fmax)

    for i, p in enumerate(axes):
        if i/ncols != (len(nslcs)-1)/ncols:
            for t in p.get_xticklabels():
                t.set_visible(False)

        if i % ncols != 0:
            for t in p.get_yticklabels():
                t.set_visible(False)
        else:
            tls = p.get_yticklabels()
            if len(tls) > 8:
                for t in tls[1::2]:
                    t.set_visible(False)

    try:
        fig.tight_layout()
    except AttributeError:
        pass

    if self.save:
        fig.savefig(self.output_filename(dir='psd.pdf'))

    fig.canvas.draw()
def stime_none_aware(t):
    if t is None:
        return '?'
    else:
        return util.time_to_str(t)
def time_as_string(self):
    return util.time_to_str(self.time)
def call(self):
    '''Main work routine of the snuffling.'''

    by_nslc, times, tinc = self.extract()

    fframe = self.figure_frame()
    fig = fframe.gcf()

    nslcs = sorted(by_nslc.keys())

    p = None
    ncols = int(len(nslcs) / 5 + 1)
    nrows = (len(nslcs)-1) / ncols + 1

    tmin = min(times)
    tmax = max(times)
    nt = int(round((tmax - tmin) / tinc)) + 1
    t = num.linspace(tmin, tmax, nt)

    if (tmax - tmin) < 60:
        tref = util.day_start(tmin)
        tref += math.floor((tmin-tref) / 60.) * 60.
        t -= tref
        tunit = 's'
    elif (tmax - tmin) < 3600:
        tref = util.day_start(tmin)
        tref += math.floor((tmin-tref) / 3600.) * 3600.
        t -= tref
        t /= 60.
        tunit = 'min'
    else:
        tref = util.day_start(tmin)
        t -= tref
        t /= 3600.
        tunit = 'h'

    axes = []
    for i, nslc in enumerate(nslcs):
        p = fig.add_subplot(nrows, ncols, i+1, sharex=p, sharey=p)
        axes.append(p)

        group = by_nslc[nslc]

        f = group[0][1]
        nf = f.size

        a = num.zeros((nf, nt), dtype=num.float)
        a.fill(num.nan)
        for (t1, _, a1) in group:
            it = int(round((t1 - tmin) / tinc))
            if it < 0 or nt <= it:
                continue

            a[:, it] = a1

        if self.color_scale == 'log':
            a = num.log(a)
            label = 'log PSD'
        elif self.color_scale == 'sqrt':
            a = num.sqrt(a)
            label = 'sqrt PSD'
        else:
            label = 'PSD'

        a = num.ma.masked_invalid(a)

        min_a = num.min(a)
        max_a = num.max(a)
        mean_a = num.mean(a)
        std_a = num.std(a)

        zmin = max(min_a, mean_a - 3.0 * std_a)
        zmax = min(max_a, mean_a + 3.0 * std_a)

        pcm = p.pcolormesh(
            t, f, a, cmap=get_cmap(self.ctb_name), vmin=zmin, vmax=zmax)

        fmin = 2.0 / self.twin
        fmax = f[-1]

        p.set_title(
            '.'.join(x for x in nslc if x),
            ha='right', va='top', x=0.99, y=0.9)

        p.grid()
        p.set_yscale('log')

        divider = make_axes_locatable(p)
        cax = divider.append_axes('right', size='2%', pad=0.2)

        cbar = fig.colorbar(pcm, cax=cax)
        cbar.set_label(label)

        if i/ncols == (len(nslcs)-1)/ncols:
            p.set_xlabel('Time since %s [%s]' % (
                util.time_to_str(tref, format='%Y-%m-%d %H:%M'), tunit))

        if i % ncols == 0:
            p.set_ylabel('Frequency [Hz]')

        p.set_xlim(t[0], t[-1])
        p.set_ylim(fmin, fmax)

    for i, p in enumerate(axes):
        if i/ncols != (len(nslcs)-1)/ncols:
            for t in p.get_xticklabels():
                t.set_visible(False)

        if i % ncols != 0:
            for t in p.get_yticklabels():
                t.set_visible(False)
        else:
            tls = p.get_yticklabels()
            if len(tls) > 8:
                for t in tls[1::2]:
                    t.set_visible(False)

    try:
        fig.tight_layout()
    except AttributeError:
        pass

    if self.save:
        fig.savefig(self.output_filename(dir='psd.pdf'))

    fig.canvas.draw()