def __str__(self):
    """Return a multi-line, human-readable summary of this SubPile."""

    def alphabetical(codes):
        # stable, sorted listing of the code collection
        return sorted(codes)

    lines = [
        'SubPile',
        'number of files: %i' % len(self.files),
        'timerange: %s - %s' % (util.gmctime(self.tmin),
                                util.gmctime(self.tmax)),
        'networks: %s' % ', '.join(alphabetical(self.networks)),
        'stations: %s' % ', '.join(alphabetical(self.stations)),
        'locations: %s' % ', '.join(alphabetical(self.locations)),
        'channels: %s' % ', '.join(alphabetical(self.channels)),
    ]
    return '\n'.join(lines) + '\n'
def __str__(self):
    """Return a multi-line, human-readable summary of this Pile."""

    summary = 'Pile\n'
    summary += 'number of subpiles: %i\n' % len(self.subpiles)
    summary += 'timerange: %s - %s\n' % (
        util.gmctime(self.tmin), util.gmctime(self.tmax))

    # one line per code category, codes listed alphabetically
    for label, codes in (('networks', self.networks),
                         ('stations', self.stations),
                         ('locations', self.locations),
                         ('channels', self.channels)):
        summary += '%s: %s\n' % (label, ', '.join(sorted(codes)))

    return summary
def __str__(self):
    """Return a multi-line, human-readable summary of this MemTracesFile."""

    lines = [
        'MemTracesFile',
        'abspath: %s' % self.abspath,
        'file mtime: %s' % util.gmctime(self.mtime),
        'number of traces: %i' % len(self.traces),
        'timerange: %s - %s' % (util.gmctime(self.tmin),
                                util.gmctime(self.tmax)),
        'networks: %s' % ', '.join(sorted(self.networks)),
        'stations: %s' % ', '.join(sorted(self.stations)),
        'locations: %s' % ', '.join(sorted(self.locations)),
        'channels: %s' % ', '.join(sorted(self.channels)),
    ]
    return '\n'.join(lines) + '\n'
def __str__(self):
    """Return a multi-line, human-readable summary of this TracesFile."""

    def joined(codes):
        # alphabetical, comma-separated listing
        return ', '.join(sorted(codes))

    s = 'TracesFile\n'
    s += 'abspath: %s\n' % self.abspath
    s += 'file mtime: %s\n' % util.gmctime(self.mtime)
    s += 'number of traces: %i\n' % len(self.traces)
    s += 'timerange: %s - %s\n' % (
        util.gmctime(self.tmin), util.gmctime(self.tmax))
    s += 'networks: %s\n' % joined(self.networks)
    s += 'stations: %s\n' % joined(self.stations)
    s += 'locations: %s\n' % joined(self.locations)
    s += 'channels: %s\n' % joined(self.channels)
    return s
def dumpf(self, file):
    """Write this event to *file* as plain ``key = value`` text lines.

    Optional attributes that are ``None`` are skipped.  The scalar
    moment is derived on the fly from the magnitude; the moment tensor,
    when present, is expanded into its six components and both
    strike/dip/rake solutions.
    """
    out = file.write
    out('name = %s\n' % self.name)
    out('time = %s\n' % util.gmctime(self.time))

    if self.lat is not None:
        out('latitude = %g\n' % self.lat)
    if self.lon is not None:
        out('longitude = %g\n' % self.lon)

    if self.magnitude is not None:
        out('magnitude = %g\n' % self.magnitude)
        # derived quantity, only meaningful when a magnitude is known
        out('moment = %g\n'
            % moment_tensor.magnitude_to_moment(self.magnitude))

    if self.depth is not None:
        out('depth = %g\n' % self.depth)
    if self.region is not None:
        out('region = %s\n' % self.region)
    if self.catalog is not None:
        out('catalog = %s\n' % self.catalog)

    if self.moment_tensor is not None:
        m = self.moment_tensor.m()
        sdr1, sdr2 = self.moment_tensor.both_strike_dip_rake()
        out(('mnn = %g\nmee = %g\nmdd = %g\nmne = %g\nmnd = %g\nmed = %g\n'
             'strike1 = %g\ndip1 = %g\nrake1 = %g\n'
             'strike2 = %g\ndip2 = %g\nrake2 = %g\n')
            % ((m[0, 0], m[1, 1], m[2, 2], m[0, 1], m[0, 2], m[1, 2])
               + sdr1 + sdr2))

    if self.duration is not None:
        out('duration = %g\n' % self.duration)
def dumpf(self, file):
    """Serialize the event to *file* in ``key = value`` text format.

    ``None``-valued optional attributes produce no output.  The moment
    is computed from the magnitude; a moment tensor is written out as
    its six components plus both strike/dip/rake solutions.
    """
    def put(text):
        file.write(text)

    put("name = %s\n" % self.name)
    put("time = %s\n" % util.gmctime(self.time))

    # optional numeric fields, written with %g
    for key, val in (("latitude", self.lat), ("longitude", self.lon)):
        if val is not None:
            put("%s = %g\n" % (key, val))

    if self.magnitude is not None:
        put("magnitude = %g\n" % self.magnitude)
        put("moment = %g\n"
            % moment_tensor.magnitude_to_moment(self.magnitude))

    if self.depth is not None:
        put("depth = %g\n" % self.depth)

    # optional string fields, written with %s
    for key, val in (("region", self.region), ("catalog", self.catalog)):
        if val is not None:
            put("%s = %s\n" % (key, val))

    if self.moment_tensor is not None:
        m = self.moment_tensor.m()
        sdr1, sdr2 = self.moment_tensor.both_strike_dip_rake()
        template = (
            "mnn = %g\nmee = %g\nmdd = %g\nmne = %g\nmnd = %g\nmed = %g\n"
            "strike1 = %g\ndip1 = %g\nrake1 = %g\n"
            "strike2 = %g\ndip2 = %g\nrake2 = %g\n")
        put(template % ((m[0, 0], m[1, 1], m[2, 2],
                         m[0, 1], m[0, 2], m[1, 2]) + sdr1 + sdr2))

    if self.duration is not None:
        put("duration = %g\n" % self.duration)
def __str__(self):
    """Return a one-line textual summary of the event."""
    fields = (
        self.name,
        util.gmctime(self.time),
        self.magnitude,
        self.lat,
        self.lon,
        self.depth,
        self.region,
    )
    return "%s %s %s %g %g %s %s" % fields
def _update_range(self, tmin, tmax):
    """Bring all shadow blocks overlapping ``[tmin, tmax]`` up to date.

    A block is considered stale when it is missing or when the newest
    file mtime inside its time window differs from the one recorded on
    the block.  Stale blocks are reprocessed in contiguous runs via
    ``self._process_blocks``.
    """
    first = int(math.floor(tmin / self._tinc))
    last = int(math.floor(tmax / self._tinc)) + 1

    dirty = []
    for iblock in xrange(first, last):
        wmin = iblock * self._tinc
        wmax = wmin + self._tinc
        mtime = util.gmctime(self._base.get_newest_mtime(wmin, wmax))

        block = self._blocks.get(iblock)
        if block is None or block.mtime != mtime:
            if block is None:
                block = ShadowBlock()
                self._blocks[iblock] = block
            dirty.append(iblock)
            block.mtime = mtime
        else:
            # a clean block terminates the current contiguous dirty run
            if dirty:
                self._process_blocks(dirty[0], dirty[-1] + 1)
                dirty = []

    if dirty:
        self._process_blocks(dirty[0], dirty[-1] + 1)
def _update_range(self, tmin, tmax):
    """Refresh shadow blocks covering ``[tmin, tmax]``.

    Stale blocks (missing, or whose recorded mtime no longer matches
    the newest file mtime in their window) are collected and handed to
    ``self._process_blocks`` in contiguous runs.
    """
    imin = int(math.floor(tmin / self._tinc))
    imax = int(math.floor(tmax / self._tinc)) + 1

    pending = []

    def flush():
        # process and reset the current contiguous run of stale blocks
        if pending:
            self._process_blocks(pending[0], pending[-1] + 1)
            del pending[:]

    for i in xrange(imin, imax):
        wmin, wmax = i * self._tinc, (i + 1) * self._tinc
        mtime = util.gmctime(self._base.get_newest_mtime(wmin, wmax))

        if i in self._blocks and self._blocks[i].mtime == mtime:
            flush()
        else:
            if i not in self._blocks:
                self._blocks[i] = ShadowBlock()
            pending.append(i)
            self._blocks[i].mtime = mtime

    flush()
def __str__(self): return '''--- Seisan Response File --- station: %s component: %s start time: %s latitude: %f longitude: %f elevation: %f filetype: %s comment: %s sensor period: %g sensor damping: %g sensor sensitivity: %g amplifier gain: %g digitizer gain: %g gain at 1 Hz: %g filters: %s ''' % (self.station, self.component, util.gmctime(self.tmin), self.latitude, self.longitude, self.elevation, self.filetype, self.comment, self.period, self.damping, self.sensor_sensitivity, self.amplifier_gain, self.digitizer_gain, self.gain_1hz, self.filters)
def __str__(self): return '''--- Seisan Response File --- station: %s component: %s start time: %s latitude: %f longitude: %f elevation: %f filetype: %s comment: %s sensor period: %g sensor damping: %g sensor sensitivity: %g amplifier gain: %g digitizer gain: %g gain at 1 Hz: %g filters: %s ''' % (self.station, self.component, util.gmctime(self.tmin), self.latitude, self.longitude, self.elevation, self.filetype, self.comment, self.period, self.damping, self.sensor_sensitivity, self.amplifier_gain, self.digitizer_gain, self.gain_1hz, self.filters )
def __str__(self):
    """Return a one-line textual summary of the event."""
    return '%s %s %s %g %g %s %s' % (
        self.name,
        util.gmctime(self.time),
        self.magnitude,
        self.lat,
        self.lon,
        self.depth,
        self.region)
def time_as_string(self):
    """Return ``self.time`` rendered as a GM time string via util.gmctime."""
    formatted = util.gmctime(self.time)
    return formatted
def _flush_buffer(self):
    """Turn the buffered (time, value) samples into trace objects.

    Estimates the sampling interval ``r_deltat`` and onset time
    ``r_tmin`` from the buffered sample times via ``self._regression``,
    re-detects the sampling rate and time origin when they disagree
    with recent history, compensates single-sample drifts/jumps, and
    finally emits the buffered values as ``trace.Trace`` objects
    through ``self.got_trace()``.  Does nothing while fewer than
    ``self.min_detection_size`` samples are buffered.
    """
    if len(self.times) < self.min_detection_size:
        return

    t = num.array(self.times, dtype=num.float)
    r_deltat, r_tmin = self._regression(t)
    if self.disallow_uneven_sampling_rates:
        # snap to a 1/N s sampling interval (integer sampling rate)
        r_deltat = 1./round(1./r_deltat)

    # check if deltat is consistent with expectations
    if self.deltat is not None and self.fixed_deltat is None:
        try:
            p_deltat = self.previous_deltats.median()
            # drop the current deltat only when the median of recent
            # estimates disagrees AND the history queue is at least
            # half full (avoids reacting to a few outliers)
            if (((self.disallow_uneven_sampling_rates
                  and abs(1./p_deltat - 1./self.deltat) > 0.5)
                 or (not self.disallow_uneven_sampling_rates
                     and abs((self.deltat - p_deltat)/self.deltat)
                     > self.deltat_tolerance))
                    and len(self.previous_deltats)
                    > 0.5*self.previous_deltats.capacity()):
                self.deltat = None
                self.previous_deltats.empty()
        except QueueIsEmpty:
            pass

    self.previous_deltats.push_back(r_deltat)

    # detect sampling rate
    if self.deltat is None:
        if self.fixed_deltat is not None:
            self.deltat = self.fixed_deltat
        else:
            self.deltat = r_deltat
            # must also set new time origin if sampling rate changes
            self.tmin = None
            logger.info(
                'Setting new sampling rate to %g Hz '
                '(sampling interval is %g s)'
                % (1./self.deltat, self.deltat))

    # check if onset has drifted / jumped
    if self.deltat is not None and self.tmin is not None:
        continuous_tmin = self.tmin + self.ncontinuous*self.deltat
        tmin_offset = r_tmin - continuous_tmin
        try:
            toffset = self.previous_tmin_offsets.median()
            if abs(toffset) > self.deltat*0.7 \
                    and len(self.previous_tmin_offsets) \
                    > 0.5*self.previous_tmin_offsets.capacity():
                soffset = int(round(toffset/self.deltat))
                logger.info('Detected drift/jump/gap of %g sample%s'
                            % (soffset, ['s', ''][abs(soffset) == 1]))
                if soffset == 1:
                    # one sample late: duplicate the last value
                    for values in self.values:
                        values.append(values[-1])
                    self.previous_tmin_offsets.add(-self.deltat)
                    logger.info(
                        'Adding one sample to compensate time drift')
                elif soffset == -1:
                    # one sample early: drop the last value
                    for values in self.values:
                        values.pop(-1)
                    self.previous_tmin_offsets.add(+self.deltat)
                    logger.info(
                        'Removing one sample to compensate time drift')
                else:
                    # larger jump: force re-detection of the time origin
                    self.tmin = None
                    self.previous_tmin_offsets.empty()
        except QueueIsEmpty:
            pass

        self.previous_tmin_offsets.push_back(tmin_offset)

    # detect onset time
    if self.tmin is None and self.deltat is not None:
        self.tmin = r_tmin
        self.ncontinuous = 0
        logger.info('Setting new time origin to %s'
                    % util.gmctime(self.tmin))

    if self.tmin is not None and self.deltat is not None:
        for channel, values in zip(self.channels, self.values):
            v = num.array(values, dtype=num.int)
            tr = trace.Trace(
                network=self.network,
                station=self.station,
                location=self.location,
                channel=channel,
                tmin=self.tmin + self.ncontinuous*self.deltat,
                deltat=self.deltat,
                ydata=v)
            self.got_trace(tr)
            self.ncontinuous += v.size

        # NOTE(review): [[]]*n aliases the SAME list n times — if other
        # code appends per-channel, every channel would share one
        # buffer; verify this is only used with one channel or that the
        # filling code replaces (not appends to) these lists.
        self.values = [[]] * len(self.channels)
        self.times = []
def _flush_buffer(self):
    """Turn the buffered (time, value) samples into trace objects.

    Estimates the sampling interval ``r_deltat`` and onset time
    ``r_tmin`` from the buffered sample times via ``self._regression``,
    re-detects the sampling rate and time origin when they disagree
    with recent history, compensates single-sample drifts/jumps, and
    finally emits the buffered values as ``trace.Trace`` objects
    through ``self.got_trace()``.  Does nothing while fewer than
    ``self.min_detection_size`` samples are buffered.
    """
    if len(self.times) < self.min_detection_size:
        return

    t = num.array(self.times, dtype=num.float)
    r_deltat, r_tmin = self._regression(t)
    if self.disallow_uneven_sampling_rates:
        # snap to a 1/N s sampling interval (integer sampling rate)
        r_deltat = 1. / round(1. / r_deltat)

    # check if deltat is consistent with expectations
    if self.deltat is not None and self.fixed_deltat is None:
        try:
            p_deltat = self.previous_deltats.median()
            # drop the current deltat only when the median of recent
            # estimates disagrees AND the history queue is at least
            # half full (avoids reacting to a few outliers)
            if (((self.disallow_uneven_sampling_rates
                  and abs(1. / p_deltat - 1. / self.deltat) > 0.5)
                 or (not self.disallow_uneven_sampling_rates and abs(
                     (self.deltat - p_deltat) / self.deltat)
                     > self.deltat_tolerance))
                    and len(self.previous_deltats)
                    > 0.5 * self.previous_deltats.capacity()):
                self.deltat = None
                self.previous_deltats.empty()
        except QueueIsEmpty:
            pass

    self.previous_deltats.push_back(r_deltat)

    # detect sampling rate
    if self.deltat is None:
        if self.fixed_deltat is not None:
            self.deltat = self.fixed_deltat
        else:
            self.deltat = r_deltat
            # must also set new time origin if sampling rate changes
            self.tmin = None
            logger.info(
                'Setting new sampling rate to %g Hz (sampling interval is %g s)'
                % (1. / self.deltat, self.deltat))

    # check if onset has drifted / jumped
    if self.deltat is not None and self.tmin is not None:
        continuous_tmin = self.tmin + self.ncontinuous * self.deltat
        tmin_offset = r_tmin - continuous_tmin
        try:
            toffset = self.previous_tmin_offsets.median()
            if abs(toffset) > self.deltat * 0.7 and len(
                    self.previous_tmin_offsets
            ) > 0.5 * self.previous_tmin_offsets.capacity():
                soffset = int(round(toffset / self.deltat))
                logger.info('Detected drift/jump/gap of %g sample%s'
                            % (soffset, ['s', ''][abs(soffset) == 1]))
                if soffset == 1:
                    # one sample late: duplicate the last value
                    for values in self.values:
                        values.append(values[-1])
                    self.previous_tmin_offsets.add(-self.deltat)
                    logger.info(
                        'Adding one sample to compensate time drift')
                elif soffset == -1:
                    # one sample early: drop the last value
                    for values in self.values:
                        values.pop(-1)
                    self.previous_tmin_offsets.add(+self.deltat)
                    logger.info(
                        'Removing one sample to compensate time drift')
                else:
                    # larger jump: force re-detection of the time origin
                    self.tmin = None
                    self.previous_tmin_offsets.empty()
        except QueueIsEmpty:
            pass

        self.previous_tmin_offsets.push_back(tmin_offset)

    # detect onset time
    if self.tmin is None and self.deltat is not None:
        self.tmin = r_tmin
        self.ncontinuous = 0
        logger.info('Setting new time origin to %s'
                    % util.gmctime(self.tmin))

    if self.tmin is not None and self.deltat is not None:
        for channel, values in zip(self.channels, self.values):
            v = num.array(values, dtype=num.int)
            tr = trace.Trace(network=self.network,
                             station=self.station,
                             location=self.location,
                             channel=channel,
                             tmin=self.tmin
                             + self.ncontinuous * self.deltat,
                             deltat=self.deltat,
                             ydata=v)
            self.got_trace(tr)
            self.ncontinuous += v.size

        # NOTE(review): [[]]*n aliases the SAME list n times — if other
        # code appends per-channel, every channel would share one
        # buffer; verify this is only used with one channel or that the
        # filling code replaces (not appends to) these lists.
        self.values = [[]] * len(self.channels)
        self.times = []
def __str__(self):
    """Return a one-line summary built from the wid2 header fields."""
    parts = [
        self.wid2.station,
        self.wid2.channel,
        self.wid2.auxid,
        self.wid2.sub_format,
        util.gmctime(self.wid2.tmin),
    ]
    return ' '.join(parts)
def format(self, value):
    """Render *value* (a timestamp) as a GM time string."""
    rendered = util.gmctime(value)
    return rendered