class Angle(FloatWithUnit):
    '''Float value with a fixed unit of degrees.'''
    # Unit is serialized as an XML attribute; defaults to degrees.
    unit = String.T(default='DEGREES', optional=True, xmlstyle='attribute')
class Map(Object):
    '''
    GMT-based map plot centered on ``(lat, lon)`` with a given ``radius``,
    drawn with an azimuthal (GMT ``-JA``) projection via :py:mod:`gmtpy`.

    Geometry, level-of-detail and the GMT instance are set up lazily on
    first access of the corresponding properties (``gmt``, ``scaler``,
    ``widget``, ...).
    '''

    lat = Float.T(optional=True)
    lon = Float.T(optional=True)
    radius = Float.T(optional=True)
    width = Float.T(default=20.)
    height = Float.T(default=14.)
    margins = List.T(Float.T())
    illuminate = Bool.T(default=True)
    skip_feature_factor = Float.T(default=0.02)
    show_grid = Bool.T(default=False)
    show_topo = Bool.T(default=True)
    show_topo_scale = Bool.T(default=False)
    show_center_mark = Bool.T(default=False)
    show_rivers = Bool.T(default=True)
    show_plates = Bool.T(default=False)
    show_boundaries = Bool.T(default=False)
    illuminate_factor_land = Float.T(default=0.5)
    illuminate_factor_ocean = Float.T(default=0.25)
    color_wet = Tuple.T(3, Int.T(), default=(216, 242, 254))
    color_dry = Tuple.T(3, Int.T(), default=(172, 208, 165))
    color_boundaries = Tuple.T(3, Int.T(), default=(1, 1, 1))
    topo_resolution_min = Float.T(
        default=40.,
        help='minimum resolution of topography [dpi]')
    topo_resolution_max = Float.T(
        default=200.,
        help='maximum resolution of topography [dpi]')
    replace_topo_color_only = FloatTile.T(
        optional=True,
        help='replace topo color while keeping topographic shading')
    topo_cpt_wet = String.T(default='light_sea')
    topo_cpt_dry = String.T(default='light_land')
    axes_layout = String.T(optional=True)
    custom_cities = List.T(City.T())
    gmt_config = Dict.T(String.T(), String.T())
    comment = String.T(optional=True)

    def __init__(self, gmtversion='newest', **kwargs):
        Object.__init__(self, **kwargs)
        # All lazily computed state; filled in by _setup_*() on demand.
        self._gmt = None
        self._scaler = None
        self._widget = None
        self._corners = None
        self._wesn = None
        self._minarea = None
        self._coastline_resolution = None
        self._rivers = None
        self._dems = None
        self._have_topo_land = None
        self._have_topo_ocean = None
        self._jxyr = None
        self._prep_topo_have = None
        self._labels = []
        self._area_labels = []
        self._gmtversion = gmtversion

    def save(self, outpath, resolution=75., oversample=2., size=None,
             width=None, height=None):
        '''
        Save the image.

        Save the image to ``outpath``. The format is determined by the
        filename extension. Formats are handled as follows: ``'.eps'`` and
        ``'.ps'`` produce EPS and PS, respectively, directly with GMT. If
        the file name ends with ``'.pdf'``, GMT output is fed through
        ``gmtpy-epstopdf`` to create a PDF file. For any other filename
        extension, output is first converted to PDF with
        ``gmtpy-epstopdf``, then with ``pdftocairo`` to PNG with a
        resolution oversampled by the factor ``oversample`` and finally
        the PNG is downsampled and converted to the target format with
        ``convert``. The resolution of rasterized target image can be
        controlled either by ``resolution`` in DPI or by specifying
        ``width`` or ``height`` or ``size``, where the latter fits the
        image into a square with given side length.
        '''
        # Accessing self.gmt triggers lazy setup and background drawing.
        gmt = self.gmt
        self.draw_labels()
        self.draw_axes()
        if self.show_topo and self.show_topo_scale:
            self._draw_topo_scale()

        gmt.save(outpath, resolution=resolution, oversample=oversample,
                 size=size, width=width, height=height)

    @property
    def scaler(self):
        # gmtpy.ScaleGuru for the map extent; computed on first access.
        if self._scaler is None:
            self._setup_geometry()

        return self._scaler

    @property
    def wesn(self):
        # (west, east, south, north) extent in degrees.
        if self._wesn is None:
            self._setup_geometry()

        return self._wesn

    @property
    def widget(self):
        if self._widget is None:
            self._setup()

        return self._widget

    @property
    def layout(self):
        if self._layout is None:
            self._setup()

        return self._layout

    @property
    def jxyr(self):
        # GMT -J/-X/-Y/-R argument list for the geographic projection.
        if self._jxyr is None:
            self._setup()

        return self._jxyr

    @property
    def pxyr(self):
        # GMT argument list for plotting in paper (point) coordinates.
        if self._pxyr is None:
            self._setup()

        return self._pxyr

    @property
    def gmt(self):
        # Full setup plus background (topo + base features) on first use.
        if self._gmt is None:
            self._setup()

        if self._have_topo_ocean is None:
            self._draw_background()

        return self._gmt

    def _setup(self):
        # Order matters: geometry -> level of detail -> GMT instance.
        if not self._widget:
            self._setup_geometry()

        self._setup_lod()
        self._setup_gmt()

    def _setup_geometry(self):
        # Derive the map region from page size, margins and radius.
        wpage, hpage = self.width, self.height
        ml, mr, mt, mb = self._expand_margins()
        wpage -= ml + mr
        hpage -= mt + mb

        wreg = self.radius * 2.0
        hreg = self.radius * 2.0
        if wpage >= hpage:
            wreg *= wpage/hpage
        else:
            hreg *= hpage/wpage

        self._wreg = wreg
        self._hreg = hreg

        self._corners = corners(self.lon, self.lat, wreg, hreg)
        west, east, south, north = extent(self.lon, self.lat, wreg, hreg, 10)

        x, y, z = ((west, east), (south, north), (-6000., 4500.))

        xax = gmtpy.Ax(mode='min-max', approx_ticks=4.)
        yax = gmtpy.Ax(mode='min-max', approx_ticks=4.)
        zax = gmtpy.Ax(mode='min-max', inc=1000., label='Height',
                       scaled_unit='km', scaled_unit_factor=0.001)

        scaler = gmtpy.ScaleGuru(data_tuples=[(x, y, z)], axes=(xax, yax, zax))

        par = scaler.get_params()

        west = par['xmin']
        east = par['xmax']
        south = par['ymin']
        north = par['ymax']

        self._wesn = west, east, south, north
        self._scaler = scaler

    def _setup_lod(self):
        # Choose coastline resolution / river drawing from map radius.
        w, e, s, n = self._wesn
        if self.radius > 1500.*km:
            coastline_resolution = 'i'
            rivers = False
        else:
            coastline_resolution = 'f'
            rivers = True

        self._minarea = (self.skip_feature_factor * self.radius/km)**2

        self._coastline_resolution = coastline_resolution
        self._rivers = rivers

        self._prep_topo_have = {}
        self._dems = {}

        cm2inch = gmtpy.cm/gmtpy.inch

        # Pixel spacing bounds (degrees) implied by dpi limits.
        dmin = 2.0 * self.radius * m2d / (
            self.topo_resolution_max * (self.height * cm2inch))
        dmax = 2.0 * self.radius * m2d / (
            self.topo_resolution_min * (self.height * cm2inch))

        for k in ['ocean', 'land']:
            self._dems[k] = topo.select_dem_names(k, dmin, dmax, self._wesn)
            if self._dems[k]:
                logger.debug('using topography dataset %s for %s'
                             % (','.join(self._dems[k]), k))

    def _expand_margins(self):
        # Expand the margins list into (left, right, top, bottom) [cm].
        # NOTE(review): a margins list of length 3 matches none of the
        # branches and raises UnboundLocalError — confirm intended.
        if len(self.margins) == 0 or len(self.margins) > 4:
            ml = mr = mt = mb = 2.0
        elif len(self.margins) == 1:
            ml = mr = mt = mb = self.margins[0]
        elif len(self.margins) == 2:
            ml = mr = self.margins[0]
            mt = mb = self.margins[1]
        elif len(self.margins) == 4:
            ml, mr, mt, mb = self.margins

        return ml, mr, mt, mb

    def _setup_gmt(self):
        # Build the gmtpy.GMT instance with a GMT4- or GMT5-style config.
        w, h = self.width, self.height
        scaler = self._scaler

        if gmtpy.is_gmt5(self._gmtversion):
            gmtconf = dict(
                MAP_TICK_PEN_PRIMARY='1.25p',
                MAP_TICK_PEN_SECONDARY='1.25p',
                MAP_TICK_LENGTH_PRIMARY='0.2c',
                MAP_TICK_LENGTH_SECONDARY='0.6c',
                FONT_ANNOT_PRIMARY='12p,1,black',
                FONT_LABEL='12p,1,black',
                PS_CHAR_ENCODING='ISOLatin1+',
                MAP_FRAME_TYPE='fancy',
                FORMAT_GEO_MAP='D',
                PS_MEDIA='Custom_%ix%i' % (
                    w*gmtpy.cm,
                    h*gmtpy.cm),
                PS_PAGE_ORIENTATION='portrait',
                MAP_GRID_PEN_PRIMARY='thinnest,0/50/0',
                MAP_ANNOT_OBLIQUE='6')
        else:
            gmtconf = dict(
                TICK_PEN='1.25p',
                TICK_LENGTH='0.2c',
                ANNOT_FONT_PRIMARY='1',
                ANNOT_FONT_SIZE_PRIMARY='12p',
                LABEL_FONT='1',
                LABEL_FONT_SIZE='12p',
                CHAR_ENCODING='ISOLatin1+',
                BASEMAP_TYPE='fancy',
                PLOT_DEGREE_FORMAT='D',
                PAPER_MEDIA='Custom_%ix%i' % (
                    w*gmtpy.cm,
                    h*gmtpy.cm),
                GRID_PEN_PRIMARY='thinnest/0/50/0',
                DOTS_PR_INCH='1200',
                OBLIQUE_ANNOTATION='6')

        # User-supplied gmt_config entries override the defaults.
        gmtconf.update(
            (k.upper(), v) for (k, v) in self.gmt_config.items())

        gmt = gmtpy.GMT(config=gmtconf, version=self._gmtversion)

        layout = gmt.default_layout()
        layout.set_fixed_margins(*[x*cm for x in self._expand_margins()])

        widget = layout.get_widget()
        widget['P'] = widget['J']
        widget['J'] = ('-JA%g/%g' % (self.lon, self.lat)) + '/%(width)gp'
        scaler['R'] = '-R%g/%g/%g/%gr' % self._corners

        # aspect = gmtpy.aspect_for_projection(
        #     gmt.installation['version'], *(widget.J() + scaler.R()))

        aspect = self._map_aspect(jr=widget.J() + scaler.R())
        widget.set_aspect(aspect)

        self._gmt = gmt
        self._layout = layout
        self._widget = widget
        self._jxyr = self._widget.JXY() + self._scaler.R()
        self._pxyr = self._widget.PXY() + [
            '-R%g/%g/%g/%g' % (0, widget.width(), 0, widget.height())]
        self._have_drawn_axes = False
        self._have_drawn_labels = False

    def _draw_background(self):
        self._have_topo_land = False
        self._have_topo_ocean = False
        if self.show_topo:
            # NOTE(review): _draw_topo() has no return statement, so
            # self._have_topo is always set to None here — confirm intended;
            # the land/ocean flags are set inside _draw_topo() itself.
            self._have_topo = self._draw_topo()

        self._draw_basefeatures()

    def _get_topo_tile(self, k):
        # Fetch the first available DEM tile for category k ('ocean'/'land').
        t = None
        demname = None
        for dem in self._dems[k]:
            t = topo.get(dem, self._wesn)
            demname = dem
            if t is not None:
                break

        if not t:
            raise NoTopo()

        return t, demname

    def _prep_topo(self, k):
        # Prepare (and cache per DEM name) grid and illumination files.
        gmt = self._gmt
        t, demname = self._get_topo_tile(k)

        if demname not in self._prep_topo_have:
            grdfile = gmt.tempfilename()

            is_flat = num.all(t.data[0] == t.data)

            gmtpy.savegrd(
                t.x(), t.y(), t.data, filename=grdfile, naming='lonlat')

            if self.illuminate and not is_flat:
                if k == 'ocean':
                    factor = self.illuminate_factor_ocean
                else:
                    factor = self.illuminate_factor_land

                ilumfn = gmt.tempfilename()
                gmt.grdgradient(
                    grdfile,
                    N='e%g' % factor,
                    A=-45,
                    G=ilumfn,
                    out_discard=True)

                ilumargs = ['-I%s' % ilumfn]
            else:
                ilumargs = []

            if self.replace_topo_color_only:
                # Resample the replacement tile onto the DEM grid, then
                # combine so shading comes from the DEM but color from t2.
                t2 = self.replace_topo_color_only
                grdfile2 = gmt.tempfilename()

                gmtpy.savegrd(
                    t2.x(), t2.y(), t2.data, filename=grdfile2,
                    naming='lonlat')

                if gmt.is_gmt5():
                    gmt.grdsample(
                        grdfile2,
                        G=grdfile,
                        n='l',
                        I='%g/%g' % (t.dx, t.dy),  # noqa
                        R=grdfile,
                        out_discard=True)
                else:
                    gmt.grdsample(
                        grdfile2,
                        G=grdfile,
                        Q='l',
                        I='%g/%g' % (t.dx, t.dy),  # noqa
                        R=grdfile,
                        out_discard=True)

                gmt.grdmath(
                    grdfile, '0.0', 'AND', '=', grdfile2,
                    out_discard=True)

                grdfile = grdfile2

            self._prep_topo_have[demname] = grdfile, ilumargs

        return self._prep_topo_have[demname]

    def _draw_topo(self):
        # Draw ocean and land topography, clipping each with pscoast.
        widget = self._widget
        scaler = self._scaler
        gmt = self._gmt
        cres = self._coastline_resolution
        minarea = self._minarea

        JXY = widget.JXY()
        R = scaler.R()

        try:
            grdfile, ilumargs = self._prep_topo('ocean')
            gmt.pscoast(D=cres, S='c', A=minarea, *(JXY+R))
            gmt.grdimage(grdfile, C=topo.cpt(self.topo_cpt_wet),
                         *(ilumargs+JXY+R))
            gmt.pscoast(Q=True, *(JXY+R))
            self._have_topo_ocean = True
        except NoTopo:
            self._have_topo_ocean = False

        try:
            grdfile, ilumargs = self._prep_topo('land')
            gmt.pscoast(D=cres, G='c', A=minarea, *(JXY+R))
            gmt.grdimage(grdfile, C=topo.cpt(self.topo_cpt_dry),
                         *(ilumargs+JXY+R))
            gmt.pscoast(Q=True, *(JXY+R))
            self._have_topo_land = True
        except NoTopo:
            self._have_topo_land = False

    def _draw_topo_scale(self, label='Elevation [km]'):
        # Draw a merged wet/dry color scale below the map, in km.
        dry = read_cpt(topo.cpt(self.topo_cpt_dry))
        wet = read_cpt(topo.cpt(self.topo_cpt_wet))
        combi = cpt_merge_wet_dry(wet, dry)
        for level in combi.levels:
            level.vmin /= km
            level.vmax /= km

        topo_cpt = self.gmt.tempfilename() + '.cpt'
        write_cpt(combi, topo_cpt)

        (w, h), (xo, yo) = self.widget.get_size()
        self.gmt.psscale(
            D='%gp/%gp/%gp/%gph' % (xo + 0.5*w, yo - 2.0*gmtpy.cm,
                                    w, 0.5*gmtpy.cm),
            C=topo_cpt,
            B='1:%s:' % label)

    def _draw_basefeatures(self):
        # Coastlines, optional rivers and political boundaries; fill
        # land/ocean only where no topography was drawn.
        gmt = self._gmt
        cres = self._coastline_resolution
        rivers = self._rivers
        minarea = self._minarea

        color_wet = self.color_wet
        color_dry = self.color_dry

        if self.show_rivers and rivers:
            rivers = ['-Ir/0.25p,%s' % gmtpy.color(self.color_wet)]
        else:
            rivers = []

        fill = {}
        if not self._have_topo_land:
            fill['G'] = color_dry

        if not self._have_topo_ocean:
            fill['S'] = color_wet

        if self.show_boundaries:
            fill['N'] = '1/1p,%s,%s' % (
                gmtpy.color(self.color_boundaries), 'solid')

        gmt.pscoast(
            D=cres,
            W='thinnest,%s' % gmtpy.color(darken(gmtpy.color_tup(color_dry))),
            A=minarea,
            *(rivers+self._jxyr), **fill)

        if self.show_plates:
            self.draw_plates()

    def _draw_axes(self):
        # Basemap frame, distance scale bar, optional center mark and
        # optional comment text in the lower-right corner.
        gmt = self._gmt
        scaler = self._scaler
        widget = self._widget

        if self.axes_layout is None:
            # Annotate the side facing away from the pole by default.
            if self.lat > 0.0:
                axes_layout = 'WSen'
            else:
                axes_layout = 'WseN'
        else:
            axes_layout = self.axes_layout

        scale_km = gmtpy.nice_value(self.radius/5.) / 1000.

        if self.show_center_mark:
            gmt.psxy(
                in_rows=[[self.lon, self.lat]],
                S='c20p', W='2p,black',
                *self._jxyr)

        if self.show_grid:
            btmpl = ('%(xinc)gg%(xinc)g:%(xlabel)s:/'
                     '%(yinc)gg%(yinc)g:%(ylabel)s:')
        else:
            btmpl = '%(xinc)g:%(xlabel)s:/%(yinc)g:%(ylabel)s:'

        gmt.psbasemap(
            B=(btmpl % scaler.get_params())+axes_layout,
            L=('x%gp/%gp/%g/%g/%gk' % (
                6./7*widget.width(),
                widget.height()/7.,
                self.lon,
                self.lat,
                scale_km)),
            *self._jxyr)

        if self.comment:
            font_size = self.gmt.label_font_size()

            _, east, south, _ = self._wesn
            if gmt.is_gmt5():
                row = [
                    1, 0,
                    '%gp,%s,%s' % (font_size, 0, 'black'), 'BR',
                    self.comment]

                farg = ['-F+f+j']
            else:
                row = [1, 0, font_size, 0, 0, 'BR', self.comment]
                farg = []

            gmt.pstext(
                in_rows=[row],
                N=True,
                R=(0, 1, 0, 1),
                D='%gp/%gp' % (-font_size*0.2, font_size*0.3),
                *(widget.PXY() + farg))

    def draw_axes(self):
        # Idempotent public wrapper around _draw_axes().
        if not self._have_drawn_axes:
            self._draw_axes()
            self._have_drawn_axes = True

    def _have_coastlines(self):
        # Check whether any coastline point falls inside the map region.
        gmt = self._gmt
        cres = self._coastline_resolution
        minarea = self._minarea

        checkfile = gmt.tempfilename()

        gmt.pscoast(
            M=True,
            D=cres,
            W='thinnest,black',
            A=minarea,
            out_filename=checkfile,
            *self._jxyr)

        points = []
        with open(checkfile, 'r') as f:
            for line in f:
                ls = line.strip()
                if ls.startswith('#') or ls.startswith('>') or ls == '':
                    continue
                plon, plat = [float(x) for x in ls.split()]
                points.append((plat, plon))

        # NOTE(review): num.float was removed in NumPy 1.24 — this line
        # breaks with modern NumPy; should be plain float.
        points = num.array(points, dtype=num.float)
        return num.any(points_in_region(points, self._wesn))

    def have_coastlines(self):
        # Access self.gmt first to force full setup.
        self.gmt
        return self._have_coastlines()

    def project(self, lats, lons, jr=None):
        '''Project geographic coordinates to paper coordinates [points].'''
        onepoint = False
        if isinstance(lats, float) and isinstance(lons, float):
            lats = [lats]
            lons = [lons]
            onepoint = True

        if jr is not None:
            j, r = jr
            gmt = gmtpy.GMT(version=self._gmtversion)
        else:
            j, _, _, r = self.jxyr
            gmt = self.gmt

        f = BytesIO()
        gmt.mapproject(j, r, in_columns=(lons, lats), out_stream=f, D='p')
        f.seek(0)
        data = num.loadtxt(f, ndmin=2)
        xs, ys = data.T
        if onepoint:
            xs = xs[0]
            ys = ys[0]

        return xs, ys

    def _map_box(self, jr=None):
        # Projected width/height of the map, from the region corners.
        ll_lon, ll_lat, ur_lon, ur_lat = self._corners

        xs_corner, ys_corner = self.project(
            (ll_lat, ur_lat), (ll_lon, ur_lon), jr=jr)

        w = xs_corner[1] - xs_corner[0]
        h = ys_corner[1] - ys_corner[0]

        return w, h

    def _map_aspect(self, jr=None):
        w, h = self._map_box(jr=jr)
        return h/w

    def _draw_labels(self):
        # Place point labels (choosing a non-overlapping anchor corner
        # per label via greedy cost minimization), then area labels at
        # the first candidate location free of conflicts.
        points_taken = []
        regions_taken = []

        def no_points_in_rect(xs, ys, xmin, ymin, xmax, ymax):
            # True if none of (xs, ys) lies strictly inside the rect.
            xx = not num.any(la(la(xmin < xs, xs < xmax),
                                la(ymin < ys, ys < ymax)))
            return xx

        def roverlaps(a, b):
            # Axis-aligned rectangle overlap test; rects are
            # (xmin, ymin, xmax, ymax).
            return (a[0] < b[2] and b[0] < a[2] and
                    a[1] < b[3] and b[1] < a[3])

        w, h = self._map_box()

        label_font_size = self.gmt.label_font_size()

        if self._labels:
            n = len(self._labels)

            lons, lats, texts, sx, sy, colors, fonts, font_sizes, styles = \
                list(zip(*self._labels))

            font_sizes = [
                (font_size or label_font_size) for font_size in font_sizes]

            # NOTE(review): num.float removed in NumPy 1.24.
            sx = num.array(sx, dtype=num.float)
            sy = num.array(sy, dtype=num.float)

            xs, ys = self.project(lats, lons)

            points_taken.append((xs, ys))

            dxs = num.zeros(n)
            dys = num.zeros(n)

            for i in range(n):
                dx, dy = gmtpy.text_box(
                    texts[i], font=fonts[i], font_size=font_sizes[i],
                    **styles[i])
                dxs[i] = dx
                dys[i] = dy

            la = num.logical_and
            # Per anchor corner (BL, TR, TL, BR): does the label box fit
            # inside the map?
            anchors_ok = (
                la(xs + sx + dxs < w, ys + sy + dys < h),
                la(xs - sx - dxs > 0., ys - sy - dys > 0.),
                la(xs + sx + dxs < w, ys - sy - dys > 0.),
                la(xs - sx - dxs > 0., ys + sy + dys < h),
            )

            arects = [
                (xs, ys, xs + sx + dxs, ys + sy + dys),
                (xs - sx - dxs, ys - sy - dys, xs, ys),
                (xs, ys - sy - dys, xs + sx + dxs, ys),
                (xs - sx - dxs, ys, xs, ys + sy + dys)]

            for i in range(n):
                for ianch in range(4):
                    anchors_ok[ianch][i] &= no_points_in_rect(
                        xs, ys, *[xxx[i] for xxx in arects[ianch]])

            anchor_choices = []
            anchor_take = []
            for i in range(n):
                choices = [ianch for ianch in range(4)
                           if anchors_ok[ianch][i]]
                anchor_choices.append(choices)
                if choices:
                    anchor_take.append(choices[0])
                else:
                    anchor_take.append(None)

            def cost(anchor_take):
                # Number of pairwise label-box overlaps for a placement.
                noverlaps = 0
                for i in range(n):
                    for j in range(n):
                        if i != j:
                            i_take = anchor_take[i]
                            j_take = anchor_take[j]
                            if i_take is None or j_take is None:
                                continue

                            r_i = [xxx[i] for xxx in arects[i_take]]
                            r_j = [xxx[j] for xxx in arects[j_take]]
                            if roverlaps(r_i, r_j):
                                noverlaps += 1

                return noverlaps

            cur_cost = cost(anchor_take)
            imax = 30
            # Greedy improvement, bounded number of sweeps.
            while cur_cost != 0 and imax > 0:
                for i in range(n):
                    for t in anchor_choices[i]:
                        anchor_take_new = list(anchor_take)
                        anchor_take_new[i] = t
                        new_cost = cost(anchor_take_new)
                        if new_cost < cur_cost:
                            anchor_take = anchor_take_new
                            cur_cost = new_cost

                imax -= 1

            # If conflicts remain, drop labels until cost reaches zero.
            while cur_cost != 0:
                for i in range(n):
                    anchor_take_new = list(anchor_take)
                    anchor_take_new[i] = None
                    new_cost = cost(anchor_take_new)
                    if new_cost < cur_cost:
                        anchor_take = anchor_take_new
                        cur_cost = new_cost
                        break

            anchor_strs = ['BL', 'TR', 'TL', 'BR']

            for i in range(n):
                ianchor = anchor_take[i]
                color = colors[i]
                if color is None:
                    color = 'black'

                if ianchor is not None:
                    regions_taken.append(
                        [xxx[i] for xxx in arects[ianchor]])

                    anchor = anchor_strs[ianchor]

                    yoff = [-sy[i], sy[i]][anchor[0] == 'B']
                    xoff = [-sx[i], sx[i]][anchor[1] == 'L']
                    if self.gmt.is_gmt5():
                        row = (
                            lons[i], lats[i],
                            '%i,%s,%s' % (font_sizes[i], fonts[i], color),
                            anchor,
                            texts[i])

                        farg = ['-F+f+j']
                    else:
                        row = (
                            lons[i], lats[i],
                            font_sizes[i], 0, fonts[i], anchor,
                            texts[i])
                        farg = ['-G%s' % color]

                    self.gmt.pstext(
                        in_rows=[row],
                        D='%gp/%gp' % (xoff, yoff),
                        *(self.jxyr + farg),
                        **styles[i])

        if self._area_labels:
            for lons, lats, text, color, font, font_size, style in \
                    self._area_labels:

                if font_size is None:
                    font_size = label_font_size

                if color is None:
                    color = 'black'

                if self.gmt.is_gmt5():
                    farg = ['-F+f+j']
                else:
                    farg = ['-G%s' % color]

                xs, ys = self.project(lats, lons)
                dx, dy = gmtpy.text_box(
                    text, font=font, font_size=font_size, **style)

                rects = [xs-0.5*dx, ys-0.5*dy, xs+0.5*dx, ys+0.5*dy]

                # NOTE(review): num.bool removed in NumPy 1.24.
                locs_ok = num.ones(xs.size, dtype=num.bool)

                for iloc in range(xs.size):
                    rcandi = [xxx[iloc] for xxx in rects]

                    locs_ok[iloc] = True
                    locs_ok[iloc] &= (
                        0 < rcandi[0] and rcandi[2] < w and
                        0 < rcandi[1] and rcandi[3] < h)

                    overlap = False
                    for r in regions_taken:
                        if roverlaps(r, rcandi):
                            overlap = True
                            break

                    locs_ok[iloc] &= not overlap

                    for xs_taken, ys_taken in points_taken:
                        locs_ok[iloc] &= no_points_in_rect(
                            xs_taken, ys_taken, *rcandi)

                        if not locs_ok[iloc]:
                            break

                rows = []
                # Use the first acceptable candidate location only.
                for iloc, (lon, lat) in enumerate(zip(lons, lats)):
                    if not locs_ok[iloc]:
                        continue

                    if self.gmt.is_gmt5():
                        row = (
                            lon, lat,
                            '%i,%s,%s' % (font_size, font, color),
                            'MC',
                            text)

                    else:
                        row = (
                            lon, lat,
                            font_size, 0, font, 'MC',
                            text)

                    rows.append(row)

                    regions_taken.append([xxx[iloc] for xxx in rects])
                    break

                self.gmt.pstext(
                    in_rows=rows,
                    *(self.jxyr + farg),
                    **style)

    def draw_labels(self):
        # Idempotent public wrapper around _draw_labels().
        self.gmt
        if not self._have_drawn_labels:
            self._draw_labels()
            self._have_drawn_labels = True

    def add_label(
            self, lat, lon, text,
            offset_x=5., offset_y=5.,
            color=None,
            font='1',
            font_size=None,
            style={}):
        # NOTE(review): mutable default argument `style={}`; it is only
        # copied when 'G' is popped — confirm callers never mutate it.
        if 'G' in style:
            style = style.copy()
            color = style.pop('G')

        self._labels.append(
            (lon, lat, text, offset_x, offset_y, color, font, font_size,
             style))

    def add_area_label(
            self, lat, lon, text,
            color=None,
            font='3',
            font_size=None,
            style={}):
        # lat/lon are sequences of candidate label positions.
        self._area_labels.append(
            (lon, lat, text, color, font, font_size, style))

    def cities_in_region(self):
        # Geonames cities within the map region plus user-supplied ones,
        # sorted by ascending population.
        from pyrocko.dataset import geonames
        cities = geonames.get_cities_region(region=self.wesn, minpop=0)
        cities.extend(self.custom_cities)
        cities.sort(key=lambda x: x.population)
        return cities

    def draw_cities(self,
                    exact=None,
                    include=[], exclude=[],
                    nmax_soft=10,
                    psxy_style=dict(S='s5p', G='black')):
        # Raise the population threshold until a reasonably small set of
        # cities remains (soft limit nmax_soft), then plot and label them.
        cities = self.cities_in_region()

        if exact is not None:
            cities = [c for c in cities if c.name in exact]
            minpop = None
        else:
            cities = [c for c in cities if c.name not in exclude]
            minpop = 10**3
            for minpop_new in [1e3, 3e3, 1e4, 3e4, 1e5, 3e5, 1e6, 3e6, 1e7]:
                cities_new = [
                    c for c in cities
                    if c.population > minpop_new or c.name in include]

                if len(cities_new) == 0 or (
                        len(cities_new) < 3 and len(cities) < nmax_soft*2):
                    break

                cities = cities_new
                minpop = minpop_new
                if len(cities) <= nmax_soft:
                    break

        if cities:
            lats = [c.lat for c in cities]
            lons = [c.lon for c in cities]

            self.gmt.psxy(
                in_columns=(lons, lats),
                *self.jxyr, **psxy_style)

            for c in cities:
                try:
                    text = c.name.encode('iso-8859-1').decode('iso-8859-1')
                except UnicodeEncodeError:
                    text = c.asciiname

                self.add_label(c.lat, c.lon, text)

        self._cities_minpop = minpop

    def add_stations(self, stations, psxy_style=dict()):
        # Plot station symbols and queue 'NET.STA' labels.
        default_psxy_style = {
            'S': 't8p',
            'G': 'black'
        }
        default_psxy_style.update(psxy_style)

        lats, lons = zip(*[od.ne_to_latlon(
            s.lat, s.lon, s.north_shift, s.east_shift) for s in stations])

        self.gmt.psxy(
            in_columns=(lons, lats),
            *self.jxyr, **default_psxy_style)

        for station in stations:
            self.add_label(station.lat, station.lon, '.'.join(
                x for x in (station.network, station.station) if x))

    def add_kite_scene(self, scene):
        # NOTE(review): builds a FloatTile from the scene but only
        # returns it — nothing is added to the map here; confirm intended.
        tile = FloatTile(
            scene.frame.llLon,
            scene.frame.llLat,
            scene.frame.dLon,
            scene.frame.dLat,
            scene.displacement)

        return tile

    def add_gnss_campaign(self, campaign, psxy_style=dict(), labels=True,
                          vertical=False):
        # Plot GNSS displacement vectors (horizontal, or vertical as
        # pseudo-east component) with psvelo, scaled to the page diagonal.
        offsets = num.array([math.sqrt(s.east.shift**2 + s.north.shift**2)
                             for s in campaign.stations])
        size = math.sqrt(self.height**2 + self.width**2)
        scale = (size/10.) / offsets.max()

        default_psxy_style = {
            'h': 0,
            'W': '0.5p,black',
            'G': 'black',
            'L': True,
            'S': 'e%dc/0.95/8' % scale,
        }
        default_psxy_style.update(psxy_style)

        lats, lons = zip(*[od.ne_to_latlon(
            s.lat, s.lon, s.north_shift, s.east_shift)
            for s in campaign.stations])

        if vertical:
            rows = [[lons[ista], lats[ista], 0., s.up.shift, s.east.sigma,
                     s.north.sigma, 0]
                    for ista, s in enumerate(campaign.stations)]
        else:
            rows = [[lons[ista], lats[ista], s.east.shift, s.north.shift,
                     s.east.sigma, s.north.sigma, 0]
                    for ista, s in enumerate(campaign.stations)]

        if labels:
            for row, sta in zip(rows, campaign.stations):
                row.append(sta.code)

        self.gmt.psvelo(
            in_rows=rows,
            *self.jxyr,
            **default_psxy_style)

    def draw_plates(self):
        # Draw tectonic plate boundaries (Peter Bird 2003 dataset), GSRM
        # velocities relative to one fixed plate, and plate name labels.
        from pyrocko.dataset import tectonics

        neast = 20
        nnorth = max(1, int(round(num.round(self._hreg/self._wreg * neast))))
        norths = num.linspace(-self._hreg*0.5, self._hreg*0.5, nnorth)
        easts = num.linspace(-self._wreg*0.5, self._wreg*0.5, neast)
        norths2 = num.repeat(norths, neast)
        easts2 = num.tile(easts, nnorth)
        lats, lons = od.ne_to_latlon(
            self.lat, self.lon, norths2, easts2)

        bird = tectonics.PeterBird2003()
        plates = bird.get_plates()

        color_plates = gmtpy.color('aluminium5')
        color_velocities = gmtpy.color('skyblue1')
        color_velocities_lab = gmtpy.color(darken(gmtpy.color_tup('skyblue1')))

        points = num.vstack((lats, lons)).T
        used = []
        for plate in plates:
            mask = plate.contains_points(points)
            if num.any(mask):
                used.append((plate, mask))

        if len(used) > 1:

            candi_fixed = {}

            label_data = []
            for plate, mask in used:

                # Sort candidate label positions by distance from the
                # plate's sample centroid.
                mean_north = num.mean(norths2[mask])
                mean_east = num.mean(easts2[mask])
                iorder = num.argsort(num.sqrt(
                    (norths2[mask] - mean_north)**2 +
                    (easts2[mask] - mean_east)**2))

                lat_candis = lats[mask][iorder]
                lon_candis = lons[mask][iorder]

                candi_fixed[plate.name] = lat_candis.size

                label_data.append((
                    lat_candis, lon_candis, plate, color_plates))

            boundaries = bird.get_boundaries()

            size = 2

            psxy_kwargs = []

            for boundary in boundaries:
                if num.any(points_in_region(boundary.points, self._wesn)):
                    for typ, part in boundary.split_types(
                            [['SUB'],
                             ['OSR', 'OTF', 'OCB', 'CTF', 'CCB', 'CRB']]):

                        lats, lons = part.T

                        kwargs = {}
                        if typ[0] == 'SUB':
                            if boundary.kind == '\\':
                                kwargs['S'] = 'f%g/%gp+t+r' % (
                                    0.45*size, 3.*size)
                            elif boundary.kind == '/':
                                kwargs['S'] = 'f%g/%gp+t+l' % (
                                    0.45*size, 3.*size)

                            kwargs['G'] = color_plates

                        kwargs['in_columns'] = (lons, lats)
                        # NOTE(review): trailing comma makes 'W' a
                        # one-element tuple, not a string — confirm
                        # gmtpy accepts this / whether it is a typo.
                        kwargs['W'] = '%gp,%s' % (size, color_plates),

                        psxy_kwargs.append(kwargs)

                        if boundary.kind == '\\':
                            if boundary.name2 in candi_fixed:
                                candi_fixed[boundary.name2] += neast*nnorth

                        elif boundary.kind == '/':
                            if boundary.name1 in candi_fixed:
                                candi_fixed[boundary.name1] += neast*nnorth

            # Plates ordered by decreasing sample count; None terminator.
            candi_fixed = [name for name in sorted(
                list(candi_fixed.keys()),
                key=lambda name: -candi_fixed[name])]

            candi_fixed.append(None)

            gsrm = tectonics.GSRM1()

            for name in candi_fixed:
                if name not in gsrm.plate_names() \
                        and name not in gsrm.plate_alt_names():

                    continue

                lats, lons, vnorth, veast, vnorth_err, veast_err, corr = \
                    gsrm.get_velocities(name, region=self._wesn)

                # NOTE(review): if no candidate plate is known to GSRM,
                # fixed_plate_name stays unbound and the label loop below
                # raises NameError — confirm whether this can happen.
                fixed_plate_name = name

                self.gmt.psvelo(
                    in_columns=(
                        lons, lats, veast, vnorth, veast_err, vnorth_err,
                        corr),
                    W='0.25p,%s' % color_velocities,
                    A='9p+e+g%s' % color_velocities,
                    S='e0.2p/0.95/10',
                    *self.jxyr)

                # Label a random subset of velocity vectors [mm/a?
                # units not established here — TODO confirm].
                for _ in range(len(lons) // 50 + 1):
                    ii = random.randint(0, len(lons)-1)
                    v = math.sqrt(vnorth[ii]**2 + veast[ii]**2)
                    self.add_label(
                        lats[ii], lons[ii], '%.0f' % v,
                        font_size=0.7*self.gmt.label_font_size(),
                        style=dict(
                            G=color_velocities_lab))

                break

            for (lat_candis, lon_candis, plate, color) in label_data:
                full_name = bird.full_name(plate.name)
                if plate.name == fixed_plate_name:
                    # Underline (GMT '@_') the fixed plate's name.
                    full_name = '@_' + full_name + '@_'

                self.add_area_label(
                    lat_candis, lon_candis,
                    full_name,
                    color=color,
                    font='3')

            for kwargs in psxy_kwargs:
                self.gmt.psxy(*self.jxyr, **kwargs)
class ReportConfig(HasPaths):
    '''Configuration of report output paths.'''
    # Base directory for all generated reports.
    reports_base_path = Path.T(default='reports')
    # Per-report subdirectory template; placeholders are filled in
    # elsewhere — presumably from event/problem names (TODO confirm).
    report_sub_path = String.T(default='${event_name}/${problem_name}')
class Duration(Object):
    '''Duration value with optional unit and uncertainty.'''
    unit = String.T(optional=True, xmlstyle='attribute')
    uncertainty = Float.T(optional=True)
    # The value itself is stored as the XML element's text content.
    value = Float.T(optional=True, xmlstyle='content')

    xmltagname = 'duration'
class GNSSCampaign(Object):
    '''Collection of GNSS campaign measurements (stations).'''

    stations = List.T(
        GNSSStation.T(),
        help='List of GNSS campaign measurements')
    name = String.T(
        help='Campaign name',
        default='Unnamed campaign')
    survey_start = DateTimestamp.T(
        optional=True)
    survey_end = DateTimestamp.T(
        optional=True)

    def __init__(self, *args, **kwargs):
        Object.__init__(self, *args, **kwargs)
        # Cached covariance / correlation matrices; built on first use.
        self._cov_mat = None
        self._cor_mat = None

    def add_station(self, station):
        # Note: caches (_cov_mat/_cor_mat) are not invalidated here.
        return self.stations.append(station)

    def get_station(self, station_code):
        '''Return the station with the given code.

        :raises ValueError: if no station with that code exists.
        '''
        for sta in self.stations:
            if sta.code == station_code:
                return sta
        raise ValueError('Could not find station %s' % station_code)

    def get_center_latlon(self):
        '''Geographic midpoint of all campaign stations.'''
        return od.geographic_midpoint_locations(self.stations)

    def get_radius(self):
        '''Half the diagonal distance of the stations' bounding box [m].'''
        coords = self.coordinates
        return od.distance_accurate50m(
            coords[:, 0].min(), coords[:, 1].min(),
            coords[:, 0].max(), coords[:, 1].max()) / 2.

    def get_covariance_matrix(self):
        '''Block-diagonal covariance matrix (3x3 block per station).'''
        if self._cov_mat is None:
            cov_arr = num.zeros((self.nstations*3, self.nstations*3))
            for ista, sta in enumerate(self.stations):
                cov_arr[ista*3:ista*3+3, ista*3:ista*3+3] = \
                    sta.get_covariance_matrix(full=True)

            self._cov_mat = cov_arr
        return self._cov_mat

    def get_correlation_matrix(self):
        '''Block-diagonal correlation matrix (3x3 block per station).'''
        if self._cor_mat is None:
            cov_arr = num.zeros((self.nstations*3, self.nstations*3))
            for ista, sta in enumerate(self.stations):
                cov_arr[ista*3:ista*3+3, ista*3:ista*3+3] = \
                    sta.get_correlation_matrix(full=True)

            self._cor_mat = cov_arr
        return self._cor_mat

    def dump(self, *args, **kwargs):
        # Regularize attribute values before serialization.
        self.regularize()
        Object.dump(self, *args, **kwargs)

    @property
    def coordinates(self):
        # (nstations, 2) array of effective (lat, lon) pairs.
        return num.array([loc.effective_latlon for loc in self.stations])

    @property
    def nstations(self):
        return len(self.stations)
class StationCorrection(Object):
    '''Per-channel correction: a time delay and an amplitude factor.'''
    # (network, station, location, channel) codes identifying the channel.
    codes = Tuple.T(4, String.T())
    delay = Float.T()
    factor = Float.T()
class Table2(Object):
    '''Table element bound to the secondary XML namespace ``ns2``.'''
    xmlns = ns2
    name = String.T()
    width = Int.T()
    length = Int.T()
class Longitude(FloatWithUnit):
    '''Type for longitude coordinate.'''

    unit = String.T(
        default='DEGREES',
        optional=True,
        xmlstyle='attribute')  # fixed unit

    datum = String.T(
        default='WGS84',
        optional=True,
        xmlstyle='attribute')
class Operator(Object):
    '''Operator of a station/network: agencies, contacts and web site.'''
    agency_list = List.T(Unicode.T(xmltagname='Agency'))
    contact_list = List.T(Person.T(xmltagname='Contact'))
    web_site = String.T(optional=True, xmltagname='WebSite')
class Frequency(FloatWithUnit):
    '''Frequency value with a fixed unit of Hertz.'''
    unit = String.T(
        default='HERTZ',
        optional=True,
        xmlstyle='attribute')
class SampleRate(FloatWithUnit):
    '''Sample rate in samples per second.'''
    unit = String.T(
        default='SAMPLES/S',
        optional=True,
        xmlstyle='attribute')
class Distance(FloatWithUnit):
    '''Extension of FloatWithUnit for distances, elevations, and depths.'''
    unit = String.T(
        default='METERS',
        optional=True,
        xmlstyle='attribute')
class Dip(FloatWithUnit):
    '''
    Instrument dip in degrees down from horizontal. Together azimuth and
    dip describe the direction of the sensitive axis of the instrument.
    '''
    unit = String.T(
        default='DEGREES',
        optional=True,
        xmlstyle='attribute')
class Azimuth(FloatWithUnit):
    '''Instrument azimuth, degrees clockwise from North.'''
    unit = String.T(
        default='DEGREES',
        optional=True,
        xmlstyle='attribute')
class CakeTiming(Object):
    '''
    Calculates and caches phase arrivals.

    The phase selection string supports a ``first(...)``/``last(...)``
    selector picking the earliest/latest arrival among the given phases,
    and an optional trailing ``+x``/``-x`` constant time offset [s],
    e.g. ``'first(p,P)-20'``.

    :param phase_selection: phase selection string as described above
    :param fallback_time: returned when no phase arrival was found for
        the given depth-distance-phase-selection-combination
    '''
    phase_selection = String.T()
    fallback_time = Float.T(optional=True)

    def __init__(self, phase_selection, fallback_time=None):
        # Cache of computed rays, keyed by (distance, depth).
        self.arrivals = defaultdict(dict)
        self.fallback_time = fallback_time
        self.which = None
        self.offset = 0.0  # default when no '+x'/'-x' suffix is given
        self.phase_selection = phase_selection
        _phase_selection = phase_selection

        # Split off an optional constant time offset suffix.
        if '+' in _phase_selection:
            _phase_selection, offset = _phase_selection.split('+')
            self.offset = float(offset)
        elif '-' in _phase_selection:
            _phase_selection, offset = _phase_selection.split('-')
            self.offset = -float(offset)

        if 'first' in _phase_selection:
            self.which = 'first'
        if 'last' in _phase_selection:
            self.which = 'last'

        if self.which:
            _phase_selection = self.strip(_phase_selection)

        self.phases = _phase_selection.split('|')

    def return_time(self, ray):
        '''Arrival time of *ray* plus the configured offset, or the
        fallback time when *ray* is None.'''
        if ray is None:
            return self.fallback_time

        return ray.t + self.offset

    def t(self, mod, z_dist, get_ray=False):
        '''
        Get the arrival time (or the ray itself) for a depth/distance pair.

        :param mod: velocity model to compute arrivals in
        :param z_dist: tuple ``(depth, distance)`` [m]
        :param get_ray: if True, return the ray instead of its time
        '''
        z, dist = z_dist
        if (dist, z) in self.arrivals:
            # Cache hit: reuse the previously selected ray.
            want = self.arrivals[(dist, z)]
        else:
            phases = [cake.PhaseDef(pid) for pid in self.phases]
            arrivals = mod.arrivals(
                distances=[dist * cake.m2d], phases=phases, zstart=z)
            if not arrivals:
                logger.warning(
                    'no phase at d=%s, z=%s.(return fallback time)'
                    % (dist, z))
                want = None
            else:
                want = self.phase_selector(arrivals)

            self.arrivals[(dist, z)] = want

        if get_ray:
            return want

        return self.return_time(want)

    def phase_selector(self, _list):
        # Pick the earliest or latest arrival, per the 'first'/'last'
        # selector; returns None when no selector was configured.
        if self.which == 'first':
            return min(_list, key=lambda x: x.t)
        if self.which == 'last':
            return max(_list, key=lambda x: x.t)
        return None

    def strip(self, ps):
        # Remove the selector keyword and its surrounding parentheses,
        # e.g. 'first(p,P)' -> 'p,P'.
        ps = ps.replace(self.which, '')
        ps = ps.rstrip(')')
        ps = ps.lstrip('(')
        return ps
class Response(Object):
    '''Instrument response: overall sensitivity plus response stages.'''

    resource_id = String.T(optional=True, xmlstyle='attribute')
    instrument_sensitivity = Sensitivity.T(
        optional=True,
        xmltagname='InstrumentSensitivity')
    instrument_polynomial = Polynomial.T(
        optional=True,
        xmltagname='InstrumentPolynomial')
    stage_list = List.T(ResponseStage.T(xmltagname='Stage'))

    def get_pyrocko_response(self, nslc, fake_input_units=None):
        '''
        Combine all stages into a single pyrocko response for channel
        ``nslc``.

        :param fake_input_units: if given, convert input units; raises
            ``NoResponseInformation`` when units are missing or the
            conversion is unknown.
        '''
        responses = []
        for stage in self.stage_list:
            responses.extend(stage.get_pyrocko_response(nslc))

        # With no stages, fall back to a flat response from the overall
        # instrument sensitivity.
        if not self.stage_list and self.instrument_sensitivity:
            responses.append(
                trace.PoleZeroResponse(
                    constant=self.instrument_sensitivity.value))

        if fake_input_units is not None:
            if not self.instrument_sensitivity or \
                    self.instrument_sensitivity.input_units is None:

                raise NoResponseInformation('no input units given')

            input_units = self.instrument_sensitivity.input_units.name

            try:
                # conversion maps (wanted, actual) unit pairs to a
                # conversion response (or None for identity).
                conresp = conversion[fake_input_units.upper(),
                                     input_units.upper()]
            except KeyError:
                raise NoResponseInformation(
                    'cannot convert between units: %s, %s'
                    % (fake_input_units, input_units))

            if conresp is not None:
                responses.append(conresp)

        return trace.MultiplyResponse(responses)

    @classmethod
    def from_pyrocko_pz_response(cls, presponse, input_unit, output_unit,
                                 normalization_frequency=1.0):
        '''
        Build a StationXML-style Response from a pyrocko pole-zero
        response, normalized at ``normalization_frequency`` [Hz].
        '''
        # Normalization factor so that |response| is 1 at the
        # normalization frequency (relative to the gain constant).
        norm_factor = 1.0 / float(
            abs(
                presponse.evaluate(
                    num.array([normalization_frequency]))[0]
                / presponse.constant))

        pzs = PolesZeros(
            pz_transfer_function_type='LAPLACE (RADIANS/SECOND)',
            normalization_factor=norm_factor,
            normalization_frequency=Frequency(normalization_frequency),
            zero_list=[
                PoleZero(real=FloatNoUnit(z.real),
                         imaginary=FloatNoUnit(z.imag))
                for z in presponse.zeros],
            pole_list=[
                PoleZero(real=FloatNoUnit(z.real),
                         imaginary=FloatNoUnit(z.imag))
                for z in presponse.poles])

        pzs.validate()

        stage = ResponseStage(
            number=1,
            poles_zeros_list=[pzs],
            stage_gain=Gain(float(abs(presponse.constant)) / norm_factor))

        resp = Response(
            instrument_sensitivity=Sensitivity(
                value=stage.stage_gain.value,
                input_units=Units(input_unit),
                output_units=Units(output_unit)),
            stage_list=[stage])

        return resp
class Event(Object):
    '''Seismic event representation

    :param lat: latitude of hypocenter (default 0.0)
    :param lon: longitude of hypocenter (default 0.0)
    :param time: origin time as float in seconds after '1970-01-01 00:00:00
    :param name: event identifier as string (optional)
    :param depth: source depth (optional)
    :param magnitude: magnitude of event (optional)
    :param region: source region (optional)
    :param catalog: name of catalog that lists this event (optional)
    :param moment_tensor: moment tensor as
        :py:class:`moment_tensor.MomentTensor` instance (optional)
    :param duration: source duration as float (optional)
    '''
    # NOTE(review): this is legacy Python-2-only code (see the
    # ``except Exception, e:`` syntax and the ``cmp``-based sorting below);
    # a Python-3 port of this class appears later in the codebase.

    lat = Float.T(default=0.0)
    lon = Float.T(default=0.0)
    time = Timestamp.T(default=util.str_to_time('1970-01-01 00:00:00'))
    name = String.T(default='', optional=True)
    depth = Float.T(optional=True)
    magnitude = Float.T(optional=True)
    magnitude_type = String.T(optional=True)
    region = String.T(optional=True)
    catalog = String.T(optional=True)
    moment_tensor = moment_tensor.MomentTensor.T(optional=True)
    duration = Float.T(optional=True)

    def __init__(self, lat=0., lon=0., time=0., name='', depth=None,
                 magnitude=None, magnitude_type=None, region=None, load=None,
                 loadf=None, catalog=None, moment_tensor=None, duration=None):

        # ``load``/``loadf`` allow construction directly from an old-style
        # event file (path or open file); values parsed from the file
        # override the other constructor arguments.
        vals = None
        if load is not None:
            vals = Event.oldload(load)
        elif loadf is not None:
            vals = Event.oldloadf(loadf)

        if vals:
            lat, lon, time, name, depth, magnitude, magnitude_type, region, \
                catalog, moment_tensor, duration = vals

        Object.__init__(self, lat=lat, lon=lon, time=time, name=name,
                        depth=depth, magnitude=magnitude,
                        magnitude_type=magnitude_type,
                        region=region, catalog=catalog,
                        moment_tensor=moment_tensor, duration=duration)

    def time_as_string(self):
        # Human-readable UTC origin time.
        return util.time_to_str(self.time)

    def set_name(self, name):
        self.name = name

    #def __str__(self):
    #    return '%s %s %s %g %g %s %s' % (self.name, util.time_to_str(self.time), self.magnitude, self.lat, self.lon, self.depth, self.region)

    def olddump(self, filename):
        '''Write this event to ``filename`` in the old event-file format.'''
        file = open(filename, 'w')
        self.olddumpf(file)
        file.close()

    def olddumpf(self, file):
        '''Write this event to an open file in ``key = value`` format.'''
        file.write('name = %s\n' % self.name)
        file.write('time = %s\n' % util.time_to_str(self.time))
        if self.lat is not None:
            file.write('latitude = %g\n' % self.lat)
        if self.lon is not None:
            file.write('longitude = %g\n' % self.lon)
        if self.magnitude is not None:
            file.write('magnitude = %g\n' % self.magnitude)
            # Scalar moment is derived, not stored on the object.
            file.write('moment = %g\n' %
                       moment_tensor.magnitude_to_moment(self.magnitude))
        if self.magnitude_type is not None:
            file.write('magnitude_type = %s\n' % self.magnitude_type)
        if self.depth is not None:
            file.write('depth = %g\n' % self.depth)
        if self.region is not None:
            file.write('region = %s\n' % self.region)
        if self.catalog is not None:
            file.write('catalog = %s\n' % self.catalog)

        if self.moment_tensor is not None:
            # Dump both the tensor components and the two equivalent
            # double-couple (strike, dip, rake) solutions.
            m = self.moment_tensor.m()
            sdr1, sdr2 = self.moment_tensor.both_strike_dip_rake()
            file.write((
                'mnn = %g\nmee = %g\nmdd = %g\nmne = %g\nmnd = %g\nmed = %g\n'
                + 'strike1 = %g\ndip1 = %g\nrake1 = %g\nstrike2 = %g\ndip2 = %g\nrake2 = %g\n'
            ) % ((m[0, 0], m[1, 1], m[2, 2], m[0, 1], m[0, 2], m[1, 2])
                 + sdr1 + sdr2))

        if self.duration is not None:
            file.write('duration = %g\n' % self.duration)

    @staticmethod
    def unique(events, deltat=10.,
               group_cmp=(lambda a, b: cmp(a.catalog, b.catalog))):
        '''Select one representative event per group of near-duplicates.

        Events are grouped with :py:meth:`grouped`; within each group the
        events are sorted with ``group_cmp`` and the last one is kept.
        (Python 2 only: ``cmp`` and ``list.sort(cmp)`` do not exist in
        Python 3.)
        '''
        groups = Event.grouped(events, deltat)

        events = []
        for group in groups:
            if group:
                group.sort(group_cmp)
                events.append(group[-1])

        return events

    @staticmethod
    def grouped(events, deltat=10.):
        '''Group events whose origin times differ by less than ``deltat`` s.

        Returns a list of non-empty groups, sorted by mean origin time.
        '''
        events = list(events)
        groups = []
        for ia, a in enumerate(events):
            groups.append([])
            haveit = False
            # Attach to the group of the first earlier event close in time.
            for ib, b in enumerate(events[:ia]):
                if abs(b.time - a.time) < deltat:
                    groups[ib].append(a)
                    haveit = True
                    break

            if not haveit:
                groups[ia].append(a)

        groups = [g for g in groups if g]
        groups.sort(key=lambda g: sum(e.time for e in g) / len(g))
        return groups

    @staticmethod
    def dump_catalog(events, filename):
        '''Write several events to one file, separated by dashed lines.'''
        file = open(filename, 'w')
        try:
            i = 0
            for ev in events:
                # Separator goes *before* every event except the first.
                if i != 0:
                    file.write(
                        '--------------------------------------------\n')

                ev.olddumpf(file)
                i += 1
        finally:
            file.close()

    @staticmethod
    def oldload(filename):
        '''Parse a single event from a file in the old event-file format.'''
        with open(filename, 'r') as file:
            return Event.oldloadf(file)

    @staticmethod
    def oldloadf(file):
        '''Parse one event from an open old-format event file.

        Returns the tuple of constructor values consumed by ``__init__``.

        :raises EOF: when the file yields no data at all.
        :raises EmptyEvent: when only a separator line was found.
        :raises FileParseError: on any parsing problem.
        '''
        d = {}
        try:
            for line in file:
                if line.lstrip().startswith('#'):
                    continue

                toks = line.split(' = ', 1)
                if len(toks) == 2:
                    k, v = toks[0].strip(), toks[1].strip()
                    if k in ('name', 'region', 'catalog', 'magnitude_type'):
                        d[k] = v
                    # NOTE(review): 'duration' appears twice in this key
                    # list; harmless, but the duplicate could be dropped.
                    if k in (
                            'latitude longitude magnitude depth duration mnn mee mdd mne mnd med strike1 dip1 rake1 strike2 dip2 rake2 duration'
                            .split()):
                        d[k] = float(v)
                    if k == 'time':
                        d[k] = util.str_to_time(v)

                if line.startswith('---'):
                    d['have_separator'] = True
                    break

        except Exception, e:
            raise FileParseError(e)

        if not d:
            raise EOF()

        if 'have_separator' in d and len(d) == 1:
            raise EmptyEvent()

        # Prefer a full moment tensor if all six components are present,
        # otherwise fall back to a double-couple from strike/dip/rake.
        mt = None
        m6 = [d[x] for x in 'mnn mee mdd mne mnd med'.split() if x in d]
        if len(m6) == 6:
            mt = moment_tensor.MomentTensor(m=moment_tensor.symmat6(*m6))
        else:
            sdr = [d[x] for x in 'strike1 dip1 rake1'.split() if x in d]
            if len(sdr) == 3:
                moment = 1.0
                if 'moment' in d:
                    moment = d['moment']
                elif 'magnitude' in d:
                    moment = moment_tensor.magnitude_to_moment(d['magnitude'])

                mt = moment_tensor.MomentTensor(strike=sdr[0], dip=sdr[1],
                                                rake=sdr[2],
                                                scalar_moment=moment)

        return (d.get('latitude', 0.0), d.get('longitude', 0.0),
                d.get('time', 0.0), d.get('name', ''), d.get('depth', None),
                d.get('magnitude', None), d.get('magnitude_type', None),
                d.get('region', None), d.get('catalog', None), mt,
                d.get('duration', None))
class FDSNStationXML(Object):
    '''Top-level type for Station XML. Required field are Source (network ID
    of the institution sending the message) and one or more Network
    containers or one or more Station containers.'''

    schema_version = Float.T(default=1.0, xmlstyle='attribute')
    source = String.T(xmltagname='Source')
    sender = String.T(optional=True, xmltagname='Sender')
    module = String.T(optional=True, xmltagname='Module')
    module_uri = String.T(optional=True, xmltagname='ModuleURI')
    created = Timestamp.T(xmltagname='Created')
    network_list = List.T(Network.T(xmltagname='Network'))

    xmltagname = 'FDSNStationXML'
    guessable_xmlns = [guts_xmlns]

    def get_pyrocko_stations(self, nslcs=None, nsls=None, time=None,
                             timespan=None, inconsistencies='warn'):
        '''Extract Pyrocko station objects.

        One station object is created per (network, station, location)
        combination; stations without channels become a single '*' location.

        :param nslcs: if given, restrict to these
            (network, station, location, channel) code tuples.
        :param nsls: if given, restrict to these
            (network, station, location) code tuples.
        :param time: single instant to select epochs spanning it.
        :param timespan: (tmin, tmax) to select epochs overlapping it.
        :param inconsistencies: 'raise' or 'warn' — how channel metadata
            conflicts within one station are handled downstream.
        '''
        assert inconsistencies in ('raise', 'warn')

        if nslcs is not None:
            nslcs = set(nslcs)

        if nsls is not None:
            nsls = set(nsls)

        # Epoch filter arguments: empty tuple means "any time".
        tt = ()
        if time is not None:
            tt = (time,)
        elif timespan is not None:
            tt = timespan

        pstations = []
        for network in self.network_list:
            if not network.spans(*tt):
                continue

            for station in network.station_list:
                if not station.spans(*tt):
                    continue

                if station.channel_list:
                    # Group channels by location code; each location becomes
                    # its own Pyrocko station.
                    loc_to_channels = {}
                    for channel in station.channel_list:
                        if not channel.spans(*tt):
                            continue

                        loc = channel.location_code.strip()
                        if loc not in loc_to_channels:
                            loc_to_channels[loc] = []

                        loc_to_channels[loc].append(channel)

                    for loc in sorted(loc_to_channels.keys()):
                        channels = loc_to_channels[loc]
                        if nslcs is not None:
                            channels = [channel for channel in channels
                                        if (network.code, station.code, loc,
                                            channel.code) in nslcs]

                        if not channels:
                            continue

                        nsl = network.code, station.code, loc
                        if nsls is not None and nsl not in nsls:
                            continue

                        pstations.append(
                            pyrocko_station_from_channels(
                                nsl, channels,
                                inconsistencies=inconsistencies))
                else:
                    # No channel metadata: emit a station with wildcard
                    # location code.
                    pstations.append(pyrocko.model.Station(
                        network.code, station.code, '*',
                        lat=station.latitude.value,
                        lon=station.longitude.value,
                        elevation=value_or_none(station.elevation),
                        name=station.description or ''))

        return pstations

    @classmethod
    def from_pyrocko_stations(cls, pyrocko_stations):
        ''' Generate :py:class:`FDSNStationXML` from list of
        :py:class;`pyrocko.model.Station` instances.

        :param pyrocko_stations: list of :py:class;`pyrocko.model.Station`
            instances.
        '''
        from collections import defaultdict
        network_dict = defaultdict(list)
        for s in pyrocko_stations:
            network, station, location = s.nsl()
            channel_list = []
            for c in s.channels:
                channel_list.append(
                    Channel(
                        location_code=location,
                        code=c.name,
                        latitude=Latitude(value=s.lat),
                        longitude=Longitude(value=s.lon),
                        elevation=Distance(value=s.elevation),
                        depth=Distance(value=s.depth),
                        azimuth=Azimuth(value=c.azimuth),
                        dip=Dip(value=c.dip)))

            network_dict[network].append(
                Station(
                    code=station,
                    latitude=Latitude(value=s.lat),
                    longitude=Longitude(value=s.lon),
                    elevation=Distance(value=s.elevation),
                    channel_list=channel_list))

        timestamp = time.time()
        network_list = []
        for k, station_list in network_dict.items():
            network_list.append(
                Network(
                    code=k, station_list=station_list,
                    total_number_stations=len(station_list)))

        sxml = FDSNStationXML(
            source='from pyrocko stations list', created=timestamp,
            network_list=network_list)

        sxml.validate()
        return sxml

    def iter_network_stations(self, net=None, sta=None, time=None,
                              timespan=None):
        '''Yield (network, station) pairs matching codes and time filters.'''
        tt = ()
        if time is not None:
            tt = (time,)
        elif timespan is not None:
            tt = timespan

        for network in self.network_list:
            if not network.spans(*tt) or (
                    net is not None and network.code != net):
                continue

            for station in network.station_list:
                if not station.spans(*tt) or (
                        sta is not None and station.code != sta):
                    continue

                yield (network, station)

    def iter_network_station_channels(self, net=None, sta=None, loc=None,
                                      cha=None, time=None, timespan=None):
        '''Yield (network, station, channel) triples matching the filters.'''
        if loc is not None:
            loc = loc.strip()

        tt = ()
        if time is not None:
            tt = (time,)
        elif timespan is not None:
            tt = timespan

        for network in self.network_list:
            if not network.spans(*tt) or (
                    net is not None and network.code != net):
                continue

            for station in network.station_list:
                if not station.spans(*tt) or (
                        sta is not None and station.code != sta):
                    continue

                if station.channel_list:
                    for channel in station.channel_list:
                        if (not channel.spans(*tt) or
                                (cha is not None and channel.code != cha) or
                                (loc is not None and
                                 channel.location_code.strip() != loc)):
                            continue

                        yield (network, station, channel)

    def get_channel_groups(self, net=None, sta=None, loc=None, cha=None,
                           time=None, timespan=None):
        '''Group channels by (net, sta, loc) and (band+instrument code, unit).

        Returns ``{(net, sta, loc): {(bic, unit): [channels]}}``. Groups
        with inconsistent sampling rates are dropped with a warning.
        '''
        groups = {}
        for network, station, channel in self.iter_network_station_channels(
                net, sta, loc, cha, time=time, timespan=timespan):

            net = network.code
            sta = station.code
            cha = channel.code
            loc = channel.location_code.strip()
            if len(cha) == 3:
                bic = cha[:2]  # band and instrument code according to SEED
            elif len(cha) == 1:
                bic = ''
            else:
                bic = cha

            if channel.response and \
                    channel.response.instrument_sensitivity and \
                    channel.response.instrument_sensitivity.input_units:

                unit = channel.response.instrument_sensitivity.input_units.name
            else:
                unit = None

            bic = (bic, unit)

            k = net, sta, loc
            if k not in groups:
                groups[k] = {}

            if bic not in groups[k]:
                groups[k][bic] = []

            groups[k][bic].append(channel)

        for nsl, bic_to_channels in groups.items():
            bad_bics = []
            for bic, channels in bic_to_channels.items():
                sample_rates = []
                for channel in channels:
                    sample_rates.append(channel.sample_rate.value)

                if not same(sample_rates):
                    scs = ','.join(channel.code for channel in channels)
                    srs = ', '.join('%e' % x for x in sample_rates)
                    err = 'ignoring channels with inconsistent sampling ' + \
                          'rates (%s.%s.%s.%s: %s)' % (nsl + (scs, srs))

                    # NOTE(review): ``logger.warn`` is deprecated in favour
                    # of ``logger.warning``.
                    logger.warn(err)
                    bad_bics.append(bic)

            for bic in bad_bics:
                del bic_to_channels[bic]

        return groups

    def choose_channels(
            self,
            target_sample_rate=None,
            priority_band_code=['H', 'B', 'M', 'L', 'V', 'E', 'S'],
            priority_instrument_code=['H', 'L'],
            priority_units=['M/S', 'M/S**2'],
            time=None,
            timespan=None):
        '''Select one preferred channel group per (net, sta, loc).

        Candidate groups are ranked by completeness (number of channels),
        band-code priority, sample rate and unit/instrument priority; the
        best-ranked group's channels are returned keyed by full nslc codes.

        NOTE(review): the list-typed defaults are shared between calls;
        they are only read here, so this is safe, but treat them as
        read-only.
        '''
        nslcs = {}
        for nsl, bic_to_channels in self.get_channel_groups(
                time=time, timespan=timespan).items():

            useful_bics = []
            for bic, channels in bic_to_channels.items():
                rate = channels[0].sample_rate.value

                # Reject groups slower than the target (small tolerance for
                # floating-point rate values).
                if target_sample_rate is not None and \
                        rate < target_sample_rate*0.99999:
                    continue

                if len(bic[0]) == 2:
                    if bic[0][0] not in priority_band_code:
                        continue

                    if bic[0][1] not in priority_instrument_code:
                        continue

                unit = bic[1]

                prio_unit = len(priority_units)
                try:
                    prio_unit = priority_units.index(unit)
                except ValueError:
                    pass

                prio_inst = len(priority_instrument_code)
                prio_band = len(priority_band_code)
                if len(channels[0].code) == 3:
                    try:
                        prio_inst = priority_instrument_code.index(
                            channels[0].code[1])
                    except ValueError:
                        pass

                    try:
                        prio_band = priority_band_code.index(
                            channels[0].code[0])
                    except ValueError:
                        pass

                # Without a target rate, prefer *higher* rates (negate so
                # ascending sort picks them first).
                if target_sample_rate is None:
                    rate = -rate

                useful_bics.append((-len(channels), prio_band, rate,
                                    prio_unit, prio_inst, bic))

            useful_bics.sort()

            for _, _, rate, _, _, bic in useful_bics:
                channels = sorted(bic_to_channels[bic])
                if channels:
                    for channel in channels:
                        nslcs[nsl + (channel.code,)] = channel

                    break

        return nslcs

    def get_pyrocko_response(self, nslc, time=None, timespan=None,
                             fake_input_units=None):
        '''Get the unique Pyrocko response for one nslc code combination.

        :raises NoResponseInformation: when no matching response is found.
        :raises MultipleResponseInformation: when the selection is ambiguous.
        '''
        net, sta, loc, cha = nslc
        resps = []
        for _, _, channel in self.iter_network_station_channels(
                net, sta, loc, cha, time=time, timespan=timespan):
            resp = channel.response
            if resp:
                resps.append(resp.get_pyrocko_response(
                    nslc, fake_input_units=fake_input_units))

        if not resps:
            raise NoResponseInformation('%s.%s.%s.%s' % nslc)
        elif len(resps) > 1:
            raise MultipleResponseInformation('%s.%s.%s.%s' % nslc)

        return resps[0]

    @property
    def n_code_list(self):
        # Sorted unique network codes.
        return sorted(set(x.code for x in self.network_list))

    @property
    def ns_code_list(self):
        # Sorted unique (network, station) code pairs.
        nss = set()
        for network in self.network_list:
            for station in network.station_list:
                nss.add((network.code, station.code))

        return sorted(nss)

    @property
    def nsl_code_list(self):
        # Sorted unique (network, station, location) code triples.
        nsls = set()
        for network in self.network_list:
            for station in network.station_list:
                for channel in station.channel_list:
                    nsls.add(
                        (network.code, station.code, channel.location_code))

        return sorted(nsls)

    @property
    def nslc_code_list(self):
        # Sorted unique (network, station, location, channel) code tuples.
        nslcs = set()
        for network in self.network_list:
            for station in network.station_list:
                for channel in station.channel_list:
                    nslcs.add((network.code, station.code,
                               channel.location_code, channel.code))

        return sorted(nslcs)

    def summary(self):
        '''One-line-per-level count summary of the contained metadata.'''
        lst = [
            'number of n codes: %i' % len(self.n_code_list),
            'number of ns codes: %i' % len(self.ns_code_list),
            'number of nsl codes: %i' % len(self.nsl_code_list),
            'number of nslc codes: %i' % len(self.nslc_code_list)
        ]
        return '\n'.join(lst)
class DatasetConfig(HasPaths):
    ''' Configuration for a Grond `Dataset` object. '''

    stations_path = Path.T(
        optional=True,
        help='List of files with station coordinates in Pyrocko format.')
    stations_stationxml_paths = List.T(
        Path.T(),
        optional=True,
        help='List of files with station coordinates in StationXML format.')
    events_path = Path.T(
        optional=True,
        help='File with hypocenter information and possibly'
             ' reference solution')
    waveform_paths = List.T(
        Path.T(),
        optional=True,
        help='List of directories with raw waveform data')
    clippings_path = Path.T(
        optional=True)
    responses_sacpz_path = Path.T(
        optional=True,
        help='List of SACPZ response files for restitution of'
             ' the raw waveform data.')
    responses_stationxml_paths = List.T(
        Path.T(),
        optional=True,
        help='List of StationXML response files for restitution of'
             ' the raw waveform data.')
    station_corrections_path = Path.T(
        optional=True,
        help='File containing station correction informations.')
    apply_correction_factors = Bool.T(
        optional=True,
        default=True,
        help='Apply correction factors from station corrections.')
    apply_correction_delays = Bool.T(
        optional=True,
        default=True,
        help='Apply correction delays from station corrections.')
    apply_displaced_sampling_workaround = Bool.T(
        optional=True,
        default=False,
        help='Work around displaced sampling issues.')
    extend_incomplete = Bool.T(
        default=False,
        help='Extend incomplete seismic traces.')
    picks_paths = List.T(
        Path.T())
    blacklist_paths = List.T(
        Path.T(),
        help='List of text files with blacklisted stations.')
    blacklist = List.T(
        String.T(),
        help='Stations/components to be excluded according to their STA, '
             'NET.STA, NET.STA.LOC, or NET.STA.LOC.CHA codes.')
    whitelist_paths = List.T(
        Path.T(),
        help='List of text files with whitelisted stations.')
    whitelist = List.T(
        String.T(),
        optional=True,
        help='If not None, list of stations/components to include according '
             'to their STA, NET.STA, NET.STA.LOC, or NET.STA.LOC.CHA codes. '
             'Note: '
             'when whitelisting on channel level, both, the raw and '
             'the processed channel codes have to be listed.')
    synthetic_test = SyntheticTest.T(
        optional=True)

    kite_scene_paths = List.T(
        Path.T(),
        optional=True,
        help='List of directories for the InSAR scenes.')

    gnss_campaign_paths = List.T(
        Path.T(),
        optional=True,
        help='List of directories for the GNSS campaign data.')

    def __init__(self, *args, **kwargs):
        HasPaths.__init__(self, *args, **kwargs)
        # Cache of Dataset objects, keyed by event name.
        self._ds = {}

    def get_event_names(self):
        '''Collect sorted event names from all configured event files.

        :raises DatasetError: when no file matches ``events_path``.
        '''
        logger.info('Loading events ...')

        def extra(path):
            # Wildcard the ${event_name} placeholder so glob can match all
            # per-event files.
            return expand_template(path, dict(
                event_name='*'))

        def fp(path):
            return self.expand_path(path, extra=extra)

        def check_events(events, fn):
            # Sanity-check loaded events; warnings only, except that a
            # nameless event aborts the check for this file.
            for ev in events:
                if not ev.name:
                    logger.warning('Event in %s has no name!', fn)
                    return
                if not ev.lat or not ev.lon:
                    logger.warning('Event %s has inconsistent coordinates!',
                                   ev.name)
                if not ev.depth:
                    logger.warning('Event %s has no depth!', ev.name)
                if not ev.time:
                    logger.warning('Event %s has no time!', ev.name)

        events = []
        events_path = fp(self.events_path)
        fns = glob.glob(events_path)
        if not fns:
            raise DatasetError('No event files matching "%s".' % events_path)

        for fn in fns:
            logger.debug('Loading from file %s' % fn)
            ev = model.load_events(filename=fn)
            check_events(ev, fn)

            events.extend(ev)

        event_names = [ev_.name for ev_ in events]
        event_names.sort()
        return event_names

    def get_dataset(self, event_name):
        '''Build (or return cached) the fully configured Dataset for an event.

        All configured inputs (events, stations, waveforms, scenes,
        responses, corrections, picks, black/whitelists) are attached.

        :raises DatasetError: when any input file cannot be loaded.
        '''
        if event_name not in self._ds:
            def extra(path):
                return expand_template(path, dict(
                    event_name=event_name))

            def fp(path):
                p = self.expand_path(path, extra=extra)
                if p is None:
                    return None

                if isinstance(p, list):
                    for path in p:
                        if not op.exists(path):
                            logger.warn('Path %s does not exist.' % path)
                else:
                    if not op.exists(p):
                        logger.warn('Path %s does not exist.' % p)

                return p

            ds = Dataset(event_name)
            try:
                ds.add_events(filename=fp(self.events_path))

                ds.add_stations(
                    pyrocko_stations_filename=fp(self.stations_path),
                    stationxml_filenames=fp(self.stations_stationxml_paths))

                if self.waveform_paths:
                    ds.add_waveforms(paths=fp(self.waveform_paths))

                if self.kite_scene_paths:
                    ds.add_kite_scenes(paths=fp(self.kite_scene_paths))

                if self.gnss_campaign_paths:
                    ds.add_gnss_campaigns(paths=fp(self.gnss_campaign_paths))

                if self.clippings_path:
                    ds.add_clippings(markers_filename=fp(self.clippings_path))

                if self.responses_sacpz_path:
                    ds.add_responses(
                        sacpz_dirname=fp(self.responses_sacpz_path))

                if self.responses_stationxml_paths:
                    ds.add_responses(
                        stationxml_filenames=fp(
                            self.responses_stationxml_paths))

                if self.station_corrections_path:
                    ds.add_station_corrections(
                        filename=fp(self.station_corrections_path))

                ds.apply_correction_factors = self.apply_correction_factors
                ds.apply_correction_delays = self.apply_correction_delays
                ds.apply_displaced_sampling_workaround = \
                    self.apply_displaced_sampling_workaround
                ds.extend_incomplete = self.extend_incomplete

                for picks_path in self.picks_paths:
                    ds.add_picks(
                        filename=fp(picks_path))

                ds.add_blacklist(self.blacklist)
                ds.add_blacklist(filenames=fp(self.blacklist_paths))
                if self.whitelist:
                    ds.add_whitelist(self.whitelist)
                if self.whitelist_paths:
                    ds.add_whitelist(filenames=fp(self.whitelist_paths))

                # Deep copy so per-event mutation of the synthetic test
                # config cannot leak back into this shared configuration.
                ds.set_synthetic_test(copy.deepcopy(self.synthetic_test))
                self._ds[event_name] = ds
            except (FileLoadError, OSError) as e:
                raise DatasetError(str(e))

        return self._ds[event_name]
class Event(Location):
    '''Seismic event representation

    :param lat: latitude of hypocenter (default 0.0)
    :param lon: longitude of hypocenter (default 0.0)
    :param time: origin time as float in seconds after '1970-01-01 00:00:00
    :param name: event identifier as string (optional)
    :param depth: source depth (optional)
    :param magnitude: magnitude of event (optional)
    :param region: source region (optional)
    :param catalog: name of catalog that lists this event (optional)
    :param moment_tensor: moment tensor as
        :py:class:`moment_tensor.MomentTensor` instance (optional)
    :param duration: source duration as float (optional)
    :param tags: list of tag strings (optional)
    '''

    time = Timestamp.T(default=util.str_to_time('1970-01-01 00:00:00'))
    name = String.T(default='', optional=True)
    magnitude = Float.T(optional=True)
    magnitude_type = String.T(optional=True)
    region = Unicode.T(optional=True)
    catalog = String.T(optional=True)
    moment_tensor = moment_tensor.MomentTensor.T(optional=True)
    duration = Float.T(optional=True)
    tags = List.T(Tag.T(optional=True))

    def __init__(
            self, lat=0., lon=0., time=0., name='', depth=None,
            elevation=None, magnitude=None, magnitude_type=None, region=None,
            load=None, loadf=None, catalog=None, moment_tensor=None,
            duration=None, tags=None):

        # ``tags`` defaults to None instead of ``[]``: a mutable default
        # would be shared between all instances created without tags.
        if tags is None:
            tags = []

        # ``load``/``loadf`` allow construction directly from an old-style
        # event file (path or open file); parsed values override the other
        # constructor arguments.
        vals = None
        if load is not None:
            vals = Event.oldload(load)
        elif loadf is not None:
            vals = Event.oldloadf(loadf)

        if vals:
            lat, lon, time, name, depth, magnitude, magnitude_type, region, \
                catalog, moment_tensor, duration, tags = vals

        Location.__init__(
            self, lat=lat, lon=lon, time=time, name=name, depth=depth,
            elevation=elevation, magnitude=magnitude,
            magnitude_type=magnitude_type, region=region, catalog=catalog,
            moment_tensor=moment_tensor, duration=duration, tags=tags)

    def time_as_string(self):
        '''Return the origin time as a human-readable UTC string.'''
        return util.time_to_str(self.time)

    def set_name(self, name):
        self.name = name

    def olddump(self, filename):
        '''Write this event to ``filename`` in the old event-file format.'''
        file = open(filename, 'w')
        self.olddumpf(file)
        file.close()

    def olddumpf(self, file):
        '''Write this event to an open file in ``key = value`` format.'''
        file.write('name = %s\n' % self.name)
        file.write('time = %s\n' % util.time_to_str(self.time))

        if self.lat is not None:
            file.write('latitude = %.12g\n' % self.lat)
        if self.lon is not None:
            file.write('longitude = %.12g\n' % self.lon)
        if self.magnitude is not None:
            file.write('magnitude = %g\n' % self.magnitude)
            # Scalar moment is derived from the magnitude, not stored.
            file.write('moment = %g\n' %
                       moment_tensor.magnitude_to_moment(self.magnitude))
        if self.magnitude_type is not None:
            file.write('magnitude_type = %s\n' % self.magnitude_type)
        if self.depth is not None:
            file.write('depth = %.10g\n' % self.depth)
        if self.region is not None:
            file.write('region = %s\n' % self.region)
        if self.catalog is not None:
            file.write('catalog = %s\n' % self.catalog)

        if self.moment_tensor is not None:
            # Dump tensor components plus both equivalent double-couple
            # (strike, dip, rake) solutions.
            m = self.moment_tensor.m()
            sdr1, sdr2 = self.moment_tensor.both_strike_dip_rake()
            file.write((
                'mnn = %g\nmee = %g\nmdd = %g\nmne = %g\nmnd = %g\nmed = %g\n'
                'strike1 = %g\ndip1 = %g\nrake1 = %g\n'
                'strike2 = %g\ndip2 = %g\nrake2 = %g\n') % (
                    (m[0, 0], m[1, 1], m[2, 2], m[0, 1], m[0, 2], m[1, 2])
                    + sdr1 + sdr2))

        if self.duration is not None:
            file.write('duration = %g\n' % self.duration)

        if self.tags:
            file.write('tags = %s\n' % ', '.join(self.tags))

    @staticmethod
    def unique(events, deltat=10., group_cmp=None):
        '''Select one representative event per group of near-duplicates.

        Events are grouped with :py:meth:`grouped`; within each group the
        events are sorted and the last one is kept.

        :param group_cmp: optional old-style comparison function
            ``f(a, b) -> int`` used to order events within a group.  By
            default events are ordered by catalog name (``None`` catalogs
            sorting first), matching the historical behavior of comparing
            ``a.catalog`` with ``b.catalog``.
        '''
        # Python 3 removed both the ``cmp`` builtin and the positional
        # ``list.sort(cmp)`` form used previously; adapt any user-supplied
        # comparison function with functools.cmp_to_key.
        from functools import cmp_to_key

        groups = Event.grouped(events, deltat)

        events = []
        for group in groups:
            if group:
                if group_cmp is None:
                    group.sort(key=lambda ev: ev.catalog or '')
                else:
                    group.sort(key=cmp_to_key(group_cmp))

                events.append(group[-1])

        return events

    @staticmethod
    def grouped(events, deltat=10.):
        '''Group events whose origin times differ by less than ``deltat`` s.

        Each event joins the group of the first earlier event close in
        time, otherwise it starts its own group.  Returns a list of
        non-empty groups, sorted by mean origin time.
        '''
        events = list(events)
        groups = []
        for ia, a in enumerate(events):
            groups.append([])
            haveit = False
            for ib, b in enumerate(events[:ia]):
                if abs(b.time - a.time) < deltat:
                    groups[ib].append(a)
                    haveit = True
                    break

            if not haveit:
                groups[ia].append(a)

        groups = [g for g in groups if g]
        # True division for the mean time: floor division (``//``) would
        # quantize the key to whole seconds and create spurious ties.
        groups.sort(key=lambda g: sum(e.time for e in g) / len(g))
        return groups

    @staticmethod
    def dump_catalog(events, filename=None, stream=None):
        '''Write events in the old event-file format, separated by dashes.

        Exactly one of ``filename`` or ``stream`` should be given.  A file
        opened here is closed here; a caller-supplied stream is left open.
        '''
        if filename is not None:
            file = open(filename, 'w')
        else:
            file = stream
        try:
            i = 0
            for ev in events:
                ev.olddumpf(file)
                file.write('--------------------------------------------\n')
                i += 1
        finally:
            if filename is not None:
                file.close()

    @staticmethod
    def oldload(filename):
        '''Parse a single event from a file in the old event-file format.'''
        with open(filename, 'r') as file:
            return Event.oldloadf(file)

    @staticmethod
    def oldloadf(file):
        '''Parse one event from an open old-format event file.

        Returns the tuple of constructor values consumed by ``__init__``.

        :raises EOF: when the file yields no data at all.
        :raises EmptyEvent: when only a separator line was found.
        :raises FileParseError: on any parsing problem.
        '''
        d = {}
        try:
            for line in file:
                if line.lstrip().startswith('#'):
                    continue

                toks = line.split(' = ', 1)
                if len(toks) == 2:
                    k, v = toks[0].strip(), toks[1].strip()
                    if k in ('name', 'region', 'catalog', 'magnitude_type'):
                        d[k] = v
                    if k in (('latitude longitude magnitude depth duration '
                              'mnn mee mdd mne mnd med strike1 dip1 rake1 '
                              'strike2 dip2 rake2 duration').split()):
                        d[k] = float(v)
                    if k == 'time':
                        d[k] = util.str_to_time(v)
                    if k == 'tags':
                        d[k] = [x.strip() for x in v.split(',')]

                if line.startswith('---'):
                    d['have_separator'] = True
                    break

        except Exception as e:
            raise FileParseError(e)

        if not d:
            raise EOF()

        if 'have_separator' in d and len(d) == 1:
            raise EmptyEvent()

        # Prefer a full moment tensor when all six components are present,
        # otherwise fall back to a double-couple from strike/dip/rake.
        mt = None
        m6 = [d[x] for x in 'mnn mee mdd mne mnd med'.split() if x in d]
        if len(m6) == 6:
            mt = moment_tensor.MomentTensor(m=moment_tensor.symmat6(*m6))
        else:
            sdr = [d[x] for x in 'strike1 dip1 rake1'.split() if x in d]
            if len(sdr) == 3:
                moment = 1.0
                if 'moment' in d:
                    moment = d['moment']
                elif 'magnitude' in d:
                    moment = moment_tensor.magnitude_to_moment(d['magnitude'])

                mt = moment_tensor.MomentTensor(
                    strike=sdr[0], dip=sdr[1], rake=sdr[2],
                    scalar_moment=moment)

        return (
            d.get('latitude', 0.0),
            d.get('longitude', 0.0),
            d.get('time', 0.0),
            d.get('name', ''),
            d.get('depth', None),
            d.get('magnitude', None),
            d.get('magnitude_type', None),
            d.get('region', None),
            d.get('catalog', None),
            mt,
            d.get('duration', None),
            d.get('tags', []))

    @staticmethod
    def load_catalog(filename):
        '''Iterate events from a catalog file (generator).

        Yields :py:class:`Event` objects; empty entries are skipped.  Using
        a context manager guarantees the file is closed even when the
        generator is abandoned before exhaustion.
        '''
        with open(filename, 'r') as file:
            try:
                while True:
                    try:
                        ev = Event(loadf=file)
                        yield ev
                    except EmptyEvent:
                        pass

            except EOF:
                pass

    def get_hash(self):
        '''Compute a hash string identifying this event.

        High-precision timestamps are rendered with 6, ordinary floats with
        3 decimal places, so the hash is stable for a given time type.
        '''
        e = self
        if isinstance(e.time, util.hpfloat):
            stime = util.time_to_str(e.time, format='%Y-%m-%d %H:%M:%S.6FRAC')
        else:
            stime = util.time_to_str(e.time, format='%Y-%m-%d %H:%M:%S.3FRAC')

        s = float_or_none_to_str

        to_hash = ', '.join((
            stime,
            s(e.lat), s(e.lon), s(e.depth),
            float_or_none_to_str(e.magnitude, 5),
            str(e.catalog), str(e.name or ''),
            str(e.region)))

        return ehash(to_hash)

    def human_str(self):
        '''Return a multi-line human-readable summary of the event.'''
        s = [
            'Latitude [deg]: %g' % self.lat,
            'Longitude [deg]: %g' % self.lon,
            'Time [UTC]: %s' % util.time_to_str(self.time)]

        if self.name:
            s.append('Name: %s' % self.name)

        if self.depth is not None:
            s.append('Depth [km]: %g' % (self.depth / 1000.))

        if self.magnitude is not None:
            s.append('Magnitude [%s]: %3.1f' % (
                self.magnitude_type or 'M?', self.magnitude))

        if self.region:
            s.append('Region: %s' % self.region)

        if self.catalog:
            s.append('Catalog: %s' % self.catalog)

        if self.moment_tensor:
            s.append(str(self.moment_tensor))

        return '\n'.join(s)
class Row(Object):
    # One HTML-like table row; cells serialize as <td> child elements.
    xmlns = ns1
    cells = List.T(String.T(xmltagname='td'))
class Earthmodel(Object):
    # Reference to an earth model, identified by name only.
    id = String.T()
def testOptionalDefault(self):
    '''Check interplay of ``default`` and ``optional`` across T-prop types.

    For each property type, construct objects with and without explicit
    values, validate, serialize and re-load them, and verify that defaults
    and optionality behave as expected.
    '''
    from pyrocko.guts_array import Array, array_equal
    import numpy as num
    assert_ae = num.testing.assert_almost_equal

    def array_equal_noneaware(a, b):
        # array_equal() itself cannot handle None inputs.
        if a is None:
            return b is None
        elif b is None:
            return a is None
        else:
            return array_equal(a, b)

    # (name, T-prop, input values, expected stored values).
    # 'aerr' marks inputs that must raise ArgumentError on construction,
    # 'err' marks stored values that must fail validation.
    data = [
        ('a', Int.T(),
            [None, 0, 1, 2],
            ['aerr', 0, 1, 2]),
        ('b', Int.T(optional=True),
            [None, 0, 1, 2],
            [None, 0, 1, 2]),
        ('c', Int.T(default=1),
            [None, 0, 1, 2],
            [1, 0, 1, 2]),
        ('d', Int.T(default=1, optional=True),
            [None, 0, 1, 2],
            [1, 0, 1, 2]),
        ('e', List.T(Int.T()),
            [None, [], [1], [2]],
            [[], [], [1], [2]]),
        ('f', List.T(Int.T(), optional=True),
            [None, [], [1], [2]],
            [None, [], [1], [2]]),
        ('g', List.T(Int.T(), default=[1]),
            [None, [], [1], [2]],
            [[1], [], [1], [2]]),
        ('h', List.T(Int.T(), default=[1], optional=True),
            [None, [], [1], [2]],
            [[1], [], [1], [2]]),
        ('i', Tuple.T(2, Int.T()),
            [None, (1, 2)],
            ['err', (1, 2)]),
        ('j', Tuple.T(2, Int.T(), optional=True),
            [None, (1, 2)],
            [None, (1, 2)]),
        ('k', Tuple.T(2, Int.T(), default=(1, 2)),
            [None, (1, 2), (3, 4)],
            [(1, 2), (1, 2), (3, 4)]),
        ('l', Tuple.T(2, Int.T(), default=(1, 2), optional=True),
            [None, (1, 2), (3, 4)],
            [(1, 2), (1, 2), (3, 4)]),
        ('i2', Tuple.T(None, Int.T()),
            [None, (1, 2)],
            [(), (1, 2)]),
        ('j2', Tuple.T(None, Int.T(), optional=True),
            [None, (), (3, 4)],
            [None, (), (3, 4)]),
        ('k2', Tuple.T(None, Int.T(), default=(1,)),
            [None, (), (3, 4)],
            [(1,), (), (3, 4)]),
        ('l2', Tuple.T(None, Int.T(), default=(1,), optional=True),
            [None, (), (3, 4)],
            [(1,), (), (3, 4)]),
        # Use the builtin ``int`` as dtype: the ``num.int`` alias was
        # deprecated in NumPy 1.20 and removed in 1.24.
        ('m', Array.T(shape=(None,), dtype=int, serialize_as='list'),
            [num.arange(0), num.arange(2)],
            [num.arange(0), num.arange(2)]),
        ('n', Array.T(shape=(None,), dtype=int, serialize_as='list',
                      optional=True),
            [None, num.arange(0), num.arange(2)],
            [None, num.arange(0), num.arange(2)]),
        ('o', Array.T(shape=(None,), dtype=int, serialize_as='list',
                      default=num.arange(2)),
            [None, num.arange(0), num.arange(2), num.arange(3)],
            [num.arange(2), num.arange(0), num.arange(2), num.arange(3)]),
        ('p', Array.T(shape=(None,), dtype=int, serialize_as='list',
                      default=num.arange(2), optional=True),
            [None, num.arange(0), num.arange(2), num.arange(3)],
            [num.arange(2), num.arange(0), num.arange(2), num.arange(3)]),
        ('q', Dict.T(String.T(), Int.T()),
            [None, {}, {'a': 1}],
            [{}, {}, {'a': 1}]),
        ('r', Dict.T(String.T(), Int.T(), optional=True),
            [None, {}, {'a': 1}],
            [None, {}, {'a': 1}]),
        ('s', Dict.T(String.T(), Int.T(), default={'a': 1}),
            [None, {}, {'a': 1}],
            [{'a': 1}, {}, {'a': 1}]),
        ('t', Dict.T(String.T(), Int.T(), default={'a': 1}, optional=True),
            [None, {}, {'a': 1}],
            [{'a': 1}, {}, {'a': 1}]),
    ]

    for k, t, vals, exp in data:
        # ``last`` records how many kwargs reached the constructor, so we
        # can check whether serialization included or omitted the value.
        last = [None]

        class A(Object):
            def __init__(self, **kwargs):
                last[0] = len(kwargs)
                Object.__init__(self, **kwargs)

            v = t

        A.T.class_signature()

        for v, e in zip(vals, exp):
            if isinstance(e, str) and e == 'aerr':
                with self.assertRaises(ArgumentError):
                    if v is not None:
                        a1 = A(v=v)
                    else:
                        a1 = A()

                continue
            else:
                if v is not None:
                    a1 = A(v=v)
                else:
                    a1 = A()

            if isinstance(e, str) and e == 'err':
                with self.assertRaises(ValidationError):
                    a1.validate()
            else:
                a1.validate()

            # Round-trip through the serializer.
            a2 = load_string(dump(a1))

            if isinstance(e, num.ndarray):
                assert last[0] == int(
                    not (array_equal_noneaware(t.default(), a1.v)
                         and t.optional))
                assert_ae(a1.v, e)
                # Fixed: the round-tripped object must be checked too
                # (the original compared a1.v twice).
                assert_ae(a2.v, e)
            else:
                assert last[0] == int(
                    not (t.default() == a1.v and t.optional))
                self.assertEqual(a1.v, e)
                self.assertEqual(a2.v, e)
class Phase(Object):
    # Phase name, serialized as the element's text content.
    value = String.T(xmlstyle='content')
class GNSSStation(Location):
    ''' Representation of a GNSS station during a campaign measurement

    For more information see
    http://kb.unavco.org/kb/assets/660/UNAVCO_Campaign_GPS_GNSS_Handbook.pdf
    '''
    code = String.T(
        help='Four letter station code',
        optional=True)

    style = StringChoice.T(
        choices=['static', 'rapid_static', 'kinematic'],
        default='static')

    survey_start = DateTimestamp.T(
        optional=True,
        help='Survey start time')

    survey_end = DateTimestamp.T(
        optional=True,
        help='Survey end time')

    correlation_ne = Float.T(
        optional=True,
        help='North-East component correlation')

    correlation_eu = Float.T(
        optional=True,
        help='East-Up component correlation')

    correlation_nu = Float.T(
        optional=True,
        help='North-Up component correlation')

    north = GNSSComponent.T(
        default=GNSSComponent.D())

    east = GNSSComponent.T(
        default=GNSSComponent.D())

    up = GNSSComponent.T(
        default=GNSSComponent.D())

    def __init__(self, *args, **kwargs):
        # NOTE(review): this override just forwards to Location.__init__;
        # it could be removed without changing behavior.
        Location.__init__(self, *args, **kwargs)

    def get_covariance_matrix(self, full=True):
        '''Build the 3x3 covariance matrix in (north, east, up) order.

        Diagonal holds the component variances (sigma squared); the
        off-diagonal terms are derived from the pairwise correlation
        coefficients where given.

        :param full: if True, mirror the upper triangle into the lower
            triangle to return a symmetric matrix; otherwise only the upper
            triangle is filled.
        '''
        s = self
        covar = num.zeros((3, 3))
        covar[num.diag_indices_from(covar)] = num.array(
            [c.sigma**2 for c in (s.north, s.east, s.up)])

        # cov(a, b) = corr(a, b) * sigma_a * sigma_b
        if s.correlation_ne is not None:
            covar[0, 1] = s.correlation_ne * s.north.sigma * s.east.sigma
        if s.correlation_nu is not None:
            covar[0, 2] = s.correlation_nu * s.north.sigma * s.up.sigma
        if s.correlation_eu is not None:
            covar[1, 2] = s.correlation_eu * s.east.sigma * s.up.sigma

        if full:
            # For a 3x3 matrix this index pairing copies (0,1)->(1,0),
            # (0,2)->(2,0), (1,2)->(2,1), i.e. a symmetric fill.
            covar[num.tril_indices_from(covar, k=-1)] = \
                covar[num.triu_indices_from(covar, k=1)]

        return covar

    def get_correlation_matrix(self, full=True):
        '''Build a 3x3 matrix of the pairwise correlation coefficients.

        NOTE(review): the diagonal is filled with the component sigmas,
        not with 1.0 as in a conventional correlation matrix — confirm this
        is intended by the downstream consumers.
        '''
        s = self
        corr = num.zeros((3, 3))
        corr[num.diag_indices_from(corr)] = num.array(
            [c.sigma for c in (s.north, s.east, s.up)])

        if s.correlation_ne is not None:
            corr[0, 1] = s.correlation_ne
        if s.correlation_nu is not None:
            corr[0, 2] = s.correlation_nu
        if s.correlation_eu is not None:
            corr[1, 2] = s.correlation_eu

        if full:
            # Mirror upper triangle into the lower triangle (symmetric).
            corr[num.tril_indices_from(corr, k=-1)] = \
                corr[num.triu_indices_from(corr, k=1)]

        return corr
class EventDescription(Object):
    # Free-text event description with an optional category.
    text = String.T()
    type = EventDescriptionType.T(optional=True)
class ReportIndexEntry(Object):
    # One entry of the report index: where the report lives, which problem
    # it belongs to, and (optionally) reference/best event solutions.
    path = String.T()
    problem_name = String.T()
    event_reference = Event.T(optional=True)
    event_best = Event.T(optional=True)
    grond_version = String.T(optional=True)
class Comment(Object):
    # QuakeML-style comment: text with optional resource id and provenance.
    id = ResourceReference.T(optional=True, xmlstyle='attribute')
    text = String.T()
    creation_info = CreationInfo.T(optional=True)
class VelocityProfile(Object):
    '''
    A single crustal velocity-depth profile (Global Crustal Database).

    Layer thicknesses, a stepped (staircase) representation of the
    velocity functions, and NaN-masking of zero (missing) velocities are
    derived in ``__init__`` from the raw ``vp``/``vs``/``d`` arrays.
    '''
    uid = Int.T(
        optional=True,
        help='Unique ID of measurement')
    lat = Float.T(
        help='Latitude [deg]')
    lon = Float.T(
        help='Longitude [deg]')
    elevation = Float.T(
        default=num.nan,
        help='Elevation [m]')
    vp = Array.T(
        shape=(None, 1),
        help='P Wave velocities [m/s]')
    vs = Array.T(
        shape=(None, 1),
        help='S Wave velocities [m/s]')
    d = Array.T(
        shape=(None, 1),
        help='Interface depth, top [m]')
    h = Array.T(
        shape=(None, 1),
        help='Interface thickness [m]')
    heatflow = Float.T(
        optional=True,
        help='Heatflow [W/m^2]')
    geographical_location = String.T(
        optional=True,
        help='Geographic Location')
    geological_province = String.T(
        optional=True,
        help='Geological Province')
    geological_age = String.T(
        optional=True,
        help='Geological Age')
    measurement_method = Int.T(
        optional=True,
        help='Measurement method')
    publication_reference = String.T(
        optional=True,
        help='Publication Reference')
    publication_year__ = Int.T(
        help='Publication Date')

    def __init__(self, *args, **kwargs):
        Object.__init__(self, *args, **kwargs)

        # Thickness of each layer from successive interface depths; the
        # deepest layer is treated as a halfspace (thickness 0 here).
        self.h = num.abs(self.d - num.roll(self.d, -1))
        self.h[-1] = 0
        self.nlayers = self.h.size

        self.geographical_location = '%s (%s)' % (
            provinceKey(self.geographical_location),
            self.geographical_location)

        # Zero velocity means 'not measured' — use NaN so these samples
        # drop out of interpolation and plotting.
        self.vs[self.vs == 0] = num.nan
        self.vp[self.vp == 0] = num.nan

        # Precomputed staircase representation used by stepped
        # interpolation; last depth extended into the halfspace.
        self._step_vp = num.repeat(self.vp, 2)
        self._step_vs = num.repeat(self.vs, 2)
        self._step_d = num.roll(num.repeat(self.d, 2), -1)
        self._step_d[-1] = self._step_d[-2] + THICKNESS_HALFSPACE

    @property
    def publication_year__(self):
        # Computed guts attribute: year parsed from the reference string.
        return pubYear(self.publication_reference)

    def interpolateProfile(self, depths, phase='p', stepped=True):
        '''Get a continuous velocity function at arbitrary depth

        :param depth: Depths to interpolate
        :type depth: :class:`numpy.ndarray`
        :param phase: P or S wave velocity, **p** or **s**
        :type phase: str, optional
        :param stepped: Use a stepped velocity function or gradient
        :type stepped: bool
        :returns: velocities at requested depths
        :rtype: :class:`numpy.ndarray`
        '''
        if phase not in ['s', 'p']:
            raise AttributeError('Phase has to be either \'p\' or \'s\'.')

        if phase == 'p':
            vel = self._step_vp if stepped else self.vp
        elif phase == 's':
            vel = self._step_vs if stepped else self.vs
        d = self._step_d if stepped else self.d

        if vel.size == 0:
            raise ProfileEmpty('Phase %s does not contain velocities' % phase)

        try:
            # Depths outside the profile are mapped to NaN, not clamped.
            res = num.interp(depths, d, vel, left=num.nan, right=num.nan)
        except ValueError:
            raise ValueError('Could not interpolate velocity profile.')

        return res

    def plot(self, axes=None):
        '''
        Plot the velocity profile, see :class:`pyrocko.cake`.

        :param axes: Axes to plot into.
        :type axes: :class:`matplotlib.Axes`
        '''
        import matplotlib.pyplot as plt
        fig, ax = _getCanvas(axes)

        # BUG FIX: plot into the axes obtained from _getCanvas (was
        # axes=axes, which is None when no axes were passed in).
        my_model_plot(self.getLayeredModel(), axes=ax)
        # BUG FIX: the easting used {p.lat} twice; it must be longitude.
        ax.set_title('Global Crustal Database\n'
                     'Velocity Structure at {p.lat:.4f}N, '
                     ' {p.lon:.4f}E (uid {p.uid})'.format(p=self))
        if axes is None:
            plt.show()

    def getLayeredModel(self):
        ''' Get a layered model, see :class:`pyrocko.cake.LayeredModel`. '''
        def iterLines():
            for il, m in enumerate(self.iterLayers()):
                yield self.d[il], m, ''

        return LayeredModel.from_scanlines(iterLines())

    def iterLayers(self):
        ''' Iterator returns a :class:`pyrocko.cake.Material` for
            each layer'''
        for il in range(self.nlayers):
            yield Material(vp=self.vp[il], vs=self.vs[il])

    @property
    def geog_loc_long(self):
        # BUG FIX: referenced nonexistent attribute `self.geog_loc`.
        return provinceKey(self.geographical_location)

    @property
    def geol_age_long(self):
        # BUG FIX: referenced nonexistent attribute `self.geol_age`.
        return ageKey(self.geological_age)

    @property
    def has_s(self):
        # BUG FIX: tested vp instead of vs.
        return num.any(self.vs)

    @property
    def has_p(self):
        # BUG FIX: tested vs instead of vp.
        return num.any(self.vp)

    def get_weeded(self):
        '''
        Get weeded representation of layers used in the profile.
        See :func:`pyrocko.cake.get_weeded` for details.
        '''
        weeded = num.zeros((self.nlayers, 4))
        weeded[:, 0] = self.d
        weeded[:, 1] = self.vp
        weeded[:, 2] = self.vs
        # NOTE(review): column 3 is left zero — confirm intended layout
        # against pyrocko.cake's weeding convention.
        # BUG FIX: the computed array was previously discarded.
        return weeded

    def _csv(self):
        # One CSV row per layer.
        # BUG FIXES: vp/vs were swapped in the format arguments; the
        # format string referenced '{self.reference}' which raises
        # KeyError (only 'p' is passed); rows had no separator; and the
        # loop variable shadowed the meaning of `d`.
        output = ''
        for il in range(len(self.h)):
            output += ('{p.uid}, {p.lat}, {p.lon},'
                       ' {vp}, {vs}, {h}, {d},'
                       ' {p.publication_reference}\n').format(
                p=self,
                vp=self.vp[il], vs=self.vs[il],
                h=self.h[il], d=self.d[il])
        return output
class Voltage(FloatWithUnit):
    # Float quantity fixed to volts; same pattern as Angle (DEGREES),
    # with the unit serialized as an XML attribute.
    unit = String.T(default='VOLTS', optional=True, xmlstyle='attribute')