def getSlabInfo(self, lat, lon):
    """Return a dictionary with depth, dip, strike, and depth uncertainty.

    Args:
        lat (float): Hypocentral latitude in decimal degrees.
        lon (float): Hypocentral longitude in decimal degrees.
    Returns:
        dict: Dictionary containing keys:
            - region Three letter Slab model region code.
            - strike Slab model strike angle.
            - dip Slab model dip angle.
            - depth Slab model depth (km).
            - maximum_interface_depth Maximum interface depth (km).
            - depth_uncertainty Slab model depth uncertainty.
    """
    slabinfo = {}
    if not self.contains(lat, lon):
        return slabinfo
    fpath, fname = os.path.split(self._depth_file)
    parts = fname.split('_')
    region = parts[0]
    depth_grid = GMTGrid.load(self._depth_file)
    # slab grids are negative depth
    depth = -1 * depth_grid.getValue(lat, lon)
    dip_grid = GMTGrid.load(self._dip_file)
    strike_grid = GMTGrid.load(self._strike_file)
    if self._error_file is not None:
        error_grid = GMTGrid.load(self._error_file)
        error = error_grid.getValue(lat, lon)
    else:
        error = DEFAULT_DEPTH_ERROR

    # Slab 2.0 dip directions are positive, 1.0 is negative
    dip = dip_grid.getValue(lat, lon)
    if dip < 0:
        dip = dip * -1

    strike = strike_grid.getValue(lat, lon)
    if strike < 0:
        strike += 360
    if np.isnan(strike):
        error = np.nan

    # get the maximum interface depth from table (if present)
    if self._slab_table is not None:
        df = self._slab_table
        max_int_depth = df[df['zone'] == region].iloc[0][
            'interface_max_depth']
    else:
        max_int_depth = MAX_INTERFACE_DEPTH

    slabinfo = {'region': region,
                'strike': strike,
                'dip': dip,
                'depth': depth,
                'maximum_interface_depth': max_int_depth,
                'depth_uncertainty': error}
    return slabinfo
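# A minimal usage sketch for getSlabInfo. The class and constructor names
# below are hypothetical placeholders for whatever slab-model object this
# method belongs to:
#
#   slab = SlabModel(...)  # hypothetical constructor
#   info = slab.getSlabInfo(-20.5, -70.1)
#   if info:  # an empty dict means the point is outside this slab region
#       print('depth %.1f km (+/- %.1f km), strike %.0f, dip %.0f' %
#             (info['depth'], info['depth_uncertainty'],
#              info['strike'], info['dip']))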
def grdcmp(x, y, rtol=1e-6, atol=0):
    """Compare the contents of two GMT GRD files using a numpy assert method.

    Args:
        x: Path to a GRD file.
        y: Path to another GRD file.
        rtol: Relative tolerance passed to numpy.testing.assert_allclose.
        atol: Absolute tolerance passed to numpy.testing.assert_allclose.
    """
    xgrid = GMTGrid.load(x)
    xdata = xgrid.getData()
    ygrid = GMTGrid.load(y)
    ydata = ygrid.getData()
    np.testing.assert_allclose(xdata, ydata, rtol=rtol, atol=atol)
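# Example use of grdcmp in a test, with hypothetical file paths;
# numpy.testing.assert_allclose raises an AssertionError on mismatch:
#
#   grdcmp('targets/zhu2015.grd', 'output/zhu2015.grd', rtol=1e-3)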
def test_mapmaker_intensity():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    shakedir = os.path.abspath(os.path.join(homedir, '..', '..', '..'))
    out_file = os.path.join(shakedir, 'tests', 'data', 'containers',
                            'northridge', 'shake_result.hdf')
    container = ShakeMapOutputContainer.load(out_file)
    topofile = os.path.join(homedir, '..', '..', 'data', 'install', 'data',
                            'mapping', 'CA_topo.grd')

    info = container.getMetadata()
    xmin = info['output']['map_information']['min']['longitude']
    xmax = info['output']['map_information']['max']['longitude']
    ymin = info['output']['map_information']['min']['latitude']
    ymax = info['output']['map_information']['max']['latitude']
    xmin = float(xmin) - 0.1
    xmax = float(xmax) + 0.1
    ymin = float(ymin) - 0.1
    ymax = float(ymax) + 0.1
    dy = float(info['output']['map_information']['grid_spacing']['latitude'])
    dx = float(info['output']['map_information']['grid_spacing']['longitude'])
    sampledict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    topogrid = GMTGrid.load(topofile, samplegeodict=sampledict,
                            resample=False)

    outpath = mkdtemp()
    model_config = container.getConfig()
    comp = container.getComponents('MMI')[0]
    textfile = os.path.join(get_data_path(), 'mapping', 'map_strings.en')
    text_dict = get_text_strings(textfile)
    cities = Cities.fromDefault()
    d = {'imtype': 'MMI',
         'topogrid': topogrid,
         'allcities': cities,
         'states_provinces': None,
         'countries': None,
         'oceans': None,
         'lakes': None,
         'roads': None,
         'faults': None,
         'datadir': outpath,
         'operator': 'NEIC',
         'filter_size': 10,
         'info': info,
         'component': comp,
         'imtdict': container.getIMTGrids('MMI', comp),
         'ruptdict': copy.deepcopy(container.getRuptureDict()),
         'stationdict': container.getStationDict(),
         'config': model_config,
         'tdict': text_dict}
    try:
        fig1, fig2 = draw_map(d)
    except Exception:
        assert 1 == 2
    finally:
        shutil.rmtree(outpath)
def test_zhu2015():
    conf_file = os.path.join(upone, 'defaultconfigfiles', 'models',
                             'zhu_2015.ini')
    conf = ConfigObj(conf_file)
    data_path = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    # Check slopefile trimming
    conf['zhu_2015']['slopefile'] = 'global_gted_maxslope_30c.flt'
    conf = correct_config_filepaths(data_path, conf)
    # Run with divfactor of 1
    conf['zhu_2015']['divfactor'] = '1.'
    shakefile = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    lm = LM.LogisticModel(shakefile, conf, saveinputs=True)
    maplayers = lm.calculate()
    pgrid = maplayers['model']['grid']
    test_data = pgrid.getData()

    if changetarget:
        # To change target data:
        pgrd = GMTGrid(pgrid.getData(), pgrid.getGeoDict())
        pgrd.save(os.path.join(datadir, 'loma_prieta', 'targets',
                               'zhu2015.grd'))

    # Load target
    target_file = os.path.join(datadir, 'loma_prieta', 'targets',
                               'zhu2015.grd')
    target_grid = GMTGrid.load(target_file)
    target_data = target_grid.getData()

    # Assert
    np.testing.assert_allclose(target_data, test_data, rtol=1e-3)
def _load(vs30File, samplegeodict=None, resample=False, method='linear',
          doPadding=False, padValue=np.nan):
    try:
        vs30grid = GMTGrid.load(vs30File,
                                samplegeodict=samplegeodict,
                                resample=resample,
                                method=method,
                                doPadding=doPadding,
                                padValue=padValue)
    except Exception as msg1:
        try:
            vs30grid = GDALGrid.load(vs30File,
                                     samplegeodict=samplegeodict,
                                     resample=resample,
                                     method=method,
                                     doPadding=doPadding,
                                     padValue=padValue)
        except Exception as msg2:
            msg = 'Load failure of %s - error messages: "%s"\n "%s"' % (
                vs30File, str(msg1), str(msg2))
            raise ShakeLibException(msg)
    if vs30grid.getData().dtype != np.float64:
        vs30grid.setData(vs30grid.getData().astype(np.float64))
    return vs30grid
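# Usage sketch for _load: the GMT reader is tried first, the ESRI (GDAL)
# reader second, and the returned grid always carries float64 data. The
# file path below is hypothetical:
#
#   vs30grid = _load('/data/vs30/global_vs30.grd', resample=False)
#   assert vs30grid.getData().dtype == np.float64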
def getNoDataGrid(predictors, xmin, xmax, ymin, ymax):
    txmin = xmin
    txmax = xmax
    tymin = ymin
    tymax = ymax
    mindx = 9999999999
    mindy = 9999999999
    # figure out bounds enclosing all files
    for predname, predfile in predictors.items():
        if not os.path.isfile(predfile):
            continue
        ftype = getFileType(predfile)
        if ftype == 'shapefile':
            f = fiona.open(predfile, 'r')
            bxmin, bymin, bxmax, bymax = f.bounds
            f.close()
            if bxmin < txmin:
                txmin = bxmin
            if bxmax > txmax:
                txmax = bxmax
            if bymin < tymin:
                tymin = bymin
            if bymax > tymax:
                tymax = bymax
        elif ftype == 'grid':
            gridtype = getGridType(predfile)
            if gridtype is None:
                raise Exception('File "%s" does not appear to be either a '
                                'GMT grid or an ESRI grid.' % predfile)
            fdict = getFileGeoDict(predfile, gridtype)
            if fdict.dx < mindx:
                mindx = fdict.dx
            if fdict.dy < mindy:
                mindy = fdict.dy
            if fdict.xmin < txmin:
                txmin = fdict.xmin
            if fdict.xmax > txmax:
                txmax = fdict.xmax
            if fdict.ymin < tymin:
                tymin = fdict.ymin
            if fdict.ymax > tymax:
                tymax = fdict.ymax
    sdict = GeoDict.createDictFromBox(txmin, txmax, tymin, tymax,
                                      mindx, mindy)
    nanarray = np.zeros((sdict.ny, sdict.nx), dtype=np.int8)
    for predname, predfile in predictors.items():
        if not os.path.isfile(predfile):
            continue
        ftype = getFileType(predfile)
        if ftype == 'shapefile':
            shapes = list(fiona.open(predfile, 'r'))
            grid = Grid2D.rasterizeFromGeometry(shapes, sdict)
        else:
            gridtype = getGridType(predfile)
            if gridtype == 'gmt':
                grid = GMTGrid.load(predfile, samplegeodict=sdict,
                                    resample=True, method='nearest',
                                    doPadding=True)
            else:
                grid = GDALGrid.load(predfile, samplegeodict=sdict,
                                     resample=True, method='nearest',
                                     doPadding=True)
        nangrid = np.isnan(grid.getData())
        nanarray = nanarray | nangrid
    nangrid = Grid2D(data=nanarray, geodict=sdict)
    return nangrid
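# The core of getNoDataGrid is a running union of NaN masks across layers.
# A minimal self-contained sketch of that idea with plain numpy arrays:
import numpy as np

a = np.array([[1.0, np.nan], [3.0, 4.0]])
b = np.array([[np.nan, 2.0], [3.0, np.nan]])
nodata = np.zeros(a.shape, dtype=np.int8)
for layer in (a, b):
    nodata = nodata | np.isnan(layer)  # cell is "no data" if any layer is NaN
print(nodata)  # [[1 1]
               #  [0 1]]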
def _load(vs30File, samplegeodict=None, resample=False, method='linear',
          doPadding=False, padValue=np.nan):
    try:
        vs30grid = GMTGrid.load(vs30File,
                                samplegeodict=samplegeodict,
                                resample=resample,
                                method=method,
                                doPadding=doPadding,
                                padValue=padValue)
    except Exception as msg1:
        try:
            vs30grid = GDALGrid.load(vs30File,
                                     samplegeodict=samplegeodict,
                                     resample=resample,
                                     method=method,
                                     doPadding=doPadding,
                                     padValue=padValue)
        except Exception as msg2:
            msg = 'Load failure of %s - error messages: "%s"\n "%s"' % (
                vs30File, str(msg1), str(msg2))
            raise ShakeMapException(msg)
    if vs30grid.getData().dtype != np.float64:
        vs30grid.setData(vs30grid.getData().astype(np.float64))
    return vs30grid
def test_godt_2008():
    conf_file = os.path.join(upone, 'defaultconfigfiles', 'models',
                             'godt_2008.ini')
    conf = ConfigObj(conf_file)
    conf['godt_2008']['divfactor'] = '1.'
    data_path = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    conf = correct_config_filepaths(data_path, conf)
    shakefile = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    maplayers = godt2008(shakefile, conf)
    pgrid = maplayers['model']['grid']
    test_data = pgrid.getData()

    if changetarget:
        # To change target data:
        pgrd = GMTGrid(pgrid.getData(), pgrid.getGeoDict())
        pgrd.save(os.path.join(datadir, 'loma_prieta', 'targets',
                               'godt_2008.grd'))

    # Load target
    target_file = os.path.join(datadir, 'loma_prieta', 'targets',
                               'godt_2008.grd')
    target_grid = GMTGrid.load(target_file)
    target_data = target_grid.getData()

    # Assert
    np.testing.assert_allclose(target_data, test_data, rtol=1e-3)
def test_jessee_2018():
    conf_file = os.path.join(upone, 'defaultconfigfiles', 'models',
                             'jessee_2018.ini')
    conf = ConfigObj(conf_file)
    data_path = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    conf = correct_config_filepaths(data_path, conf)
    shakefile = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    uncertainty_file = os.path.join(datadir, 'loma_prieta',
                                    'uncertainty.xml')
    lm = LM.LogisticModel(shakefile, conf, saveinputs=True,
                          uncertfile=uncertainty_file)
    maplayers = lm.calculate()
    pgrid = maplayers['model']['grid']
    stdgrid = maplayers['std']['grid']
    test_data = pgrid.getData()
    test_data_std = stdgrid.getData()

    if changetarget:
        # To change target data:
        pgrd = GMTGrid(pgrid.getData(), pgrid.getGeoDict())
        pgrd.save(os.path.join(datadir, 'loma_prieta', 'targets',
                               'jessee_2018.grd'))
        stdgrd = GMTGrid(stdgrid.getData(), stdgrid.getGeoDict())
        stdgrd.save(os.path.join(datadir, 'loma_prieta', 'targets',
                                 'jessee_2018_std.grd'))

    # Load target
    target_file = os.path.join(datadir, 'loma_prieta', 'targets',
                               'jessee_2018.grd')
    target_grid = GMTGrid.load(target_file)
    target_data = target_grid.getData()
    std_file = os.path.join(datadir, 'loma_prieta', 'targets',
                            'jessee_2018_std.grd')
    target_grid_std = GMTGrid.load(std_file)
    target_data_std = target_grid_std.getData()

    # Assert
    np.testing.assert_allclose(target_data, test_data, rtol=1e-3)
    np.testing.assert_allclose(target_data_std, test_data_std, rtol=1e-3)
def sampleGridFile(gridfile, xypoints, method='nearest'):
    """Sample grid file (ESRI or GMT format) at each of a set of XY
    (decimal degrees) points.

    :param gridfile:
      Name of ESRI or GMT grid format file from which to sample values.
    :param xypoints:
      2D numpy array of XY points, decimal degrees.
    :param method:
      Interpolation method, either 'nearest' or 'linear'.
    :returns:
      1D numpy array of grid values at each of input XY points.
    """
    xmin = np.min(xypoints[:, 0])
    xmax = np.max(xypoints[:, 0])
    ymin = np.min(xypoints[:, 1])
    ymax = np.max(xypoints[:, 1])
    gridtype = None
    try:
        fdict = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception:
        try:
            fdict = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except Exception:
            pass
    if gridtype is None:
        raise Exception('File "%s" does not appear to be either a GMT grid '
                        'or an ESRI grid.' % gridfile)
    # pad the sampling bounds out by three grid cells on each side
    xmin = xmin - fdict.dx * 3
    xmax = xmax + fdict.dx * 3
    ymin = ymin - fdict.dy * 3
    ymax = ymax + fdict.dy * 3
    if gridtype == 'gmt':
        fgeodict = GMTGrid.getFileGeoDict(gridfile)
    else:
        fgeodict = GDALGrid.getFileGeoDict(gridfile)
    dx, dy = (fgeodict.dx, fgeodict.dy)
    sdict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    if gridtype == 'gmt':
        grid = GMTGrid.load(gridfile, samplegeodict=sdict, resample=False,
                            method=method, doPadding=True)
    else:
        grid = GDALGrid.load(gridfile, samplegeodict=sdict, resample=False,
                             method=method, doPadding=True)
    return sampleFromGrid(grid, xypoints)
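# Usage sketch for sampleGridFile, with a hypothetical grid path; points
# are (lon, lat) pairs in decimal degrees:
#
#   import numpy as np
#   xypoints = np.array([[-122.0, 37.0],
#                        [-121.5, 37.2]])
#   values = sampleGridFile('/data/global_vs30.grd', xypoints,
#                           method='linear')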
def sampleGridFile(gridfile, xypoints, method='nearest'):
    """Sample grid file (ESRI or GMT format) at each of a set of XY
    (decimal degrees) points.

    :param gridfile:
      Name of ESRI or GMT grid format file from which to sample values.
    :param xypoints:
      2D numpy array of XY points, decimal degrees.
    :param method:
      Interpolation method, either 'nearest' or 'linear'.
    :returns:
      1D numpy array of grid values at each of input XY points.
    """
    if not len(xypoints):
        return np.array([])
    xmin = np.min(xypoints[:, 0])
    xmax = np.max(xypoints[:, 0])
    ymin = np.min(xypoints[:, 1])
    ymax = np.max(xypoints[:, 1])
    gridtype = None
    try:
        fdict, tmp = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception:
        try:
            fdict, tmp = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except Exception:
            pass
    if gridtype is None:
        raise Exception('File "%s" does not appear to be either a GMT grid '
                        'or an ESRI grid.' % gridfile)
    xmin = xmin - fdict.dx * 3
    xmax = xmax + fdict.dx * 3
    ymin = ymin - fdict.dy * 3
    ymax = ymax + fdict.dy * 3
    if gridtype == 'gmt':
        fgeodict, tmp = GMTGrid.getFileGeoDict(gridfile)
    else:
        fgeodict, tmp = GDALGrid.getFileGeoDict(gridfile)
    dx, dy = (fgeodict.dx, fgeodict.dy)
    sdict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    if gridtype == 'gmt':
        grid = GMTGrid.load(gridfile, samplegeodict=sdict, resample=True,
                            method=method, doPadding=True)
    else:
        grid = GDALGrid.load(gridfile, samplegeodict=sdict, resample=True,
                             method=method, doPadding=True)
    return sampleFromGrid(grid, xypoints)
def test_mapmaker_contour():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    shakedir = os.path.abspath(os.path.join(homedir, '..', '..', '..'))
    out_file = os.path.join(shakedir, 'tests', 'data', 'containers',
                            'northridge', 'shake_result.hdf')
    container = ShakeMapOutputContainer.load(out_file)
    topofile = os.path.join(homedir, '..', '..', 'data', 'install', 'data',
                            'mapping', 'CA_topo.grd')

    info = container.getMetadata()
    xmin = info['output']['map_information']['min']['longitude']
    xmax = info['output']['map_information']['max']['longitude']
    ymin = info['output']['map_information']['min']['latitude']
    ymax = info['output']['map_information']['max']['latitude']
    xmin = float(xmin) - 0.1
    xmax = float(xmax) + 0.1
    ymin = float(ymin) - 0.1
    ymax = float(ymax) + 0.1
    dy = float(info['output']['map_information']['grid_spacing']['latitude'])
    dx = float(info['output']['map_information']['grid_spacing']['longitude'])
    sampledict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    topogrid = GMTGrid.load(topofile, samplegeodict=sampledict,
                            resample=False)

    oceanfile = os.path.join(homedir, '..', '..', 'data', 'install', 'data',
                             'mapping', 'northridge_ocean.json')
    outpath = mkdtemp()
    filter_size = 10
    try:
        pdf, png = draw_contour(container, 'PGA', topogrid, oceanfile,
                                outpath, 'NEIC', filter_size)
        print(pdf)
    except Exception:
        assert 1 == 2
    finally:
        shutil.rmtree(outpath)
def main(self, gmrecords):
    """Compute station metrics.

    Args:
        gmrecords: GMrecordsApp instance.
    """
    logging.info('Running subcommand \'%s\'' % self.command_name)

    self.gmrecords = gmrecords
    self._check_arguments()
    self._get_events()

    vs30_grids = None
    if gmrecords.conf is not None:
        if 'vs30' in gmrecords.conf['metrics']:
            vs30_grids = gmrecords.conf['metrics']['vs30']
            for vs30_name in vs30_grids:
                vs30_grids[vs30_name]['grid_object'] = GMTGrid.load(
                    vs30_grids[vs30_name]['file'])
    self.vs30_grids = vs30_grids

    if gmrecords.args.num_processes:
        # parallelize processing on events
        try:
            client = Client(n_workers=gmrecords.args.num_processes)
        except BaseException as ex:
            print(ex)
            print("Could not create a dask client.")
            print("To turn off parallelization, use '--num-processes 0'.")
            sys.exit(1)
        futures = client.map(self._event_station_metrics, self.events)
        for result in as_completed(futures, with_results=True):
            print(result)
            # print('Completed event: %s' % result)
    else:
        for event in self.events:
            self._event_station_metrics(event)

    self._summarize_files_created()
def drawContourMap(self, imt, outfolder, cmin=None, cmax=None):
    """
    Render IMT data as contours over topography, with oceans, coastlines,
    etc.

    Args:
        imt (str): IMT (e.g., 'MMI', 'PGA', 'PGV') to contour.
        outfolder (str): Path to directory where output map should be
            saved.

    Returns:
        str: Path to output IMT map.
    """
    if self.contour_colormap is None:
        raise Exception('MapMaker.setGMTColormap() has not been called.')
    t0 = time.time()
    # resample shakemap to topogrid
    # get the geodict for the topo file
    topodict = GMTGrid.getFileGeoDict(self.topofile)[0]
    # get the geodict for the ShakeMap
    comp = self.container.getComponents(imt)[0]
    imtdict = self.container.getIMTGrids(imt, comp)
    imtgrid = imtdict['mean']
    smdict = imtgrid.getGeoDict()
    # get a geodict that is aligned with topo, but inside shakemap
    sampledict = topodict.getBoundsWithin(smdict)
    imtgrid = imtgrid.interpolateToGrid(sampledict)
    gd = imtgrid.getGeoDict()

    # establish the basemap object
    m = self._setMap(gd)

    # get topo layer and project it
    topogrid = GMTGrid.load(self.topofile, samplegeodict=sampledict,
                            resample=False)
    topodata = topogrid.getData().copy()
    ptopo = self._projectGrid(topodata, m, gd)

    # get contour layer and project it
    imtdata = imtgrid.getData().copy()
    # convert units if necessary
    if imt == 'MMI':
        pass
    elif imt == 'PGV':
        imtdata = np.exp(imtdata)
    else:
        imtdata = np.exp(imtdata) * 100
    pimt = self._projectGrid(imtdata, m, gd)

    # get the draped intensity data
    hillshade = self._getShaded(ptopo)

    # draw the draped intensity data
    m.imshow(hillshade, interpolation='none', zorder=IMG_ZORDER)

    # draw the contours of imt data
    xmin = gd.xmin
    if gd.xmax < gd.xmin:
        xmin -= 360
    lons = np.linspace(xmin, gd.xmax, gd.nx)
    # backwards so it plots right side up
    lats = np.linspace(gd.ymax, gd.ymin, gd.ny)
    x, y = m(*np.meshgrid(lons, lats))
    pimt = gaussian_filter(pimt, 5.0)
    dmin = pimt.min()
    dmax = pimt.max()
    levels = self.getContourLevels(dmin, dmax, imt)
    cs = m.contour(x, y, np.flipud(pimt), colors='w', cmap=None,
                   levels=levels, zorder=CONTOUR_ZORDER)
    clabels = plt.clabel(cs, colors='k', fmt='%.1f', fontsize=8.0,
                         zorder=CONTOUR_ZORDER)
    for cl in clabels:
        bbox = dict(boxstyle="round", facecolor='white', edgecolor='w')
        cl.set_bbox(bbox)
        cl.set_zorder(CONTOUR_ZORDER)

    # draw country/state boundaries
    self._drawBoundaries(m)

    # draw lakes
    self._drawLakes(m, gd)

    # draw oceans (pre-processed with islands taken out)
    t1 = time.time()
    self._drawOceans(m, gd)
    t2 = time.time()
    self.logger.debug('%.1f seconds to render oceans.' % (t2 - t1))

    # draw coastlines
    self._drawCoastlines(m, gd)

    # draw meridians, parallels, labels, ticks
    self._drawGraticules(m, gd)

    # draw filled symbols for MMI and instrumented measures
    self._drawStations(m, fill=True, imt=imt)

    # draw map scale
    self._drawMapScale(m, gd)

    # draw fault polygon, if present
    self._drawFault(m)  # get the fault loaded

    # draw epicenter
    origin = self.fault.getOrigin()
    hlon = origin.lon
    hlat = origin.lat
    m.plot(hlon, hlat, 'k*', latlon=True, fillstyle='none', markersize=22,
           mew=1.2, zorder=EPICENTER_ZORDER)

    # draw cities
    # reduce the number of cities to those whose labels don't collide
    # set up cities
    if self.city_cols is not None:
        self.cities = self.cities.limitByBounds(
            (gd.xmin, gd.xmax, gd.ymin, gd.ymax))
        self.cities = self.cities.limitByGrid(
            nx=self.city_cols, ny=self.city_rows,
            cities_per_grid=self.cities_per_grid)
        if 'Times New Roman' in self.cities._fontlist:
            font = 'Times New Roman'
        else:
            font = 'DejaVu Sans'
        self.cities = self.cities.limitByMapCollision(m, fontname=font)
        self.cities.renderToMap(m.ax, zorder=CITIES_ZORDER)

    # draw title and supertitle
    self._drawTitle(imt)

    # save plot to file
    fileimt = oq_to_file(imt)
    plt.draw()
    outfile = os.path.join(outfolder, 'contour_%s.pdf' % (fileimt))
    plt.savefig(outfile)
    tn = time.time()
    self.logger.debug('%.1f seconds to render entire map.' % (tn - t0))
    return outfile
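# The xmin adjustment in drawContourMap handles grids that cross the
# 180-degree meridian: when xmax < xmin, shifting the western edge down by
# 360 lets np.linspace produce a monotonic longitude axis. A minimal
# self-contained sketch:
import numpy as np

xmin, xmax, nx = 179.5, -179.5, 5  # grid spanning the dateline
if xmax < xmin:
    xmin -= 360
lons = np.linspace(xmin, xmax, nx)  # [-180.5 ... -179.5], monotonic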
def drawContourMap(self, outfolder, cmin=None, cmax=None):
    if self.contour_colormap is None:
        raise ShakeMapException(
            'MapMaker.setGMTColormap() has not been called.')
    t0 = time.time()
    # resample shakemap to topogrid
    # get the geodict for the topo file
    topodict = GMTGrid.getFileGeoDict(self.topofile)
    # get the geodict for the ShakeMap
    smdict = self.shakemap.getGeoDict()
    # get a geodict that is aligned with topo, but inside shakemap
    sampledict = topodict.getBoundsWithin(smdict)
    self.shakemap = self.shakemap.interpolateToGrid(sampledict)
    gd = self.shakemap.getGeoDict()

    # establish the basemap object
    m = self._setMap(gd)

    # get topo layer and project it
    topogrid = GMTGrid.load(self.topofile, samplegeodict=sampledict,
                            resample=False)
    topodata = topogrid.getData().copy()
    ptopo = self._projectGrid(topodata, m, gd)

    # get contour layer and project it
    imtdata = self.shakemap.getLayer(self.contour_layer).getData().copy()
    pimt = self._projectGrid(imtdata, m, gd)

    # get the draped intensity data
    hillshade = self._getShaded(ptopo)

    # draw the draped intensity data
    m.imshow(hillshade, interpolation='none', zorder=IMG_ZORDER)

    # draw the contours of imt data
    xmin = gd.xmin
    if gd.xmax < gd.xmin:
        xmin -= 360
    lons = np.linspace(xmin, gd.xmax, gd.nx)
    # backwards so it plots right side up
    lats = np.linspace(gd.ymax, gd.ymin, gd.ny)
    x, y = m(*np.meshgrid(lons, lats))
    pimt = gaussian_filter(pimt, 5.0)
    dmin = pimt.min()
    dmax = pimt.max()
    levels = self.getContourLevels(dmin, dmax, self.contour_layer)
    cs = m.contour(x, y, np.flipud(pimt), colors='w', cmap=None,
                   levels=levels, zorder=CONTOUR_ZORDER)
    clabels = plt.clabel(cs, colors='k', fmt='%.1f', fontsize=8.0,
                         zorder=CONTOUR_ZORDER)
    for cl in clabels:
        bbox = dict(boxstyle="round", facecolor='white', edgecolor='w')
        cl.set_bbox(bbox)
        cl.set_zorder(CONTOUR_ZORDER)

    # draw country/state boundaries
    self._drawBoundaries(m)

    # draw lakes
    self._drawLakes(m, gd)

    # draw oceans (pre-processed with islands taken out)
    t1 = time.time()
    self._drawOceans(m, gd)
    t2 = time.time()
    print('%.1f seconds to render oceans.' % (t2 - t1))

    # draw coastlines
    self._drawCoastlines(m, gd)

    # draw meridians, parallels, labels, ticks
    self._drawGraticules(m, gd)

    # draw filled symbols for MMI and instrumented measures
    self._drawStations(m, fill=True, imt=self.contour_layer)

    # draw map scale
    scalex = gd.xmin + (gd.xmax - gd.xmin) / 5.0
    scaley = gd.ymin + (gd.ymax - gd.ymin) / 10.0
    yoff = (0.007 * (m.ymax - m.ymin))
    clon = (gd.xmin + gd.xmax) / 2.0
    clat = (gd.ymin + gd.ymax) / 2.0
    m.drawmapscale(scalex, scaley, clon, clat, length=100,
                   barstyle='fancy', yoffset=yoff, zorder=SCALE_ZORDER)

    # draw fault polygon, if present
    self._drawFault(m)  # get the fault loaded

    # draw epicenter
    hlon = self.shakemap.getEventDict()['lon']
    hlat = self.shakemap.getEventDict()['lat']
    m.plot(hlon, hlat, 'k*', latlon=True, fillstyle='none', markersize=22,
           mew=1.2, zorder=EPICENTER_ZORDER)

    # draw cities
    # reduce the number of cities to those whose labels don't collide
    # set up cities
    if self.city_cols is not None:
        self.cities = self.cities.limitByBounds(
            (gd.xmin, gd.xmax, gd.ymin, gd.ymax))
        self.cities = self.cities.limitByGrid(
            nx=self.city_cols, ny=self.city_rows,
            cities_per_grid=self.cities_per_grid)
        self.cities = self.cities.limitByMapCollision(m)
        self.cities.renderToMap(m.ax, zorder=CITIES_ZORDER)

    # draw title and supertitle
    eventid = self._drawTitle(isContour=True)

    # draw whatever road data is available
    # self._drawRoads(m)

    # save plot to file
    plt.draw()
    outfile = os.path.join(outfolder, 'contour_%s_%s.pdf' %
                           (self.contour_layer, eventid))
    plt.savefig(outfile)
    tn = time.time()
    print('%.1f seconds to render entire map.' % (tn - t0))
    return outfile
def drawIntensityMap(self, outfolder):
    if self.intensity_colormap is None:
        raise ShakeMapException(
            'MapMaker.setGMTColormap() has not been called.')
    t0 = time.time()
    # resample shakemap to topogrid
    # get the geodict for the topo file
    topodict = GMTGrid.getFileGeoDict(self.topofile)
    # get the geodict for the ShakeMap
    smdict = self.shakemap.getGeoDict()
    # get a geodict that is aligned with topo, but inside shakemap
    sampledict = topodict.getBoundsWithin(smdict)
    self.shakemap = self.shakemap.interpolateToGrid(sampledict)
    gd = self.shakemap.getGeoDict()

    # establish the basemap object
    m = self._setMap(gd)

    # get topo layer and project it
    topogrid = GMTGrid.load(self.topofile, samplegeodict=sampledict,
                            resample=False)
    topodata = topogrid.getData().copy()
    ptopo = self._projectGrid(topodata, m, gd)

    # get intensity layer and project it
    imtdata = self.shakemap.getLayer(self.imt_layer).getData().copy()
    pimt = self._projectGrid(imtdata, m, gd)

    # get the draped intensity data
    draped_hsv = self._getDraped(pimt, ptopo)  # where will 10.0 come from

    # draw the draped intensity data
    m.imshow(draped_hsv, interpolation='none', zorder=IMG_ZORDER)

    # draw country/state boundaries
    self._drawBoundaries(m)

    # draw whatever road data is available
    self._drawRoads(m)

    # draw lakes
    self._drawLakes(m, gd)

    # draw oceans (pre-processed with islands taken out)
    t1 = time.time()
    self._drawOceans(m, gd)
    t2 = time.time()
    print('%.1f seconds to render oceans.' % (t2 - t1))

    # draw coastlines
    self._drawCoastlines(m, gd)

    # draw meridians, parallels, labels, ticks
    self._drawGraticules(m, gd)

    # draw map scale
    scalex = gd.xmin + (gd.xmax - gd.xmin) / 5.0
    scaley = gd.ymin + (gd.ymax - gd.ymin) / 10.0
    yoff = (0.007 * (m.ymax - m.ymin))
    clon = (gd.xmin + gd.xmax) / 2.0
    clat = (gd.ymin + gd.ymax) / 2.0
    m.drawmapscale(scalex, scaley, clon, clat, length=100,
                   barstyle='fancy', yoffset=yoff, zorder=SCALE_ZORDER)

    # draw fault polygon, if present
    self._drawFault(m)  # get the fault loaded

    # draw epicenter
    hlon = self.shakemap.getEventDict()['lon']
    hlat = self.shakemap.getEventDict()['lat']
    m.plot(hlon, hlat, 'k*', latlon=True, fillstyle='none', markersize=22,
           mew=1.2, zorder=EPICENTER_ZORDER)

    # draw cities
    # reduce the number of cities to those whose labels don't collide
    # set up cities
    if self.city_cols is not None:
        self.cities = self.cities.limitByBounds(
            (gd.xmin, gd.xmax, gd.ymin, gd.ymax))
        self.cities = self.cities.limitByGrid(
            nx=self.city_cols, ny=self.city_rows,
            cities_per_grid=self.cities_per_grid)
        self.cities = self.cities.limitByMapCollision(m)
        self.cities.renderToMap(m.ax, zorder=CITIES_ZORDER)

    # draw title and supertitle
    eventid = self._drawTitle()

    # draw station and macroseismic locations
    self._drawStations(m)  # need stationlist object

    # save plot to file
    plt.draw()
    outfile = os.path.join(outfolder, 'intensity_%s.pdf' % eventid)
    plt.savefig(outfile)
    tn = time.time()
    print('%.1f seconds to render entire map.' % (tn - t0))
    return outfile
def __init__(self, config, shakefile, model, uncertfile=None):
    """Set up the logistic model

    :param config: configobj (config .ini file read in using configobj)
      defining the model and its inputs
    :type config: dictionary
    :param shakefile: Full file path to shakemap.xml file for the event of
      interest
    :type shakefile: string
    :param model: Name of model defined in config that should be run for
      the event of interest
    :type model: string
    :param uncertfile: Full file path to xml file of shakemap uncertainties
    :type uncertfile: string
    """
    if model not in getLogisticModelNames(config):
        raise Exception('Could not find a model called "%s" in config %s.' %
                        (model, config))
    # do everything here short of calculations - parse config, assemble
    # eqn strings, load data.
    self.model = model
    cmodel = config['logistic_models'][model]
    self.modeltype = cmodel['gfetype']
    self.coeffs = validateCoefficients(cmodel)
    # key = layer name, value = file name
    self.layers = validateLayers(cmodel)
    self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
    self.interpolations = validateInterpolations(cmodel, self.layers)
    self.units = validateUnits(cmodel, self.layers)
    self.gmused = [value for term, value in cmodel['terms'].items()
                   if 'pga' in value.lower() or 'pgv' in value.lower() or
                   'mmi' in value.lower()]
    self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
    if 'baselayer' not in cmodel:
        raise Exception('You must specify a base layer file in config.')
    if cmodel['baselayer'] not in list(self.layers.keys()):
        raise Exception('You must specify a base layer corresponding to '
                        'one of the files in the layer section.')

    # get the geodict for the shakemap
    geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
    griddict, eventdict, specdict, fields, uncertainties = \
        getHeaderData(shakefile)
    # YEAR = eventdict['event_timestamp'].year
    MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]
    # DAY = eventdict['event_timestamp'].day
    # HOUR = eventdict['event_timestamp'].hour

    # now find the layer that is our base layer and get the largest bounds
    # we can guarantee not to exceed shakemap bounds
    basefile = self.layers[cmodel['baselayer']]
    ftype = getFileType(basefile)
    if ftype == 'esri':
        basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
        sampledict = basegeodict.getBoundsWithin(geodict)
    elif ftype == 'gmt':
        basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
        sampledict = basegeodict.getBoundsWithin(geodict)
    else:
        raise Exception('All predictor variable grids must be a valid '
                        'GMT or ESRI file type')

    # now load the shakemap, resampling and padding if necessary
    self.shakemap = ShakeGrid.load(shakefile, samplegeodict=sampledict,
                                   resample=True, doPadding=True,
                                   adjust='res')

    # take uncertainties into account
    if uncertfile is not None:
        try:
            self.uncert = ShakeGrid.load(uncertfile,
                                         samplegeodict=sampledict,
                                         resample=True, doPadding=True,
                                         adjust='res')
        except Exception:
            print('Could not read uncertainty file, ignoring uncertainties')
            self.uncert = None
    else:
        self.uncert = None

    # load the predictor layers into a dictionary
    self.layerdict = {}  # key = layer name, value = grid object
    for layername, layerfile in self.layers.items():
        if isinstance(layerfile, list):
            for lfile in layerfile:
                if timeField == 'MONTH':
                    if lfile.find(MONTH) > -1:
                        layerfile = lfile
                        ftype = getFileType(layerfile)
                        interp = self.interpolations[layername]
                        if ftype == 'gmt':
                            lyr = GMTGrid.load(layerfile, sampledict,
                                               resample=True, method=interp,
                                               doPadding=True)
                        elif ftype == 'esri':
                            lyr = GDALGrid.load(layerfile, sampledict,
                                                resample=True,
                                                method=interp,
                                                doPadding=True)
                        else:
                            msg = ('Layer %s (file %s) does not appear to '
                                   'be a valid GMT or ESRI file.' %
                                   (layername, layerfile))
                            raise Exception(msg)
                        self.layerdict[layername] = lyr
        else:
            # first, figure out what kind of file we have
            # (or is it a directory?)
            ftype = getFileType(layerfile)
            interp = self.interpolations[layername]
            if ftype == 'gmt':
                lyr = GMTGrid.load(layerfile, sampledict, resample=True,
                                   method=interp, doPadding=True)
            elif ftype == 'esri':
                lyr = GDALGrid.load(layerfile, sampledict, resample=True,
                                    method=interp, doPadding=True)
            else:
                msg = ('Layer %s (file %s) does not appear to be a valid '
                       'GMT or ESRI file.' % (layername, layerfile))
                raise Exception(msg)
            self.layerdict[layername] = lyr

    shapes = {}
    for layername, layer in self.layerdict.items():
        shapes[layername] = layer.getData().shape

    self.nuggets = [str(self.coeffs['b0'])]
    ckeys = list(self.terms.keys())
    ckeys.sort()
    for key in ckeys:
        term = self.terms[key]
        coeff = self.coeffs[key]
        self.nuggets.append('(%g * %s)' % (coeff, term))
    self.equation = ' + '.join(self.nuggets)

    if self.uncert is not None:
        self.nugmin = copy.copy(self.nuggets)
        self.nugmax = copy.copy(self.nuggets)
        # Find the term with the shakemap input and replace for these
        # nuggets
        for k, nug in enumerate(self.nuggets):
            if "self.shakemap.getLayer('pga').getData()" in nug:
                self.nugmin[k] = self.nugmin[k].replace(
                    "self.shakemap.getLayer('pga').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pga')"
                    ".getData()) - "
                    "self.uncert.getLayer('stdpga').getData()))")
                self.nugmax[k] = self.nugmax[k].replace(
                    "self.shakemap.getLayer('pga').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pga')"
                    ".getData()) + "
                    "self.uncert.getLayer('stdpga').getData()))")
            elif "self.shakemap.getLayer('pgv').getData()" in nug:
                self.nugmin[k] = self.nugmin[k].replace(
                    "self.shakemap.getLayer('pgv').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pgv')"
                    ".getData()) - "
                    "self.uncert.getLayer('stdpgv').getData()))")
                self.nugmax[k] = self.nugmax[k].replace(
                    "self.shakemap.getLayer('pgv').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pgv')"
                    ".getData()) + "
                    "self.uncert.getLayer('stdpgv').getData()))")
            elif "self.shakemap.getLayer('mmi').getData()" in nug:
                self.nugmin[k] = self.nugmin[k].replace(
                    "self.shakemap.getLayer('mmi').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('mmi')"
                    ".getData()) - "
                    "self.uncert.getLayer('stdmmi').getData()))")
                self.nugmax[k] = self.nugmax[k].replace(
                    "self.shakemap.getLayer('mmi').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('mmi')"
                    ".getData()) + "
                    "self.uncert.getLayer('stdmmi').getData()))")
        self.equationmin = ' + '.join(self.nugmin)
        self.equationmax = ' + '.join(self.nugmax)
    else:
        self.equationmin = None
        self.equationmax = None

    self.geodict = self.shakemap.getGeoDict()

    try:
        self.slopemin = float(config['logistic_models'][model]['slopemin'])
        self.slopemax = float(config['logistic_models'][model]['slopemax'])
    except Exception:
        print('could not find slopemin and/or slopemax in config, '
              'no limits will be applied')
        self.slopemin = 0.
        self.slopemax = 90.
def calcMetrics(self, eventid, stations=None, labels=None, config=None,
                streams=None, stream_label=None, rupture_file=None,
                calc_station_metrics=True, calc_waveform_metrics=True):
    """
    Calculate waveform and/or station metrics for a set of waveforms.

    Args:
        eventid (str): ID of event to search for in ASDF file.
        stations (list): List of stations to create metrics for.
        labels (list): List of processing labels to create metrics for.
        config (dict): Configuration dictionary.
        streams (StreamCollection): Optional StreamCollection object to
            create metrics for.
        stream_label (str): Label to be used in the metrics path when
            providing a StreamCollection.
        rupture_file (str): Path pointing to the rupture file.
        calc_station_metrics (bool): Whether to calculate station metrics.
            Default is True.
        calc_waveform_metrics (bool): Whether to calculate waveform
            metrics. Default is True.
    """
    if not self.hasEvent(eventid):
        fmt = 'No event matching %s found in workspace.'
        raise KeyError(fmt % eventid)

    if streams is None:
        streams = self.getStreams(eventid, stations=stations, labels=labels)
    event = self.getEvent(eventid)

    # Load the rupture file
    origin = Origin({
        'id': event.id,
        'netid': '',
        'network': '',
        'lat': event.latitude,
        'lon': event.longitude,
        'depth': event.depth_km,
        'locstring': '',
        'mag': event.magnitude,
        'time': event.time
    })
    rupture = get_rupture(origin, rupture_file)

    vs30_grids = None
    if config is not None:
        if 'vs30' in config['metrics']:
            vs30_grids = config['metrics']['vs30']
            for vs30_name in vs30_grids:
                vs30_grids[vs30_name]['grid_object'] = GMTGrid.load(
                    vs30_grids[vs30_name]['file'])

    for stream in streams:
        instrument = stream.get_id()
        logging.info('Calculating stream metrics for %s...' % instrument)
        try:
            summary = StationSummary.from_config(
                stream, event=event, config=config,
                calc_waveform_metrics=calc_waveform_metrics,
                calc_station_metrics=calc_station_metrics,
                rupture=rupture, vs30_grids=vs30_grids)
        except BaseException as pgme:
            fmt = ('Could not create stream metrics for event %s, '
                   'instrument %s: "%s"')
            logging.warning(fmt % (eventid, instrument, str(pgme)))
            continue

        if calc_waveform_metrics and stream.passed:
            xmlstr = summary.get_metric_xml()
            if stream_label is not None:
                tag = '%s_%s' % (eventid, stream_label)
            else:
                tag = stream.tag
            metricpath = '/'.join([
                format_netsta(stream[0].stats),
                format_nslit(stream[0].stats, stream.get_inst(), tag),
            ])
            self.insert_aux(xmlstr, 'WaveFormMetrics', metricpath)

        if calc_station_metrics:
            xmlstr = summary.get_station_xml()
            metricpath = '/'.join([
                format_netsta(stream[0].stats),
                format_nslit(stream[0].stats, stream.get_inst(), eventid)
            ])
            self.insert_aux(xmlstr, 'StationMetrics', metricpath)
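# Usage sketch for calcMetrics, assuming `ws` is an open workspace object
# of the class this method belongs to and the event already exists in it;
# the event ID and file name are hypothetical:
#
#   ws.calcMetrics('us1000abcd', labels=['default'], config=config,
#                  rupture_file='rupture.json')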
def __init__(self, shakefile, config, uncertfile=None, saveinputs=False,
             slopefile=None, slopediv=1., bounds=None, numstd=1):
    """Set up the logistic model

    # ADD BOUNDS TO THIS MODEL

    :param config: configobj (config .ini file read in using configobj)
      defining the model and its inputs. Only one model should be
      described in each config file.
    :type config: dictionary
    :param shakefile: Full file path to shakemap.xml file for the event of
      interest
    :type shakefile: string
    :param uncertfile: Full file path to xml file of shakemap uncertainties
    :type uncertfile: string
    :param saveinputs: if True, saves all the input layers as Grid2D
      objects in addition to the model; if False, it will just output the
      model
    :type saveinputs: boolean
    :param slopefile: optional file path to slopefile that will be
      resampled to the other input files for applying thresholds.
      OVERWRITES VALUE IN CONFIG
    :type slopefile: string
    :param slopediv: number to divide slope by to get to degrees (usually
      will be default of 1.)
    :type slopediv: float
    :param numstd: number of +/- standard deviations to use if uncertainty
      is computed (uncertfile is not None)
    """
    mnames = getLogisticModelNames(config)
    if len(mnames) == 0:
        raise Exception(
            'No config file found or problem with config file format')
    if len(mnames) > 1:
        raise Exception(
            'Config file contains more than one model which is no longer '
            'allowed, update your config file to the newer format')
    self.model = mnames[0]
    self.config = config
    cmodel = config[self.model]
    self.modeltype = cmodel['gfetype']
    self.coeffs = validateCoefficients(cmodel)
    # key = layer name, value = file name
    self.layers = validateLayers(cmodel)
    self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
    self.interpolations = validateInterpolations(cmodel, self.layers)
    self.units = validateUnits(cmodel, self.layers)
    self.gmused = [value for term, value in cmodel['terms'].items()
                   if 'pga' in value.lower() or 'pgv' in value.lower() or
                   'mmi' in value.lower()]
    self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
    self.numstd = numstd
    if cmodel['baselayer'] not in list(self.layers.keys()):
        raise Exception('You must specify a base layer corresponding to '
                        'one of the files in the layer section.')

    self.saveinputs = saveinputs
    if slopefile is None:
        try:
            self.slopefile = cmodel['slopefile']
        except Exception:
            print('Could not find slopefile term in config, no slope '
                  'thresholds will be applied\n')
            self.slopefile = None
    else:
        self.slopefile = slopefile
    self.slopediv = slopediv

    # get the geodict for the shakemap
    geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
    griddict, eventdict, specdict, fields, uncertainties = \
        getHeaderData(shakefile)
    # YEAR = eventdict['event_timestamp'].year
    MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]
    # DAY = eventdict['event_timestamp'].day
    # HOUR = eventdict['event_timestamp'].hour

    # now find the layer that is our base layer and get the largest bounds
    # we can guarantee not to exceed shakemap bounds
    basefile = self.layers[cmodel['baselayer']]
    ftype = getFileType(basefile)
    if ftype == 'esri':
        basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
        sampledict = basegeodict.getBoundsWithin(geodict)
    elif ftype == 'gmt':
        basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
        sampledict = basegeodict.getBoundsWithin(geodict)
    else:
        raise Exception('All predictor variable grids must be a valid '
                        'GMT or ESRI file type')

    # now load the shakemap, resampling and padding if necessary
    if ShakeGrid.getFileGeoDict(shakefile, adjust='res') == sampledict:
        self.shakemap = ShakeGrid.load(shakefile, adjust='res')
        flag = 1
    else:
        self.shakemap = ShakeGrid.load(shakefile, samplegeodict=sampledict,
                                       resample=True, doPadding=True,
                                       adjust='res')
        flag = 0

    # take uncertainties into account
    if uncertfile is not None:
        try:
            if flag == 1:
                self.uncert = ShakeGrid.load(uncertfile, adjust='res')
            else:
                self.uncert = ShakeGrid.load(uncertfile,
                                             samplegeodict=sampledict,
                                             resample=True, doPadding=True,
                                             adjust='res')
        except Exception:
            print('Could not read uncertainty file, ignoring uncertainties')
            self.uncert = None
    else:
        self.uncert = None

    # load the predictor layers into a dictionary
    self.layerdict = {}  # key = layer name, value = grid object
    for layername, layerfile in self.layers.items():
        if isinstance(layerfile, list):
            for lfile in layerfile:
                if timeField == 'MONTH':
                    if lfile.find(MONTH) > -1:
                        layerfile = lfile
                        ftype = getFileType(layerfile)
                        interp = self.interpolations[layername]
                        if ftype == 'gmt':
                            if GMTGrid.getFileGeoDict(
                                    layerfile)[0] == sampledict:
                                lyr = GMTGrid.load(layerfile)
                            else:
                                lyr = GMTGrid.load(layerfile, sampledict,
                                                   resample=True,
                                                   method=interp,
                                                   doPadding=True)
                        elif ftype == 'esri':
                            if GDALGrid.getFileGeoDict(
                                    layerfile)[0] == sampledict:
                                lyr = GDALGrid.load(layerfile)
                            else:
                                lyr = GDALGrid.load(layerfile, sampledict,
                                                    resample=True,
                                                    method=interp,
                                                    doPadding=True)
                        else:
                            msg = ('Layer %s (file %s) does not appear to '
                                   'be a valid GMT or ESRI file.' %
                                   (layername, layerfile))
                            raise Exception(msg)
                        self.layerdict[layername] = lyr
        else:
            # first, figure out what kind of file we have
            # (or is it a directory?)
            ftype = getFileType(layerfile)
            interp = self.interpolations[layername]
            if ftype == 'gmt':
                if GMTGrid.getFileGeoDict(layerfile)[0] == sampledict:
                    lyr = GMTGrid.load(layerfile)
                else:
                    lyr = GMTGrid.load(layerfile, sampledict, resample=True,
                                       method=interp, doPadding=True)
            elif ftype == 'esri':
                if GDALGrid.getFileGeoDict(layerfile)[0] == sampledict:
                    lyr = GDALGrid.load(layerfile)
                else:
                    lyr = GDALGrid.load(layerfile, sampledict,
                                        resample=True, method=interp,
                                        doPadding=True)
            else:
                msg = ('Layer %s (file %s) does not appear to be a valid '
                       'GMT or ESRI file.' % (layername, layerfile))
                raise Exception(msg)
            self.layerdict[layername] = lyr

    shapes = {}
    for layername, layer in self.layerdict.items():
        shapes[layername] = layer.getData().shape

    self.nuggets = [str(self.coeffs['b0'])]
    ckeys = list(self.terms.keys())
    ckeys.sort()
    for key in ckeys:
        term = self.terms[key]
        coeff = self.coeffs[key]
        self.nuggets.append('(%g * %s)' % (coeff, term))
    self.equation = ' + '.join(self.nuggets)

    if self.uncert is not None:
        self.nugmin = copy.copy(self.nuggets)
        self.nugmax = copy.copy(self.nuggets)
        # Find the term with the shakemap input and replace for these
        # nuggets
        for k, nug in enumerate(self.nuggets):
            if "self.shakemap.getLayer('pga').getData()" in nug:
                self.nugmin[k] = self.nugmin[k].replace(
                    "self.shakemap.getLayer('pga').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pga')"
                    ".getData()) - self.numstd * "
                    "self.uncert.getLayer('stdpga').getData()))")
                self.nugmax[k] = self.nugmax[k].replace(
                    "self.shakemap.getLayer('pga').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pga')"
                    ".getData()) + self.numstd * "
                    "self.uncert.getLayer('stdpga').getData()))")
            elif "self.shakemap.getLayer('pgv').getData()" in nug:
                self.nugmin[k] = self.nugmin[k].replace(
                    "self.shakemap.getLayer('pgv').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pgv')"
                    ".getData()) - self.numstd * "
                    "self.uncert.getLayer('stdpgv').getData()))")
                self.nugmax[k] = self.nugmax[k].replace(
                    "self.shakemap.getLayer('pgv').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pgv')"
                    ".getData()) + self.numstd * "
                    "self.uncert.getLayer('stdpgv').getData()))")
            elif "self.shakemap.getLayer('mmi').getData()" in nug:
                self.nugmin[k] = self.nugmin[k].replace(
                    "self.shakemap.getLayer('mmi').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('mmi')"
                    ".getData()) - self.numstd * "
                    "self.uncert.getLayer('stdmmi').getData()))")
                self.nugmax[k] = self.nugmax[k].replace(
                    "self.shakemap.getLayer('mmi').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('mmi')"
                    ".getData()) + self.numstd * "
                    "self.uncert.getLayer('stdmmi').getData()))")
        self.equationmin = ' + '.join(self.nugmin)
        self.equationmax = ' + '.join(self.nugmax)
    else:
        self.equationmin = None
        self.equationmax = None

    self.geodict = self.shakemap.getGeoDict()

    try:
        self.slopemin = float(config[self.model]['slopemin'])
        self.slopemax = float(config[self.model]['slopemax'])
    except Exception:
        print('could not find slopemin and/or slopemax in config, '
              'no limits will be applied')
        self.slopemin = 0.
        self.slopemax = 90.
def calculate(self, saveinputs=False, slopefile=None, slopediv=1.):
    """Calculate the model

    :param saveinputs: if True, saves all the input layers as Grid2D
      objects in addition to the model; if False, it will just output the
      model
    :type saveinputs: boolean
    :param slopefile: optional file path to slopefile that will be
      resampled to the other input files for applying thresholds
    :type slopefile: string
    :param slopediv: number to divide slope by to get to degrees (usually
      will be default of 1.)
    :type slopediv: float
    :returns: a dictionary containing the model results and model inputs
      if saveinputs was set to True, see
      <https://github.com/usgs/groundfailure#api-for-model-output> for a
      description of the structure of this output
    """
    X = eval(self.equation)
    P = 1 / (1 + np.exp(-X))
    if self.uncert is not None:
        Xmin = eval(self.equationmin)
        Xmax = eval(self.equationmax)
        Pmin = 1 / (1 + np.exp(-Xmin))
        Pmax = 1 / (1 + np.exp(-Xmax))
    if slopefile is not None:
        ftype = getFileType(slopefile)
        sampledict = self.shakemap.getGeoDict()
        if ftype == 'gmt':
            slope = GMTGrid.load(slopefile, sampledict, resample=True,
                                 method='linear',
                                 doPadding=True).getData() / slopediv
            # Apply slope min/max limits
            print('applying slope thresholds')
            P[slope > self.slopemax] = 0.
            P[slope < self.slopemin] = 0.
            if self.uncert is not None:
                Pmin[slope > self.slopemax] = 0.
                Pmin[slope < self.slopemin] = 0.
                Pmax[slope > self.slopemax] = 0.
                Pmax[slope < self.slopemin] = 0.
        elif ftype == 'esri':
            slope = GDALGrid.load(slopefile, sampledict, resample=True,
                                  method='linear',
                                  doPadding=True).getData() / slopediv
            # Apply slope min/max limits
            print('applying slope thresholds')
            P[slope > self.slopemax] = 0.
            P[slope < self.slopemin] = 0.
            if self.uncert is not None:
                Pmin[slope > self.slopemax] = 0.
                Pmin[slope < self.slopemin] = 0.
                Pmax[slope > self.slopemax] = 0.
                Pmax[slope < self.slopemin] = 0.
        else:
            print('Slope file %s does not appear to be a valid GMT or '
                  'ESRI file, not applying any slope thresholds.' %
                  slopefile)
    else:
        print('No slope file provided, slope thresholds not applied')

    # Stuff into Grid2D object
    temp = self.shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'],
                                temp['shakemap_version'])
    description = {'name': self.modelrefs['shortref'],
                   'longref': self.modelrefs['longref'],
                   'units': 'probability',
                   'shakemap': shakedetail,
                   'parameters': {'slopemin': self.slopemin,
                                  'slopemax': self.slopemax}}
    Pgrid = Grid2D(P, self.geodict)
    rdict = collections.OrderedDict()
    rdict['model'] = {
        'grid': Pgrid,
        'label': ('%s Probability') % (self.modeltype.capitalize()),
        'type': 'output',
        'description': description
    }
    if self.uncert is not None:
        rdict['modelmin'] = {
            'grid': Grid2D(Pmin, self.geodict),
            'label': ('%s Probability (-1 std ground motion)') %
                     (self.modeltype.capitalize()),
            'type': 'output',
            'description': description
        }
        rdict['modelmax'] = {
            'grid': Grid2D(Pmax, self.geodict),
            'label': ('%s Probability (+1 std ground motion)') %
                     (self.modeltype.capitalize()),
            'type': 'output',
            'description': description
        }
    if saveinputs is True:
        for layername, layergrid in list(self.layerdict.items()):
            units = self.units[layername]
            rdict[layername] = {
                'grid': layergrid,
                'label': '%s (%s)' % (layername, units),
                'type': 'input',
                'description': {'units': units, 'shakemap': shakedetail}
            }
        for gmused in self.gmused:
            if 'pga' in gmused:
                units = '%g'
                getkey = 'pga'
            if 'pgv' in gmused:
                units = 'cm/s'
                getkey = 'pgv'
            if 'mmi' in gmused:
                units = 'intensity'
                getkey = 'mmi'
            layer = self.shakemap.getLayer(getkey)
            rdict[gmused] = {
                'grid': layer,
                'label': '%s (%s)' % (getkey.upper(), units),
                'type': 'input',
                'description': {'units': units, 'shakemap': shakedetail}
            }
            if self.uncert is not None:
                layer1 = np.exp(
                    np.log(layer.getData()) -
                    self.uncert.getLayer('std' + getkey).getData())
                rdict[gmused + '-1std'] = {
                    'grid': Grid2D(layer1, self.geodict),
                    'label': '%s (%s)' % (getkey.upper() + ' -1 std',
                                          units),
                    'type': 'input',
                    'description': {'units': units,
                                    'shakemap': shakedetail}
                }
                layer2 = np.exp(
                    np.log(layer.getData()) +
                    self.uncert.getLayer('std' + getkey).getData())
                rdict[gmused + '+1std'] = {
                    'grid': Grid2D(layer2, self.geodict),
                    'label': '%s (%s)' % (getkey.upper() + ' +1 std',
                                          units),
                    'type': 'input',
                    'description': {'units': units,
                                    'shakemap': shakedetail}
                }
    return rdict
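# The model evaluation in calculate() is plain logistic regression:
# eval() builds the linear predictor X from the configured
# coefficient/term strings, and the probability is the logistic transform
# of X. A minimal self-contained sketch of that arithmetic with made-up
# coefficients and layers:
import numpy as np

b0, b1, b2 = -3.0, 1.2, 0.5  # hypothetical coefficients
pga = np.array([[0.1, 0.4], [0.8, 1.2]])  # hypothetical input layers
slope = np.array([[5.0, 12.0], [20.0, 35.0]])
X = b0 + b1 * np.log(pga) + b2 * np.log(slope)  # linear predictor
P = 1.0 / (1.0 + np.exp(-X))  # probability in (0, 1)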
def drawIntensityMap(self, outfolder):
    """
    Render the MMI data as intensity draped over topography, with oceans,
    coastlines, etc.

    Args:
        outfolder (str): Path to directory where output map should be
            saved.

    Returns:
        str: Path to output intensity map.
    """
    t0 = time.time()
    # resample shakemap to topogrid
    # get the geodict for the topo file
    topodict = GMTGrid.getFileGeoDict(self.topofile)[0]
    # get the geodict for the ShakeMap
    comp = self.container.getComponents('MMI')[0]
    imtdict = self.container.getIMTGrids('MMI', comp)
    mmigrid = imtdict['mean']
    smdict = mmigrid.getGeoDict()
    # get a geodict that is aligned with topo, but inside shakemap
    sampledict = topodict.getBoundsWithin(smdict)
    mmigrid = mmigrid.interpolateToGrid(sampledict)
    gd = mmigrid.getGeoDict()

    # establish the basemap object
    m = self._setMap(gd)

    # get topo layer and project it
    topogrid = GMTGrid.load(self.topofile, samplegeodict=sampledict,
                            resample=False)
    topodata = topogrid.getData().copy()
    ptopo = self._projectGrid(topodata, m, gd)

    # get intensity layer and project it
    imtdata = mmigrid.getData().copy()
    pimt = self._projectGrid(imtdata, m, gd)

    # get the draped intensity data
    draped_hsv = self._getDraped(pimt, ptopo)  # where will 10.0 come from

    # draw the draped intensity data
    m.imshow(draped_hsv, interpolation='none', zorder=IMG_ZORDER)

    # draw country/state boundaries
    self._drawBoundaries(m)

    # draw whatever road data is available
    # self.logger.debug('Drawing roads...')
    # self._drawRoads(m)
    # self.logger.debug('Done drawing roads...')

    # draw lakes
    self._drawLakes(m, gd)

    # draw oceans (pre-processed with islands taken out)
    t1 = time.time()
    self._drawOceans(m, gd)
    t2 = time.time()
    self.logger.debug('%.1f seconds to render oceans.' % (t2 - t1))

    # draw coastlines
    self._drawCoastlines(m, gd)

    # draw meridians, parallels, labels, ticks
    self._drawGraticules(m, gd)

    # draw map scale
    self._drawMapScale(m, gd)

    # draw fault polygon, if present
    self._drawFault(m)  # get the fault loaded

    # draw epicenter
    origin = self.fault.getOrigin()
    hlon = origin.lon
    hlat = origin.lat
    m.plot(hlon, hlat, 'k*', latlon=True, fillstyle='none', markersize=22,
           mew=1.2, zorder=EPICENTER_ZORDER)

    # draw cities
    # reduce the number of cities to those whose labels don't collide
    # set up cities
    if self.city_cols is not None:
        self.cities = self.cities.limitByBounds(
            (gd.xmin, gd.xmax, gd.ymin, gd.ymax))
        self.cities = self.cities.limitByGrid(
            nx=self.city_cols, ny=self.city_rows,
            cities_per_grid=self.cities_per_grid)
        # self.logger.debug("Available fonts: ", self.cities._fontlist)
        if 'Times New Roman' in self.cities._fontlist:
            font = 'Times New Roman'
        else:
            font = 'DejaVu Sans'
        self.cities = self.cities.limitByMapCollision(m, fontname=font)
        self.cities.renderToMap(m.ax, zorder=CITIES_ZORDER)

    # draw title and supertitle
    self._drawTitle('MMI')

    # draw station and macroseismic locations
    self._drawStations(m)  # need stationlist object

    # save plot to file
    plt.draw()
    outfile = os.path.join(outfolder, 'intensity.pdf')
    plt.savefig(outfile)
    tn = time.time()
    self.logger.debug('%.1f seconds to render entire map.' % (tn - t0))
    return outfile
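# _getDraped blends the color-mapped intensity with a topographic
# hillshade. As an illustration of the same draping idea only (not this
# class's implementation), matplotlib's LightSource can shade an RGB
# intensity image by elevation; pimt and ptopo are the projected arrays
# from the method above:
#
#   import numpy as np
#   from matplotlib.colors import LightSource
#   from matplotlib import cm
#
#   ls = LightSource(azdeg=315, altdeg=45)
#   rgb = cm.jet((pimt - pimt.min()) / np.ptp(pimt))[:, :, :3]
#   draped = ls.shade_rgb(rgb, ptopo)  # intensity colors shaded by topo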
def holzer_liq(shakefile, config, uncertfile=None, saveinputs=False,
               modeltype=None, displmodel=None, probtype=None, bounds=None):
    """
    Method for computing the probability of liquefaction using the Holzer
    method, using the Wills et al. (2015) Vs30 map of California to define
    the susceptibility classes and the Fan et al. global water table model.
    """
    layers = config['holzer_liq_cal']['layers']
    vs30_file = layers['vs30']['file']
    wtd_file = layers['watertable']['file']
    shkgdict = ShakeGrid.getFileGeoDict(shakefile)
    fgeodict = GMTGrid.getFileGeoDict(vs30_file)[0]

    #------------------------------------------------------------------
    # Loading info
    #------------------------------------------------------------------
    shakemap = ShakeGrid.load(shakefile, fgeodict, resample=True,
                              method='linear', doPadding=True)
    PGA = shakemap.getLayer('pga').getData() / 100  # convert to g
    griddict, eventdict, specdict, fields, uncertainties = \
        getHeaderData(shakefile)
    mag = eventdict['magnitude']

    #------------------------------------------------------------------
    # Logistic function parameters from Vs30
    #------------------------------------------------------------------
    vs30_grid = GMTGrid.load(vs30_file)

    vs30 = vs30_grid.getData()
    a0 = np.zeros_like(vs30)
    b0 = np.zeros_like(vs30)
    c0 = np.zeros_like(vs30)
    a1 = np.zeros_like(vs30)
    b1 = np.zeros_like(vs30)
    c1 = np.zeros_like(vs30)
    for k, v in config['holzer_liq_cal']['parameters'].items():
        ind = np.where(vs30 == float(v[0]))
        a0[ind] = v[1]
        b0[ind] = v[2]
        c0[ind] = v[3]
        a1[ind] = v[4]
        b1[ind] = v[5]
        c1[ind] = v[6]

    #------------------------------------------------------------------
    # Water table
    #------------------------------------------------------------------
    wtd_grid = GMTGrid.load(wtd_file, fgeodict, resample=True,
                            method=layers['watertable']['interpolation'],
                            doPadding=True)
    tmp = wtd_grid._data
    tmp = np.nan_to_num(tmp)

    # Compute water weights
    w0, w1 = get_water_weights(tmp)

    #------------------------------------------------------------------
    # Compute probability of liquefaction
    #------------------------------------------------------------------
    prob0 = get_prob(PGA, a0, b0, c0, mag)
    prob1 = get_prob(PGA, a1, b1, c1, mag)
    prob = prob0 * w0 + prob1 * w1

    #------------------------------------------------------------------
    # Turn output and inputs into grids and put in maplayers dictionary
    #------------------------------------------------------------------
    maplayers = collections.OrderedDict()

    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'],
                                temp['shakemap_version'])
    modelsref = config['holzer_liq_cal']['shortref']
    modellref = config['holzer_liq_cal']['longref']
    modeltype = 'Holzer/Wills'
    maplayers['model'] = {
        'grid': GDALGrid(prob, fgeodict),
        'label': 'Probability',
        'type': 'output',
        'description': {
            'name': modelsref,
            'longref': modellref,
            'units': 'coverage',
            'shakemap': shakedetail,
            'parameters': {'modeltype': modeltype}
        }
    }

    if saveinputs is True:
        maplayers['pga'] = {
            'grid': GDALGrid(PGA, fgeodict),
            'label': 'PGA (g)',
            'type': 'input',
            'description': {'units': 'g', 'shakemap': shakedetail}
        }
        maplayers['vs30'] = {
            'grid': GDALGrid(vs30, fgeodict),
            'label': 'Vs30 (m/s)',
            'type': 'input',
            'description': {'units': 'm/s'}
        }
        maplayers['wtd'] = {
            'grid': GDALGrid(wtd_grid._data, fgeodict),
            'label': 'wtd (m)',
            'type': 'input',
            'description': {'units': 'm'}
        }
    return maplayers
def modelMap(grids, shakefile=None, suptitle=None, inventory_shapefile=None, plotorder=None, maskthreshes=None, colormaps=None, boundaries=None, zthresh=0, scaletype='continuous', lims=None, logscale=False, ALPHA=0.7, maproads=True, mapcities=True, isScenario=False, roadfolder=None, topofile=None, cityfile=None, oceanfile=None, roadcolor='#6E6E6E', watercolor='#B8EEFF', countrycolor='#177F10', outputdir=None, savepdf=True, savepng=True, showplots=False, roadref='unknown', cityref='unknown', oceanref='unknown', printparam=False, ds=True, dstype='mean', upsample=False): """ This function creates maps of mapio grid layers (e.g. liquefaction or landslide models with their input layers) All grids must use the same bounds TO DO change so that all input layers do not have to have the same bounds, test plotting multiple probability layers, and add option so that if PDF and PNG aren't output, opens plot on screen using plt.show() :param grids: Dictionary of N layers and metadata formatted like: maplayers['layer name']={ 'grid': mapio grid2D object, 'label': 'label for colorbar and top line of subtitle', 'type': 'output or input to model', 'description': 'detailed description of layer for subtitle'}. Layer names must be unique. :type name: Dictionary or Ordered dictionary - import collections; grids = collections.OrderedDict() :param shakefile: optional ShakeMap file (url or full file path) to extract information for labels and folder names :type shakefile: Shakemap Event Dictionary :param suptitle: This will be displayed at the top of the plots and in the figure names :type suptitle: string :param plotorder: List of keys describing the order to plot the grids, if None and grids is an ordered dictionary, it will use the order of the dictionary, otherwise it will choose order which may be somewhat random but it will always put a probability grid first :type plotorder: list :param maskthreshes: N x 1 array or list of lower thresholds for masking corresponding to order in plotorder or order of OrderedDict if plotorder is None. If grids is not an ordered dict and plotorder is not specified, this will not work right. If None (default), nothing will be masked :param colormaps: List of strings of matplotlib colormaps (e.g. cm.autumn_r) corresponding to plotorder or order of dictionary if plotorder is None. The list can contain both strings and None e.g. colormaps = ['cm.autumn', None, None, 'cm.jet'] and None's will default to default colormap :param boundaries: None to show entire study area, 'zoom' to zoom in on the area of action (only works if there is a probability layer) using zthresh as a threshold, or a dictionary defining lats and lons in the form of boundaries.xmin = minlon, boundaries.xmax = maxlon, boundaries.ymin = min lat, boundaries.ymax = max lat :param zthresh: threshold for computing zooming bounds, only used if boundaries = 'zoom' :type zthresh: float :param scaletype: Type of scale for plotting, 'continuous' or 'binned' - will be reflected in colorbar :type scaletype: string :param lims: None or Nx1 list of tuples or numpy arrays corresponding to plotorder defining the limits for saturating the colorbar (vmin, vmax) if scaletype is continuous or the bins to use (clev) if scaletype if binned. The list can contain tuples, arrays, and Nones, e.g. lims = [(0., 10.), None, (0.1, 1.5), np.linspace(0., 1.5, 15)]. 
        When None is specified, the program estimates the limits; when an
        array is specified but the scale type is continuous, vmin is set to
        min(array) and vmax to max(array)
    :param logscale: False, or an Nx1 list of Trues and Falses corresponding
        to plotorder, defining whether to use a linear or log scale (log10)
        when plotting the layer. This will be reflected in the labels
    :param ALPHA: Transparency for mapping; if there is a hillshade that
        will plot below each layer, it is recommended to set this to at
        least 0.7
    :type ALPHA: float
    :param maproads: Whether to show roads, default True; requires a valid
        roadfolder to work
    :type maproads: boolean
    :param mapcities: Whether to show cities, default True; requires a
        valid cityfile to work
    :type mapcities: boolean
    :param isScenario: Whether this is a scenario (True) or a real event
        (False), default False
    :type isScenario: boolean
    :param roadfolder: Full file path to folder containing road shapefiles
    :type roadfolder: string
    :param topofile: Full file path to topography grid (GDAL compatible);
        only needed to make a hillshade if a premade hillshade is not
        specified
    :type topofile: string
    :param cityfile: Full file path to Pager file containing city &
        population information
    :type cityfile: string
    :param roadcolor: Color to use for roads, if plotted, default '#6E6E6E'
    :type roadcolor: Hex color or other matplotlib compatible way of
        defining color
    :param watercolor: Color to use for oceans, lakes, and rivers, default
        '#B8EEFF'
    :type watercolor: Hex color or other matplotlib compatible way of
        defining color
    :param countrycolor: Color for country borders, default '#177F10'
    :type countrycolor: Hex color or other matplotlib compatible way of
        defining color
    :param outputdir: File path for outputting figures; if edict is
        defined, a subfolder based on the event id will be created in this
        folder. If None, the current directory is used
    :param savepdf: True to save pdf figure
    :param savepng: True to save png figure
    :param showplots: True to display the plots on screen
    :param roadref: Source reference string for the road data
    :param cityref: Source reference string for the city data
    :param oceanref: Source reference string for the ocean data
    :param printparam: True to print the model parameters at the bottom of
        the figure
    :param ds: True to allow downsampling for display (necessary when
        arrays are quite large), False to disallow
    :param dstype: Function to use in downsampling: 'min', 'max', 'med'
        (median), or 'mean'
    :param upsample: True to upsample the layer to the DEM resolution for
        better looking hillshades

    :returns:
        * PDF and/or PNG of map
        * Downsampled and trimmed version of input grids.
          If no modification was needed for plotting, this will be
          identical to grids but without the metadata.
    """
    if suptitle is None:
        suptitle = ' '
    plt.ioff()

    defaultcolormap = cm.jet

    if shakefile is not None:
        edict = ShakeGrid.load(shakefile, adjust='res').getEventDict()
        temp = ShakeGrid.load(shakefile, adjust='res').getShakeDict()
        edict['eventid'] = temp['shakemap_id']
        edict['version'] = temp['shakemap_version']
    else:
        edict = None

    # Get output file location
    if outputdir is None:
        print('No output location given, using current directory for outputs\n')
        outputdir = os.getcwd()
    if edict is not None:
        outfolder = os.path.join(outputdir, edict['event_id'])
    else:
        outfolder = outputdir
    if not os.path.isdir(outfolder):
        os.makedirs(outfolder)

    # Get plotting order, if not specified
    if plotorder is None:
        plotorder = list(grids.keys())

    # Get boundaries to use for all plots
    cut = True
    if boundaries is None:
        cut = False
        keytemp = list(grids.keys())
        boundaries = grids[keytemp[0]]['grid'].getGeoDict()
    elif boundaries == 'zoom':
        # Find probability layer (will just take the maximum bounds if
        # there is more than one)
        keytemp = list(grids.keys())
        key1 = [key for key in keytemp if 'model' in key.lower()]
        if len(key1) == 0:
            print('Could not find model layer to use for zoom, using default boundaries')
            keytemp = list(grids.keys())
            boundaries = grids[keytemp[0]]['grid'].getGeoDict()
        else:
            lonmax = -1.e10
            lonmin = 1.e10
            latmax = -1.e10
            latmin = 1.e10
            for key in key1:
                # Get lat lons of the area affected; if no area is
                # affected, switch to shakemap boundaries
                temp = grids[key]['grid']
                xmin, xmax, ymin, ymax = temp.getBounds()
                lons = np.linspace(xmin, xmax, temp.getGeoDict().nx)
                # backwards so it plots right
                lats = np.linspace(ymax, ymin, temp.getGeoDict().ny)
                row, col = np.where(temp.getData() > float(zthresh))
                lonmin = lons[col].min()
                lonmax = lons[col].max()
                latmin = lats[row].min()
                latmax = lats[row].max()
            # Dummy fillers; only the bounds really matter here
            boundaries1 = {'dx': 100, 'dy': 100., 'nx': 100., 'ny': 100}
            if xmin < lonmin - 0.15 * (lonmax - lonmin):
                boundaries1['xmin'] = lonmin - 0.1 * (lonmax - lonmin)
            else:
                boundaries1['xmin'] = xmin
            if xmax > lonmax + 0.15 * (lonmax - lonmin):
                boundaries1['xmax'] = lonmax + 0.1 * (lonmax - lonmin)
            else:
                boundaries1['xmax'] = xmax
            if ymin < latmin - 0.15 * (latmax - latmin):
                boundaries1['ymin'] = latmin - 0.1 * (latmax - latmin)
            else:
                boundaries1['ymin'] = ymin
            if ymax > latmax + 0.15 * (latmax - latmin):
                boundaries1['ymax'] = latmax + 0.1 * (latmax - latmin)
            else:
                boundaries1['ymax'] = ymax
            boundaries = GeoDict(boundaries1, adjust='res')
    else:
        # See if the specified boundaries are the same as the boundaries of
        # the layers
        keytemp = list(grids.keys())
        tempgdict = grids[keytemp[0]]['grid'].getGeoDict()
        if np.abs(tempgdict.xmin - boundaries['xmin']) < 0.05 and \
                np.abs(tempgdict.ymin - boundaries['ymin']) < 0.05 and \
                np.abs(tempgdict.xmax - boundaries['xmax']) < 0.05 and \
                np.abs(tempgdict.ymax - boundaries['ymax']) < 0.05:
            print('Input boundaries are almost the same as specified boundaries, no cutting needed')
            boundaries = tempgdict
            cut = False
        else:
            try:
                if boundaries['xmin'] > boundaries['xmax'] or \
                        boundaries['ymin'] > boundaries['ymax']:
                    print('Input boundaries are not usable, using default boundaries')
                    keytemp = list(grids.keys())
                    boundaries = grids[keytemp[0]]['grid'].getGeoDict()
                    cut = False
                else:
                    # Build dummy GeoDict
                    boundaries = GeoDict({'xmin': boundaries['xmin'],
                                          'xmax': boundaries['xmax'],
                                          'ymin': boundaries['ymin'],
                                          'ymax': boundaries['ymax'],
                                          'dx': 100., 'dy': 100.,
                                          'ny': 100., 'nx': 100.},
                                         adjust='res')
            except:
                print('Input boundaries are not usable, using default boundaries')
                keytemp = list(grids.keys())
                boundaries = grids[keytemp[0]]['grid'].getGeoDict()
                cut = False

    # Pull out bounds for various uses
    bxmin, bxmax, bymin, bymax = boundaries.xmin, boundaries.xmax, \
        boundaries.ymin, boundaries.ymax

    # Determine if we need a single panel or multi-panel plot and, if
    # multi-panel, how many panels and how they will be arranged
    fig = plt.figure()
    numpanels = len(grids)
    if numpanels == 1:
        rowpan = 1
        colpan = 1
        # create the figure and axes instances.
        fig.set_figwidth(5)
    elif numpanels == 2 or numpanels == 4:
        rowpan = np.ceil(numpanels / 2.)
        colpan = 2
        fig.set_figwidth(13)
    else:
        rowpan = np.ceil(numpanels / 3.)
        colpan = 3
        fig.set_figwidth(15)
    if rowpan == 1:
        fig.set_figheight(rowpan * 6.0)
    else:
        fig.set_figheight(rowpan * 5.3)

    # TODO: update naming to reflect the shakemap version once
    # getHeaderData works; add edict['version'] back into the title, and
    # maybe the shakemap id as well.
    fontsizemain = 14.
    fontsizesub = 12.
    fontsizesmallest = 10.
    if rowpan == 1.:
        fontsizemain = 12.
        fontsizesub = 10.
        fontsizesmallest = 8.
    if edict is not None:
        if isScenario:
            title = edict['event_description']
        else:
            timestr = edict['event_timestamp'].strftime('%b %d %Y')
            title = 'M%.1f %s v%i - %s' % (edict['magnitude'], timestr,
                                           edict['version'],
                                           edict['event_description'])
        plt.suptitle(title + '\n' + suptitle, fontsize=fontsizemain)
    else:
        plt.suptitle(suptitle, fontsize=fontsizemain)

    clear_color = [0, 0, 0, 0.0]

    # Cut all of the layers and release extra memory
    xbuff = (bxmax - bxmin) / 10.
    ybuff = (bymax - bymin) / 10.
    cutxmin = bxmin - xbuff
    cutymin = bymin - ybuff
    cutxmax = bxmax + xbuff
    cutymax = bymax + ybuff
    if cut is True:
        newgrids = collections.OrderedDict()
        for k, layer in enumerate(plotorder):
            templayer = grids[layer]['grid']
            try:
                newgrids[layer] = {
                    'grid': templayer.cut(cutxmin, cutxmax, cutymin,
                                          cutymax, align=True)}
            except Exception as e:
                print('Cutting failed, %s, continuing with full layers' % e)
                newgrids = grids
                continue
            del templayer
            gc.collect()
    else:
        newgrids = grids
    tempgdict = newgrids[list(grids.keys())[0]]['grid'].getGeoDict()

    # Upsample layers to the same resolution as topofile, if desired, for
    # better looking hillshades
    if upsample is True and topofile is not None:
        try:
            topodict = GDALGrid.getFileGeoDict(topofile)
            if topodict.dx >= tempgdict.dx or topodict.dy >= tempgdict.dy:
                print('Upsampling not possible, resolution of results already smaller than DEM')
            else:
                tempgdict1 = GeoDict({'xmin': tempgdict.xmin - xbuff,
                                      'ymin': tempgdict.ymin - ybuff,
                                      'xmax': tempgdict.xmax + xbuff,
                                      'ymax': tempgdict.ymax + ybuff,
                                      'dx': topodict.dx,
                                      'dy': topodict.dy,
                                      'nx': topodict.nx,
                                      'ny': topodict.ny},
                                     adjust='res')
                tempgdict2 = tempgdict1.getBoundsWithin(tempgdict)
                for k, layer in enumerate(plotorder):
                    newgrids[layer]['grid'] = \
                        newgrids[layer]['grid'].subdivide(tempgdict2)
        except:
            print('Upsampling failed, continuing')

    # Downsample all of them for plotting, if needed, and replace them in
    # grids (to save memory)
    tempgrid = newgrids[list(grids.keys())[0]]['grid']
    xsize = tempgrid.getGeoDict().nx
    ysize = tempgrid.getGeoDict().ny
    inchesx, inchesy = fig.get_size_inches()
    divx = int(np.round(xsize / (500. * inchesx)))
    divy = int(np.round(ysize / (500. * inchesy)))
    xmin, xmax, ymin, ymax = tempgrid.getBounds()
    gdict = tempgrid.getGeoDict()  # Will be replaced if downsampled
    del tempgrid
    gc.collect()

    if divx <= 1:
        divx = 1
    if divy <= 1:
        divy = 1
    if (divx > 1. or divy > 1.) and ds:
        if dstype == 'max':
            func = np.nanmax
        elif dstype == 'min':
            func = np.nanmin
        elif dstype == 'med':
            func = np.nanmedian
        else:
            func = np.nanmean
        for k, layer in enumerate(plotorder):
            layergrid = newgrids[layer]['grid']
            dat = block_reduce(layergrid.getData().copy(),
                               block_size=(divy, divx),
                               cval=float('nan'), func=func)
            if k == 0:
                lons = block_reduce(np.linspace(xmin, xmax,
                                                layergrid.getGeoDict().nx),
                                    block_size=(divx,), func=np.mean,
                                    cval=float('nan'))
                if math.isnan(lons[-1]):
                    lons[-1] = lons[-2] + (lons[1] - lons[0])
                lats = block_reduce(np.linspace(ymax, ymin,
                                                layergrid.getGeoDict().ny),
                                    block_size=(divy,), func=np.mean,
                                    cval=float('nan'))
                if math.isnan(lats[-1]):
                    lats[-1] = lats[-2] + (lats[1] - lats[0])
                gdict = GeoDict({'xmin': lons.min(), 'xmax': lons.max(),
                                 'ymin': lats.min(), 'ymax': lats.max(),
                                 'dx': np.abs(lons[1] - lons[0]),
                                 'dy': np.abs(lats[1] - lats[0]),
                                 'nx': len(lons), 'ny': len(lats)},
                                adjust='res')
            newgrids[layer]['grid'] = Grid2D(dat, gdict)
            del layergrid, dat
    else:
        lons = np.linspace(xmin, xmax, xsize)
        lats = np.linspace(ymax, ymin, ysize)  # backwards so it plots right side up

    # Make meshgrid
    llons1, llats1 = np.meshgrid(lons, lats)

    # See if there is an oceanfile for masking
    bbox = PolygonSH(((cutxmin, cutymin), (cutxmin, cutymax),
                      (cutxmax, cutymax), (cutxmax, cutymin)))
    if oceanfile is not None:
        try:
            f = fiona.open(oceanfile)
            oc = next(f)
            f.close()
            shapes = shape(oc['geometry'])  # make boundaries into a shape
            ocean = shapes.intersection(bbox)
        except:
            print('Not able to read specified ocean file, will use default ocean masking')
            oceanfile = None
    if inventory_shapefile is not None:
        try:
            f = fiona.open(inventory_shapefile)
            invshp = list(f.items(bbox=(bxmin, bymin, bxmax, bymax)))
            f.close()
            inventory = [shape(inv[1]['geometry']) for inv in invshp]
        except:
            print('Unable to read inventory shapefile specified, will not plot inventory')
            inventory_shapefile = None

    # Find cities that will be plotted
    if mapcities is True and cityfile is not None:
        try:
            mycity = BasemapCities.loadFromGeoNames(cityfile=cityfile)
            bcities = mycity.limitByBounds((bxmin, bxmax, bymin, bymax))
            bcities = bcities.limitByGrid(nx=4, ny=4, cities_per_grid=2)
        except:
            print('Could not read in cityfile, not plotting cities')
            mapcities = False
            cityfile = None

    # Load in topofile
    if topofile is not None:
        try:
            topomap = GDALGrid.load(topofile, resample=True,
                                    method='linear', samplegeodict=gdict)
        except:
            topomap = GMTGrid.load(topofile, resample=True,
                                   method='linear', samplegeodict=gdict)
        topodata = topomap.getData().copy()
        # Mask oceans if we don't have an ocean shapefile
        if oceanfile is None:
            topodata = maskoceans(llons1, llats1, topodata, resolution='h',
                                  grid=1.25, inlands=True)
    else:
        print('No topofile provided, no hillshade is possible\n')
        topomap = None
        topodata = None

    # Load in roads, if needed
    if maproads is True and roadfolder is not None:
        try:
            roadslist = []
            for folder in os.listdir(roadfolder):
                road1 = os.path.join(roadfolder, folder)
                shpfiles = glob.glob(os.path.join(road1, '*.shp'))
                if len(shpfiles):
                    shpfile = shpfiles[0]
                    f = fiona.open(shpfile)
                    shapes = list(f.items(bbox=(bxmin, bymin, bxmax, bymax)))
                    for shapeid, shapedict in shapes:
                        roadslist.append(shapedict)
                    f.close()
        except:
            print('Not able to plot roads')
            roadslist = None

    val = 1
    for k, layer in enumerate(plotorder):
        layergrid = newgrids[layer]['grid']
        if 'label' in list(grids[layer].keys()):
            label1 = grids[layer]['label']
        else:
            label1 = layer
        try:
            sref = grids[layer]['description']['name']
        except:
            sref = None
        ax = fig.add_subplot(rowpan, colpan, val)
        val += 1
        clat = bymin + (bymax - bymin) / 2.0
        clon = bxmin + (bxmax - bxmin) / 2.0
        # Set up basemap ('lcc' = lambert conformal conic), using major and
        # minor sphere radii from the WGS84 ellipsoid.
        m = Basemap(llcrnrlon=bxmin, llcrnrlat=bymin, urcrnrlon=bxmax,
                    urcrnrlat=bymax,
                    rsphere=(6378137.00, 6356752.3142),
                    resolution='l', area_thresh=1000., projection='lcc',
                    lat_1=clat, lon_0=clon, ax=ax)
        x1, y1 = m(llons1, llats1)  # get projection coordinates
        axsize = ax.get_window_extent().transformed(
            fig.dpi_scale_trans.inverted())
        if k == 0:
            wid, ht = axsize.width, axsize.height
        if colormaps is not None and \
                len(colormaps) == len(newgrids) and \
                colormaps[k] is not None:
            palette = colormaps[k]
        else:
            # Find preferred default color map for each type of layer
            if 'prob' in layer.lower() or 'pga' in layer.lower() or \
                    'pgv' in layer.lower() or 'cohesion' in layer.lower() or \
                    'friction' in layer.lower() or 'fs' in layer.lower():
                palette = cm.jet
            elif 'slope' in layer.lower():
                palette = cm.gnuplot2
            elif 'precip' in layer.lower():
                palette = cm2.s3pcpn
            else:
                palette = defaultcolormap

        if topodata is not None:
            if k == 0:
                ptopo = m.transform_scalar(
                    np.flipud(topodata), lons + 0.5 * gdict.dx,
                    lats[::-1] - 0.5 * gdict.dy, np.round(300. * wid),
                    np.round(300. * ht), returnxy=False, checkbounds=False,
                    order=1, masked=False)
                # Use LightSource class to make our shaded topography
                ls = LightSource(azdeg=135, altdeg=45)
                ls1 = LightSource(azdeg=120, altdeg=45)
                ls2 = LightSource(azdeg=225, altdeg=45)
                intensity1 = ls1.hillshade(ptopo, fraction=0.25,
                                           vert_exag=1.)
                intensity2 = ls2.hillshade(ptopo, fraction=0.25,
                                           vert_exag=1.)
                intensity = intensity1 * 0.5 + intensity2 * 0.5

        # Get the data
        dat = layergrid.getData().copy()

        # Mask out anything below any specified thresholds. This might need
        # to move up to before downsampling; as is, it may give the
        # illusion of no hazard in places where some hazard was averaged
        # out.
        if maskthreshes is not None and len(maskthreshes) == len(newgrids):
            if maskthreshes[k] is not None:
                dat[dat <= maskthreshes[k]] = float('NaN')
                dat = np.ma.array(dat, mask=np.isnan(dat))

        if logscale is not False and len(logscale) == len(newgrids):
            if logscale[k] is True:
                dat = np.log10(dat)
                label1 = r'$log_{10}$(' + label1 + ')'

        if scaletype.lower() == 'binned':
            # Find order of range to know how to scale
            order = np.round(np.log(np.nanmax(dat) - np.nanmin(dat)))
            if order < 1.:
                scal = 10**-order
            else:
                scal = 1.
if lims is None or len(lims) != len(newgrids): clev = (np.linspace(np.floor(scal*np.nanmin(dat)), np.ceil(scal*np.nanmax(dat)), 10))/scal else: if lims[k] is None: clev = (np.linspace(np.floor(scal*np.nanmin(dat)), np.ceil(scal*np.nanmax(dat)), 10))/scal else: clev = lims[k] # Adjust to colorbar levels dat[dat < clev[0]] = clev[0] for j, level in enumerate(clev[:-1]): dat[(dat >= clev[j]) & (dat < clev[j+1])] = clev[j] # So colorbar saturates at top dat[dat > clev[-1]] = clev[-1] #panelhandle = m.contourf(x1, y1, datm, clev, cmap=palette, linewidth=0., alpha=ALPHA, rasterized=True) vmin = clev[0] vmax = clev[-1] else: if lims is not None and len(lims) == len(newgrids): if lims[k] is None: vmin = np.nanmin(dat) vmax = np.nanmax(dat) else: vmin = lims[k][0] vmax = lims[k][-1] else: vmin = np.nanmin(dat) vmax = np.nanmax(dat) # Mask out cells overlying oceans or block with a shapefile if available if oceanfile is None: dat = maskoceans(llons1, llats1, dat, resolution='h', grid=1.25, inlands=True) else: #patches = [] if type(ocean) is PolygonSH: ocean = [ocean] for oc in ocean: patch = getProjectedPatch(oc, m, edgecolor="#006280", facecolor=watercolor, lw=0.5, zorder=4.) #x, y = m(oc.exterior.xy[0], oc.exterior.xy[1]) #xy = zip(x, y) #patch = Polygon(xy, facecolor=watercolor, edgecolor="#006280", lw=0.5, zorder=4.) ##patches.append(Polygon(xy, facecolor=watercolor, edgecolor=watercolor, zorder=500.)) ax.add_patch(patch) ##ax.add_collection(PatchCollection(patches)) if inventory_shapefile is not None: for in1 in inventory: if 'point' in str(type(in1)): x, y = in1.xy x = x[0] y = y[0] m.scatter(x, y, c='m', s=50, latlon=True, marker='^', zorder=100001) else: x, y = m(in1.exterior.xy[0], in1.exterior.xy[1]) xy = list(zip(x, y)) patch = Polygon(xy, facecolor='none', edgecolor='k', lw=0.5, zorder=10.) #patches.append(Polygon(xy, facecolor=watercolor, edgecolor=watercolor, zorder=500.)) ax.add_patch(patch) palette.set_bad(clear_color, alpha=0.0) # Plot it up dat_im = m.transform_scalar( np.flipud(dat), lons+0.5*gdict.dx, lats[::-1]-0.5*gdict.dy, np.round(300.*wid), np.round(300.*ht), returnxy=False, checkbounds=False, order=0, masked=True) if topodata is not None: # Drape over hillshade #turn data into an RGBA image cmap = palette #adjust data so scaled between vmin and vmax and between 0 and 1 dat1 = dat_im.copy() dat1[dat1 < vmin] = vmin dat1[dat1 > vmax] = vmax dat1 = (dat1 - vmin)/(vmax-vmin) rgba_img = cmap(dat1) maskvals = np.dstack((dat1.mask, dat1.mask, dat1.mask)) rgb = np.squeeze(rgba_img[:, :, 0:3]) rgb[maskvals] = 1. 
draped_hsv = ls.blend_hsv(rgb, np.expand_dims(intensity, 2)) m.imshow(draped_hsv, zorder=3., interpolation='none') # This is just a dummy layer that will be deleted to make the # colorbar look right panelhandle = m.imshow(dat_im, cmap=palette, zorder=0., vmin=vmin, vmax=vmax) else: panelhandle = m.imshow(dat_im, cmap=palette, zorder=3., vmin=vmin, vmax=vmax, interpolation='none') #panelhandle = m.pcolormesh(x1, y1, dat, linewidth=0., cmap=palette, vmin=vmin, vmax=vmax, alpha=ALPHA, rasterized=True, zorder=2.); #panelhandle.set_edgecolors('face') # add colorbar cbfmt = '%1.1f' if vmax is not None and vmin is not None: if (vmax - vmin) < 1.: cbfmt = '%1.2f' elif vmax > 5.: # (vmax - vmin) > len(clev): cbfmt = '%1.0f' #norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax) if scaletype.lower() == 'binned': cbar = fig.colorbar(panelhandle, spacing='proportional', ticks=clev, boundaries=clev, fraction=0.036, pad=0.04, format=cbfmt, extend='both') #cbar1 = ColorbarBase(cbar.ax, cmap=palette, norm=norm, spacing='proportional', ticks=clev, boundaries=clev, fraction=0.036, pad=0.04, format=cbfmt, extend='both', extendfrac='auto') else: cbar = fig.colorbar(panelhandle, fraction=0.036, pad=0.04, extend='both', format=cbfmt) #cbar1 = ColorbarBase(cbar.ax, cmap=palette, norm=norm, fraction=0.036, pad=0.04, extend='both', extendfrac='auto', format=cbfmt) if topodata is not None: panelhandle.remove() cbar.set_label(label1, fontsize=10) cbar.ax.tick_params(labelsize=8) parallels = m.drawparallels(getMapLines(bymin, bymax, 3), labels=[1, 0, 0, 0], linewidth=0.5, labelstyle='+/-', fontsize=9, xoffset=-0.8, color='gray', zorder=100.) m.drawmeridians(getMapLines(bxmin, bxmax, 3), labels=[0, 0, 0, 1], linewidth=0.5, labelstyle='+/-', fontsize=9, color='gray', zorder=100.) for par in parallels: try: parallels[par][1][0].set_rotation(90) except: pass #draw roads on the map, if they were provided to us if maproads is True and roadslist is not None: try: for road in roadslist: try: xy = list(road['geometry']['coordinates']) roadx, roady = list(zip(*xy)) mapx, mapy = m(roadx, roady) m.plot(mapx, mapy, roadcolor, lw=0.5, zorder=9) except: continue except Exception as e: print(('Failed to plot roads, %s' % e)) #add city names to map if mapcities is True and cityfile is not None: try: fontname = 'Arial' fontsize = 8 if k == 0: # Only need to choose cities first time and then apply to rest fcities = bcities.limitByMapCollision( m, fontname=fontname, fontsize=fontsize) ctlats, ctlons, names = fcities.getCities() cxis, cyis = m(ctlons, ctlats) for ctlat, ctlon, cxi, cyi, name in zip(ctlats, ctlons, cxis, cyis, names): m.scatter(ctlon, ctlat, c='k', latlon=True, marker='.', zorder=100000) ax.text(cxi, cyi, name, fontname=fontname, fontsize=fontsize, zorder=100000) except Exception as e: print('Failed to plot cities, %s' % e) #draw star at epicenter plt.sca(ax) if edict is not None: elat, elon = edict['lat'], edict['lon'] ex, ey = m(elon, elat) plt.plot(ex, ey, '*', markeredgecolor='k', mfc='None', mew=1.0, ms=15, zorder=10000.) 
m.drawmapboundary(fill_color=watercolor) m.fillcontinents(color=clear_color, lake_color=watercolor) m.drawrivers(color=watercolor) ##m.drawcoastlines() #draw country boundaries m.drawcountries(color=countrycolor, linewidth=1.0) #add map scale m.drawmapscale((bxmax+bxmin)/2., (bymin+(bymax-bymin)/9.), clon, clat, np.round((((bxmax-bxmin)*111)/5)/10.)*10, barstyle='fancy', zorder=10) # Add border autoAxis = ax.axis() rec = Rectangle((autoAxis[0]-0.7, autoAxis[2]-0.2), (autoAxis[1]-autoAxis[0])+1, (autoAxis[3]-autoAxis[2])+0.4, fill=False, lw=1, zorder=1e8) rec = ax.add_patch(rec) rec.set_clip_on(False) plt.draw() if sref is not None: label2 = '%s\nsource: %s' % (label1, sref) # '%s\n' % label1 + r'{\fontsize{10pt}{3em}\selectfont{}%s}' % sref # else: label2 = label1 plt.title(label2, axes=ax, fontsize=fontsizesub) #draw scenario watermark, if scenario if isScenario: plt.sca(ax) cx, cy = m(clon, clat) plt.text(cx, cy, 'SCENARIO', rotation=45, alpha=0.10, size=72, ha='center', va='center', color='red') #if ds: # Could add this to print "downsampled" on map # plt.text() if k == 1 and rowpan == 1: # adjust single level plot axsize = ax.get_window_extent().transformed(fig.dpi_scale_trans.inverted()) ht2 = axsize.height fig.set_figheight(ht2*1.6) else: plt.tight_layout() # Make room for suptitle - tight layout doesn't account for it plt.subplots_adjust(top=0.92) if printparam is True: try: fig = plt.gcf() dictionary = grids['model']['description']['parameters'] paramstring = 'Model parameters: ' halfway = np.ceil(len(dictionary)/2.) for i, key in enumerate(dictionary): if i == halfway and colpan == 1: paramstring += '\n' paramstring += ('%s = %s; ' % (key, dictionary[key])) print(paramstring) fig.text(0.01, 0.015, paramstring, fontsize=fontsizesmallest) plt.draw() except: print('Could not display model parameters') if edict is not None: eventid = edict['eventid'] else: eventid = '' time1 = datetime.datetime.utcnow().strftime('%d%b%Y_%H%M') outfile = os.path.join(outfolder, '%s_%s_%s.pdf' % (eventid, suptitle, time1)) pngfile = os.path.join(outfolder, '%s_%s_%s.png' % (eventid, suptitle, time1)) if savepdf is True: print('Saving map output to %s' % outfile) plt.savefig(outfile, dpi=300) if savepng is True: print('Saving map output to %s' % pngfile) plt.savefig(pngfile) if showplots is True: plt.show() else: plt.close(fig) return newgrids
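# Example usage (a sketch; 'maplayers' would come from one of the model
# functions in this package, and the file paths are hypothetical):
#
#   trimmed = modelMap(maplayers, shakefile='grid.xml',
#                      suptitle='Liquefaction model',
#                      boundaries='zoom', zthresh=0.05,
#                      topofile='/data/CA_topo.grd',
#                      outputdir='/tmp/maps', savepdf=True, savepng=False)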
    def calculate(self):
        """Calculate the model.

        :returns: a dictionary containing the model results, and the model
          inputs if saveinputs was set to True when the class was set up;
          see <https://github.com/usgs/groundfailure#api-for-model-output>
          for a description of the structure of this output
        """
        X = eval(self.equation)
        P = 1 / (1 + np.exp(-X))
        if 'vs30max' in self.config[self.model].keys():
            vs30 = self.layerdict['vs30'].getData()
            P[vs30 > float(self.config[self.model]['vs30max'])] = 0.0
        if 'minpgv' in self.config[self.model].keys():
            pgv = self.shakemap.getLayer('pgv').getData()
            P[pgv < float(self.config[self.model]['minpgv'])] = 0.0
        if 'coverage' in self.config[self.model].keys():
            eqn = self.config[self.model]['coverage']['eqn']
            ind = copy.copy(P)
            P = eval(eqn)
        if self.uncert is not None:
            Xmin = eval(self.equationmin)
            Xmax = eval(self.equationmax)
            Pmin = 1 / (1 + np.exp(-Xmin))
            Pmax = 1 / (1 + np.exp(-Xmax))
            if 'vs30max' in self.config[self.model].keys():
                vs30 = self.layerdict['vs30'].getData()
                Pmin[vs30 > float(self.config[self.model]['vs30max'])] = 0.0
                Pmax[vs30 > float(self.config[self.model]['vs30max'])] = 0.0
            if 'minpgv' in self.config[self.model].keys():
                pgv = self.shakemap.getLayer('pgv').getData()
                Pmin[pgv < float(self.config[self.model]['minpgv'])] = 0.0
                Pmax[pgv < float(self.config[self.model]['minpgv'])] = 0.0
            if 'coverage' in self.config[self.model].keys():
                eqnmin = eqn.replace('P', 'Pmin')
                eqnmax = eqn.replace('P', 'Pmax')
                Pmin = eval(eqnmin)
                Pmax = eval(eqnmax)
        if self.slopefile is not None:
            ftype = getFileType(self.slopefile)
            sampledict = self.shakemap.getGeoDict()
            if ftype == 'gmt':
                if GMTGrid.getFileGeoDict(self.slopefile)[0] == sampledict:
                    slope = GMTGrid.load(
                        self.slopefile).getData() / self.slopediv
                else:
                    slope = GMTGrid.load(
                        self.slopefile, sampledict, resample=True,
                        method='linear',
                        doPadding=True).getData() / self.slopediv
            elif ftype == 'esri':
                if GDALGrid.getFileGeoDict(self.slopefile)[0] == sampledict:
                    slope = GDALGrid.load(
                        self.slopefile).getData() / self.slopediv
                else:
                    slope = GDALGrid.load(
                        self.slopefile, sampledict, resample=True,
                        method='linear',
                        doPadding=True).getData() / self.slopediv
            else:
                slope = None
                print('Slope file %s does not appear to be a valid GMT or '
                      'ESRI file, not applying any slope thresholds.'
                      % self.slopefile)
            if slope is not None:
                # Apply slope min/max limits
                print('applying slope thresholds')
                P[slope > self.slopemax] = 0.
                P[slope < self.slopemin] = 0.
                if self.uncert is not None:
                    Pmin[slope > self.slopemax] = 0.
                    Pmin[slope < self.slopemin] = 0.
                    Pmax[slope > self.slopemax] = 0.
                    Pmax[slope < self.slopemin] = 0.
        else:
            print('No slope file provided, slope thresholds not applied')

        # Stuff into Grid2D object
        temp = self.shakemap.getShakeDict()
        shakedetail = '%s_ver%s' % (temp['shakemap_id'],
                                    temp['shakemap_version'])
        description = {
            'name': self.modelrefs['shortref'],
            'longref': self.modelrefs['longref'],
            'units': 'probability',
            'shakemap': shakedetail,
            'parameters': {'slopemin': self.slopemin,
                           'slopemax': self.slopemax}
        }
        Pgrid = Grid2D(P, self.geodict)
        rdict = collections.OrderedDict()
        rdict['model'] = {
            'grid': Pgrid,
            'label': '%s Probability' % self.modeltype.capitalize(),
            'type': 'output',
            'description': description
        }
        if self.uncert is not None:
            rdict['modelmin'] = {
                'grid': Grid2D(Pmin, self.geodict),
                'label': '%s Probability (-%0.1f std ground motion)'
                         % (self.modeltype.capitalize(), self.numstd),
                'type': 'output',
                'description': description
            }
            rdict['modelmax'] = {
                'grid': Grid2D(Pmax, self.geodict),
                'label': '%s Probability (+%0.1f std ground motion)'
                         % (self.modeltype.capitalize(), self.numstd),
                'type': 'output',
                'description': description
            }
        if self.saveinputs is True:
            for layername, layergrid in list(self.layerdict.items()):
                units = self.units[layername]
                if units is None:
                    units = ''
                rdict[layername] = {
                    'grid': layergrid,
                    'label': '%s (%s)' % (layername, units),
                    'type': 'input',
                    'description': {'units': units, 'shakemap': shakedetail}
                }
            for gmused in self.gmused:
                if 'pga' in gmused:
                    units = '%g'
                    getkey = 'pga'
                elif 'pgv' in gmused:
                    units = 'cm/s'
                    getkey = 'pgv'
                elif 'mmi' in gmused:
                    units = 'intensity'
                    getkey = 'mmi'
                else:
                    # Layer is derived from several input layers, skip
                    # outputting this layer
                    continue
                if getkey in rdict:
                    continue
                layer = self.shakemap.getLayer(getkey)
                rdict[getkey] = {
                    'grid': layer,
                    'label': '%s (%s)' % (getkey.upper(), units),
                    'type': 'input',
                    'description': {'units': units, 'shakemap': shakedetail}
                }
                if self.uncert is not None:
                    layer1 = np.exp(
                        np.log(layer.getData())
                        - self.uncert.getLayer('std' + getkey).getData())
                    rdict[getkey + 'modelmin'] = {
                        'grid': Grid2D(layer1, self.geodict),
                        'label': '%s - %0.1f std (%s)'
                                 % (getkey.upper(), self.numstd, units),
                        'type': 'input',
                        'description': {'units': units,
                                        'shakemap': shakedetail}
                    }
                    layer2 = np.exp(
                        np.log(layer.getData())
                        + self.uncert.getLayer('std' + getkey).getData())
                    rdict[getkey + 'modelmax'] = {
                        'grid': Grid2D(layer2, self.geodict),
                        'label': '%s + %0.1f std (%s)'
                                 % (getkey.upper(), self.numstd, units),
                        'type': 'input',
                        'description': {'units': units,
                                        'shakemap': shakedetail}
                    }
        return rdict
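# Example usage (a minimal sketch, assuming the enclosing class is the
# logistic-model class this module defines, referred to here as
# LogisticModel; the config path, ShakeMap path, and model name are
# hypothetical stand-ins):
#
#   from configobj import ConfigObj
#   config = ConfigObj('config.ini')
#   lm = LogisticModel(config, 'grid.xml', 'nowicki_2014_global')
#   results = lm.calculate()
#   prob = results['model']['grid'].getData()   # 2D array of probabilities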
def quickcut(filename, gdict, tempname=None, extrasamp=5., method='bilinear',
             precise=True, cleanup=True, verbose=False, override=False):
    """
    Use gdal to trim a large global file down quickly so mapio can read it
    efficiently. (Cannot read ShakeMap .xml files; save as .bil first.)

    Args:
        filename (str): File path to original input file (raster).
        gdict (geodict): Geodictionary to cut around and align with.
        tempname (str): File path to desired location of clipped part of
            filename.
        extrasamp (float): Number of extra cells to cut around each edge of
            geodict to provide a resampling buffer for future steps.
        method (str): If resampling is necessary, method to use.
        precise (bool): If True, will resample to the gdict as closely as
            possible; if False it will just roughly cut around the area of
            interest without changing resolution.
        cleanup (bool): If True, delete tempname after reading it back in.
        verbose (bool): If True, prints more details.
        override (bool): If True and the filename extent is not fully
            contained by gdict, read in the entire file (only used for
            ShakeMaps).

    Returns:
        New grid2D layer

    Note:
        This function uses the subprocess approach because
        ``gdal.Translate`` doesn't hang on the command until the file is
        created, which causes problems in the next steps.
    """
    if gdict.xmax < gdict.xmin:
        raise Exception('quickcut: your geodict xmax is smaller than xmin')

    try:
        filegdict = GDALGrid.getFileGeoDict(filename)
    except:
        try:
            filegdict = GMTGrid.getFileGeoDict(filename)
        except:
            raise Exception('Cannot get geodict for %s' % filename)

    if tempname is None:
        tempdir = tempfile.mkdtemp()
        tempname = os.path.join(tempdir, 'junk.tif')
        deltemp = True
    else:
        tempdir = None
        deltemp = False

    filegdict = filegdict[0]

    # Get the right methods for mapio (method) and gdal (method2). Note the
    # elif chain: these cases are mutually exclusive, and with independent
    # ifs the final else would clobber an already-translated method2.
    if method == 'linear':
        method2 = 'bilinear'
    elif method == 'nearest':
        method2 = 'near'
    elif method == 'bilinear':
        method = 'linear'
        method2 = 'bilinear'
    elif method == 'near':
        method = 'nearest'
        method2 = 'near'
    else:
        method2 = method

    if filegdict != gdict:
        # First cut without resampling
        tempgdict = GeoDict.createDictFromBox(gdict.xmin, gdict.xmax,
                                              gdict.ymin, gdict.ymax,
                                              filegdict.dx, filegdict.dy,
                                              inside=True)
        try:
            egdict = filegdict.getBoundsWithin(tempgdict)

            ulx = egdict.xmin - extrasamp * egdict.dx
            uly = egdict.ymax + extrasamp * egdict.dy
            lrx = egdict.xmax + (extrasamp + 1) * egdict.dx
            lry = egdict.ymin - (extrasamp + 1) * egdict.dy

            cmd = ('gdal_translate -a_srs EPSG:4326 -of GTiff '
                   '-projwin %1.8f %1.8f %1.8f %1.8f -r %s %s %s'
                   % (ulx, uly, lrx, lry, method2, filename, tempname))
        except Exception as e:
            if override:
                # When a ShakeMap is being loaded, sometimes the bounds
                # won't align right because the file is already cut to the
                # area, so just load the whole file
                cmd = ('gdal_translate -a_srs EPSG:4326 -of GTiff -r %s %s %s'
                       % (method2, filename, tempname))
            else:
                raise Exception('Failed to cut layer: %s' % e)

        rc, so, se = get_command_output(cmd)
        if not rc:
            raise Exception(se.decode())
        else:
            if verbose:
                print(so.decode())

        newgrid2d = GDALGrid.load(tempname)
        if precise:
            # Resample to exact geodictionary
            newgrid2d = newgrid2d.interpolate2(gdict, method=method)
        if cleanup:
            os.remove(tempname)
        if deltemp:
            shutil.rmtree(tempdir)
    else:
        ftype = GMTGrid.getFileType(filename)
        if ftype != 'unknown':
            newgrid2d = GMTGrid.load(filename)
        elif filename.endswith('.xml'):
            newgrid2d = ShakeGrid.load(filename)
        else:
            newgrid2d = GDALGrid.load(filename)

    return newgrid2d
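# Example usage (a sketch; the raster path is hypothetical):
#
#   from mapio.geodict import GeoDict
#   gdict = GeoDict.createDictFromBox(-122.0, -121.0, 37.0, 38.0, 0.01, 0.01)
#   layer = quickcut('/data/global_slope.grd', gdict, method='bilinear')
#   print(layer.getGeoDict())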
        pass
    if gridtype is None:
        raise Exception('File "%s" does not appear to be either a GMT grid '
                        'or an ESRI grid.' % gridfile)
    xmin = xmin - fdict.dx * 3
    xmax = xmax + fdict.dx * 3
    ymin = ymin - fdict.dy * 3
    ymax = ymax + fdict.dy * 3
    bounds = (xmin, xmax, ymin, ymax)
    if gridtype == 'gmt':
        fgeodict = GMTGrid.getFileGeoDict(gridfile)
    else:
        fgeodict = GDALGrid.getFileGeoDict(gridfile)
    dx, dy = (fgeodict.dx, fgeodict.dy)
    sdict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    if gridtype == 'gmt':
        grid = GMTGrid.load(gridfile, samplegeodict=sdict, resample=False,
                            method=method, doPadding=True)
    else:
        grid = GDALGrid.load(gridfile, samplegeodict=sdict, resample=False,
                             method=method, doPadding=True)
    return sampleFromGrid(grid, xypoints)

def sampleFromGrid(grid, xypoints, method='nearest'):
    """
    Sample 2D grid object at each of a set of XY (decimal degrees) points.

    :param grid: Grid2D object at which to sample data.
    :param xypoints: 2D numpy array of XY points, decimal degrees.
    :param method: Interpolation method, either 'nearest' or 'linear'.
    :returns: 1D numpy array of grid values at each of the input XY points.
    def __init__(self, config, shakefile, model):
        if model not in getLogisticModelNames(config):
            raise Exception('Could not find a model called "%s" in config %s.'
                            % (model, config))
        # Do everything here short of calculations - parse config, assemble
        # equation strings, load data.
        self.model = model
        cmodel = config['logistic_models'][model]
        self.coeffs = validateCoefficients(cmodel)
        # key = layer name, value = file name
        self.layers = validateLayers(cmodel)
        self.terms, timeField = validateTerms(cmodel, self.coeffs,
                                              self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)

        if 'baselayer' not in cmodel:
            raise Exception('You must specify a base layer file in config.')
        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception('You must specify a base layer corresponding to '
                            'one of the files in the layer section.')

        # Get the geodict for the shakemap
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        griddict, eventdict, specdict, fields, uncertainties = \
            getHeaderData(shakefile)
        YEAR = eventdict['event_timestamp'].year
        MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]
        DAY = eventdict['event_timestamp'].day
        HOUR = eventdict['event_timestamp'].hour

        # Now find the layer that is our base layer and get the largest
        # bounds we can guarantee not to exceed the shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict = GDALGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        elif ftype == 'gmt':
            basegeodict = GMTGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        else:
            raise Exception('All predictor variable grids must be a valid '
                            'GMT or ESRI file type')

        # Now load the shakemap, resampling and padding if necessary
        self.shakemap = ShakeGrid.load(shakefile, samplegeodict=sampledict,
                                       resample=True, doPadding=True,
                                       adjust='res')

        # Load the predictor layers into a dictionary
        self.layerdict = {}  # key = layer name, value = grid object
        for layername, layerfile in self.layers.items():
            if isinstance(layerfile, list):
                # A list of files means a time-dependent layer; pick the
                # file matching the event month.
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
            # Figure out what kind of file we have (or is it a directory?)
            ftype = getFileType(layerfile)
            interp = self.interpolations[layername]
            if ftype == 'gmt':
                lyr = GMTGrid.load(layerfile, sampledict, resample=True,
                                   method=interp, doPadding=True)
            elif ftype == 'esri':
                lyr = GDALGrid.load(layerfile, sampledict, resample=True,
                                    method=interp, doPadding=True)
            else:
                msg = ('Layer %s (file %s) does not appear to be a valid '
                       'GMT or ESRI file.' % (layername, layerfile))
                raise Exception(msg)
            self.layerdict[layername] = lyr

        self.nuggets = [str(self.coeffs['b0'])]
        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))
        self.equation = ' + '.join(self.nuggets)
        self.geodict = self.shakemap.getGeoDict()
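# Illustration (made-up coefficients, not from any real config): with
# coeffs = {'b0': -3.6, 'b1': 1.2} and terms = {'b1': 'np.log(pgv)'}, the
# loop above assembles
#
#   self.equation = '-3.6 + (1.2 * np.log(pgv))'
#
# which calculate() later eval()s against the loaded layers and maps
# through the logistic function 1 / (1 + exp(-X)).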
def hazus_liq(shakefile, config, uncertfile=None, saveinputs=False,
              modeltype=None, displmodel=None, probtype=None, bounds=None):
    """
    Compute the probability of liquefaction using the Hazus method, with
    the Wills et al. (2015) Vs30 map of California defining the
    susceptibility classes and the Fan et al. global water table model
    supplying water table depth.

    Args:
        shakefile (str): Path to a ShakeMap grid.xml file.
        config (dict): Config dictionary containing a 'hazus_liq_cal'
            section with 'layers' (file paths) and 'parameters'
            (susceptibility category for each Vs30 value).
        saveinputs (bool): If True, also return the input layers.
        uncertfile, modeltype, displmodel, probtype, bounds: Accepted for
            API compatibility with the other models but not used here.

    Returns:
        OrderedDict of maplayers (model output and, optionally, inputs).
    """
    layers = config['hazus_liq_cal']['layers']
    vs30_file = layers['vs30']['file']
    wtd_file = layers['watertable']['file']
    fgeodict = GMTGrid.getFileGeoDict(vs30_file)[0]

    #-------------------------------------------------------------------------
    # Loading
    #-------------------------------------------------------------------------
    shakemap = ShakeGrid.load(shakefile, fgeodict, resample=True,
                              method='linear', doPadding=True)
    PGA = shakemap.getLayer('pga').getData() / 100  # convert to g
    griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
        shakefile)
    mag = eventdict['magnitude']

    # Correction factor for moment magnitudes other than M=7.5
    k_m = 0.0027 * mag**3 - 0.0267 * mag**2 - 0.2055 * mag + 2.9188

    #-------------------------------------------------------------------------
    # Susceptibility from Vs30
    #-------------------------------------------------------------------------
    vs30_grid = GMTGrid.load(vs30_file)
    vs30 = vs30_grid.getData()
    p_ml = np.zeros_like(vs30)
    a = np.zeros_like(vs30)
    b = np.zeros_like(vs30)
    for k, v in config['hazus_liq_cal']['parameters'].items():
        ind = np.where(vs30 == float(v[0]))
        if v[1] == "VH":
            p_ml[ind] = 0.25
            a[ind] = 9.09
            b[ind] = -0.82
        elif v[1] == "H":
            p_ml[ind] = 0.2
            a[ind] = 7.67
            b[ind] = -0.92
        elif v[1] == "M":
            p_ml[ind] = 0.1
            a[ind] = 6.67
            b[ind] = -1.0
        elif v[1] == "L":
            p_ml[ind] = 0.05
            a[ind] = 5.57
            b[ind] = -1.18
        elif v[1] == "VL":
            p_ml[ind] = 0.02
            a[ind] = 4.16
            b[ind] = -1.08

    # Conditional liquefaction probability for a given susceptibility
    # category at a specified PGA
    p_liq_pga = a * PGA + b
    p_liq_pga = p_liq_pga.clip(min=0, max=1)

    #-------------------------------------------------------------------------
    # Water table
    #-------------------------------------------------------------------------
    wtd_grid = GMTGrid.load(wtd_file, fgeodict, resample=True,
                            method=layers['watertable']['interpolation'],
                            doPadding=True)
    tmp = wtd_grid._data
    tmp = np.nan_to_num(tmp)

    # Convert to ft
    wt_ft = tmp * 3.28084

    # Correction factor for groundwater depths other than five feet
    k_w = 0.022 * wt_ft + 0.93

    #-------------------------------------------------------------------------
    # Combine to get conditional liquefaction probability
    #-------------------------------------------------------------------------
    p_liq_sc = p_liq_pga * p_ml / k_m / k_w

    #-------------------------------------------------------------------------
    # Turn output and inputs into grids and put in maplayers dictionary
    #-------------------------------------------------------------------------
    maplayers = collections.OrderedDict()

    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])
    modelsref = config['hazus_liq_cal']['shortref']
    modellref = config['hazus_liq_cal']['longref']
    modeltype = 'Hazus/Wills'
    maplayers['model'] = {
        'grid': GDALGrid(p_liq_sc, fgeodict),
        'label': 'Probability',
        'type': 'output',
        'description': {
            'name': modelsref,
            'longref': modellref,
            'units': 'coverage',
            'shakemap': shakedetail,
            'parameters': {'modeltype': modeltype}
        }
    }

    if saveinputs is True:
        maplayers['pga'] = {
            'grid': GDALGrid(PGA, fgeodict),
            'label': 'PGA (g)',
'type': 'input', 'description': { 'units': 'g', 'shakemap': shakedetail } } maplayers['vs30'] = { 'grid': GDALGrid(vs30, fgeodict), 'label': 'Vs30 (m/s)', 'type': 'input', 'description': { 'units': 'm/s' } } maplayers['wtd'] = { 'grid': GDALGrid(wtd_grid._data, fgeodict), 'label': 'wtd (m)', 'type': 'input', 'description': { 'units': 'm' } } return maplayers
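# Worked sketch of the combination above (illustrative values): for a cell
# in the "H" susceptibility class (p_ml = 0.2, a = 7.67, b = -0.92) with
# PGA = 0.3 g, M = 7.5, and a water table at 5 ft:
#
#   p_liq_pga = 7.67 * 0.3 - 0.92 = 1.381  -> clipped to 1.0
#   k_m = 0.0027*7.5**3 - 0.0267*7.5**2 - 0.2055*7.5 + 2.9188 ~ 1.015
#   k_w = 0.022*5 + 0.93 = 1.04
#   p_liq_sc = 1.0 * 0.2 / 1.015 / 1.04 ~ 0.19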
def slhrf_liq(shakefile, config, uncertfile=None, saveinputs=False,
              modeltype=None, displmodel=None, probtype=None, bounds=None):
    """
    Compute the probability of liquefaction using the SLHRF, relying
    primarily on the Wills et al. (2015) Vs30 map of California and
    HydroSHEDS distance to rivers.
    """
    layers = config['slhrf_liq_cal']['layers']
    vs30_file = layers['vs30']['file']
    elev_file = layers['elev']['file']
    dc_file = layers['dc']['file']
    dr_file = layers['dr']['file']
    fgeodict = GMTGrid.getFileGeoDict(vs30_file)[0]

    #-------------------------------------------------------------------------
    # Read in data layers
    #-------------------------------------------------------------------------
    shakemap = ShakeGrid.load(shakefile, fgeodict, resample=True,
                              method='linear', doPadding=True)
    PGA = shakemap.getLayer('pga').getData() / 100  # convert to g
    griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
        shakefile)
    mag = eventdict['magnitude']
    vs30_grid = GMTGrid.load(vs30_file)
    vs30 = vs30_grid.getData()
    elev = GDALGrid.load(elev_file, fgeodict, resample=True,
                         method=layers['elev']['interpolation'],
                         doPadding=True).getData()
    dc = GDALGrid.load(dc_file, fgeodict, resample=True,
                       method=layers['dc']['interpolation'],
                       doPadding=True).getData()
    dr = GDALGrid.load(dr_file, fgeodict, resample=True,
                       method=layers['dr']['interpolation'],
                       doPadding=True).getData()
    dw = np.minimum(dr, dc)

    #-------------------------------------------------------------------------
    # Evaluate the different factors
    #-------------------------------------------------------------------------
    Fgeo = np.zeros_like(vs30)
    for k, v in config['slhrf_liq_cal']['parameters'].items():
        ind = np.where(vs30 == float(v[0]))
        Fgeo[ind] = float(v[1])
    Fz = z_factor(elev)
    Fmag = mag_factor(mag)
    Fpga = pga_factor(PGA)
    Fdw = dw_factor(dw)
    Fnehrp = nehrp_factor(vs30)

    #-------------------------------------------------------------------------
    # Combine factors
    #-------------------------------------------------------------------------
    SLHRF = Fz * Fmag * Fpga * Fdw * Fgeo * Fnehrp

    # Transform into a 'probability'
    prob = 0.4 * (1 - np.exp(-0.2 * SLHRF**2))

    #-------------------------------------------------------------------------
    # Turn output and inputs into grids and put in maplayers dictionary
    #-------------------------------------------------------------------------
    maplayers = collections.OrderedDict()

    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])
    modelsref = config['slhrf_liq_cal']['shortref']
    modellref = config['slhrf_liq_cal']['longref']
    modeltype = 'SLHRF/Wills'
    maplayers['model'] = {'grid': GDALGrid(prob, fgeodict),
                          'label': 'Probability',
                          'type': 'output',
                          'description': {'name': modelsref,
                                          'longref': modellref,
                                          'units': 'coverage',
                                          'shakemap': shakedetail,
                                          'parameters': {'modeltype': modeltype}}}

    if saveinputs is True:
        maplayers['slhrf'] = {'grid': GDALGrid(SLHRF, fgeodict),
                              'label': 'SLHRF', 'type': 'input',
                              'description': {'units': 'none'}}
        maplayers['pga'] = {'grid': GDALGrid(PGA, fgeodict),
                            'label': 'PGA (g)', 'type': 'input',
                            'description': {'units': 'g',
                                            'shakemap': shakedetail}}
        maplayers['vs30'] = {'grid': GDALGrid(vs30, fgeodict),
                             'label': 'Vs30 (m/s)', 'type': 'input',
                             'description': {'units': 'm/s'}}
        maplayers['dw'] = {'grid': GDALGrid(dw, fgeodict),
                           'label': 'dw (km)', 'type': 'input',
                           'description': {'units': 'km'}}
        maplayers['elev'] = {'grid': GDALGrid(elev, fgeodict),
                             'label': 'elev (m)', 'type': 'input',
                             'description':
{'units': 'm'}} maplayers['FPGA'] = {'grid': GDALGrid(Fpga, fgeodict), 'label': 'Fpga', 'type': 'input', 'description': {'units': 'none'}} maplayers['FDW'] = {'grid': GDALGrid(Fdw, fgeodict), 'label': 'Fdw', 'type': 'input', 'description': {'units': 'none'}} maplayers['FGEO'] = {'grid': GDALGrid(Fgeo, fgeodict), 'label': 'Fgeo', 'type': 'input', 'description': {'units': 'none'}} maplayers['FZ'] = {'grid': GDALGrid(Fz, fgeodict), 'label': 'Fz', 'type': 'input', 'description': {'units': 'none'}} maplayers['FNEHRP'] = {'grid': GDALGrid(Fnehrp, fgeodict), 'label': 'Fnehrp', 'type': 'input', 'description': {'units': 'none'}} return maplayers
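# Example usage (a sketch; the config path is hypothetical):
#
#   from configobj import ConfigObj
#   config = ConfigObj('slhrf_config.ini')  # must contain 'slhrf_liq_cal'
#   maplayers = slhrf_liq('grid.xml', config, saveinputs=True)
#
# Note that the final transform, prob = 0.4 * (1 - exp(-0.2 * SLHRF**2)),
# maps the unbounded SLHRF score onto (0, 0.4), so the 'probability' here
# is a scaled index rather than a calibrated likelihood.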