def sampleGridFile(gridfile, xypoints, method='nearest'):
    """Sample grid file (ESRI or GMT format) at each of a set of XY
    (decimal degrees) points.

    :param gridfile:
      Name of ESRI or GMT grid format file from which to sample values.
    :param xypoints:
      2D numpy array of XY points, decimal degrees.
    :param method:
      Interpolation method, either 'nearest' or 'linear'.
    :returns:
      1D numpy array of grid values at each of input XY points.
    """
    xmin = np.min(xypoints[:, 0])
    xmax = np.max(xypoints[:, 0])
    ymin = np.min(xypoints[:, 1])
    ymax = np.max(xypoints[:, 1])
    gridtype = None
    try:
        fdict = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception:
        try:
            fdict = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except Exception:
            pass
    if gridtype is None:
        raise Exception(
            'File "%s" does not appear to be either a GMT grid or an '
            'ESRI grid.' % gridfile)

    # pad the sampling bounds by three grid cells on each side
    xmin = xmin - fdict.dx * 3
    xmax = xmax + fdict.dx * 3
    ymin = ymin - fdict.dy * 3
    ymax = ymax + fdict.dy * 3
    if gridtype == 'gmt':
        fgeodict = GMTGrid.getFileGeoDict(gridfile)
    else:
        fgeodict = GDALGrid.getFileGeoDict(gridfile)
    dx, dy = (fgeodict.dx, fgeodict.dy)
    sdict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    if gridtype == 'gmt':
        grid = GMTGrid.load(gridfile, samplegeodict=sdict, resample=False,
                            method=method, doPadding=True)
    else:
        grid = GDALGrid.load(gridfile, samplegeodict=sdict, resample=False,
                             method=method, doPadding=True)
    return sampleFromGrid(grid, xypoints)
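# A minimal usage sketch for sampleGridFile (not part of the original code):
# the grid file name below is hypothetical, and numpy is assumed to be
# imported as np, as it is throughout these functions.
def _example_sample_grid_file():
    xypoints = np.array([[-122.42, 37.77],   # lon, lat in decimal degrees
                         [-118.25, 34.05]])
    # returns a 1D array with one grid value per input point
    return sampleGridFile('global_vs30.grd', xypoints, method='linear')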
def getFileType(filename):
    """Determine whether input file is a shapefile or a grid (ESRI or GMT).

    :param filename:
      String path to candidate filename.
    :returns:
      String, one of 'shapefile', 'grid', 'unknown'.
    """
    fname, fext = os.path.splitext(filename)
    dbf = fname + '.dbf'
    ftype = 'unknown'
    if os.path.isfile(dbf):
        ftype = 'shapefile'
    else:
        try:
            fdict = GMTGrid.getFileGeoDict(filename)
            ftype = 'grid'
        except Exception:
            try:
                fdict = GDALGrid.getFileGeoDict(filename)
                ftype = 'grid'
            except Exception:
                pass
    return ftype
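# A minimal usage sketch for getFileType (file names are hypothetical): a
# sidecar .dbf marks a shapefile; otherwise the GMT and ESRI readers each
# get a chance to parse the file before falling back to 'unknown'.
def _example_get_file_type():
    for fname in ('faults.shp', 'topography.grd', 'notes.txt'):
        print(fname, '->', getFileType(fname))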
def sampleGridFile(gridfile, xypoints, method='nearest'):
    """Sample grid file (ESRI or GMT format) at each of a set of XY
    (decimal degrees) points.

    :param gridfile:
      Name of ESRI or GMT grid format file from which to sample values.
    :param xypoints:
      2D numpy array of XY points, decimal degrees.
    :param method:
      Interpolation method, either 'nearest' or 'linear'.
    :returns:
      1D numpy array of grid values at each of input XY points.
    """
    xmin = np.min(xypoints[:, 0])
    xmax = np.max(xypoints[:, 0])
    ymin = np.min(xypoints[:, 1])
    ymax = np.max(xypoints[:, 1])
    gridtype = None
    try:
        fdict = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception:
        try:
            fdict = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except Exception:
            pass
def sampleGridFile(gridfile, xypoints, method='nearest'):
    """Sample grid file (ESRI or GMT format) at each of a set of XY
    (decimal degrees) points.

    :param gridfile:
      Name of ESRI or GMT grid format file from which to sample values.
    :param xypoints:
      2D numpy array of XY points, decimal degrees.
    :param method:
      Interpolation method, either 'nearest' or 'linear'.
    :returns:
      1D numpy array of grid values at each of input XY points.
    """
    if not len(xypoints):
        return np.array([])
    xmin = np.min(xypoints[:, 0])
    xmax = np.max(xypoints[:, 0])
    ymin = np.min(xypoints[:, 1])
    ymax = np.max(xypoints[:, 1])
    gridtype = None
    try:
        fdict, tmp = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception:
        try:
            fdict, tmp = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except Exception:
            pass
    if gridtype is None:
        raise Exception(
            'File "%s" does not appear to be either a GMT grid or an '
            'ESRI grid.' % gridfile)

    # pad the sampling bounds by three grid cells on each side
    xmin = xmin - fdict.dx * 3
    xmax = xmax + fdict.dx * 3
    ymin = ymin - fdict.dy * 3
    ymax = ymax + fdict.dy * 3
    bounds = (xmin, xmax, ymin, ymax)
    if gridtype == 'gmt':
        fgeodict, tmp = GMTGrid.getFileGeoDict(gridfile)
    else:
        fgeodict, tmp = GDALGrid.getFileGeoDict(gridfile)
    dx, dy = (fgeodict.dx, fgeodict.dy)
    sdict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    if gridtype == 'gmt':
        grid = GMTGrid.load(gridfile, samplegeodict=sdict, resample=True,
                            method=method, doPadding=True)
    else:
        grid = GDALGrid.load(gridfile, samplegeodict=sdict, resample=True,
                             method=method, doPadding=True)
    return sampleFromGrid(grid, xypoints)
def _getFileGeoDict(fname):
    geodict = None
    try:
        geodict = GMTGrid.getFileGeoDict(fname)
    except Exception as msg1:
        try:
            geodict = GDALGrid.getFileGeoDict(fname)
        except Exception as msg2:
            msg = ('File geodict failure with %s - error messages: '
                   '"%s"\n "%s"' % (fname, str(msg1), str(msg2)))
            raise ShakeMapException(msg)
    return geodict
def getGridType(gridfile):
    gridtype = None
    try:
        fdict = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception:
        try:
            fdict = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except Exception:
            pass
    return gridtype
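# A minimal sketch contrasting the two probes above (file name hypothetical;
# assumes getGridType and _getFileGeoDict are importable in the same scope):
# getGridType() swallows reader failures and returns None, while
# _getFileGeoDict() raises a ShakeMapException carrying both reader errors.
def _example_probe_grid(fname='susceptibility.flt'):
    gridtype = getGridType(fname)
    if gridtype is None:
        return None, None
    return gridtype, _getFileGeoDict(fname)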
def contains(self, lat, lon):
    """Check to see if input coordinates are contained inside Slab model.

    Args:
        lat (float): Hypocentral latitude in decimal degrees.
        lon (float): Hypocentral longitude in decimal degrees.

    Returns:
        bool: True if point falls inside minimum bounding box of slab model.
    """
    gdict, tmp = GMTGrid.getFileGeoDict(self._depth_file)
    gxmin = gdict.xmin
    gxmax = gdict.xmax
    # unwrap bounds that cross the 180/-180 meridian to match the sign
    # of the input longitude
    if lon < 0:
        if gxmin > gxmax:
            gxmin -= 360
    else:
        if gxmin > gxmax:
            gxmax += 360
    if lat >= gdict.ymin and lat <= gdict.ymax and \
            lon >= gxmin and lon <= gxmax:
        return True
    return False
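# A minimal usage sketch for contains() (assuming `slab` is an instance of
# the enclosing slab-model class with a valid depth file; the coordinates
# are hypothetical): the unwrapping above lets date-line-crossing grids
# compare correctly against either sign convention of the query longitude.
def _example_slab_contains(slab):
    return slab.contains(38.3, 142.4)   # hypocentral lat, lon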
def getFileGeoDict(filename, gridtype):
    if gridtype == 'gmt':
        fgeodict, tmp = GMTGrid.getFileGeoDict(filename)
    else:
        fgeodict, tmp = GDALGrid.getFileGeoDict(filename)
    return fgeodict
def run_one_old_shakemap(eventid, topo=True, genex=True):
    """
    Convenience method for running old (v 3.5) shakemap with new estimates.
    This allows us to generate all the products with the old code, since
    the new code cannot do this yet, while using the new code for
    computing the ground motions.

    Args:
        eventid (str): Specifies the id of the event to process.
        topo (bool): Include topography shading?
        genex (bool): Should genex be run?

    Returns:
        dictionary: Each entry is the log file for the different
        ShakeMap3.5 calls.
    """
    config = ConfigObj(os.path.join(os.path.expanduser('~'),
                                    'scenarios.conf'))
    shakehome = config['system']['shakehome']
    log = {}
    shakebin = os.path.join(shakehome, 'bin')
    datadir = os.path.join(shakehome, 'data')

    # Read in event.xml
    eventdir = os.path.join(datadir, eventid)
    inputdir = os.path.join(eventdir, 'input')
    xml_file = os.path.join(inputdir, 'event.xml')
    event = read_event_file(xml_file)

    # Read in gmpe set name
    gmpefile = open(os.path.join(inputdir, "gmpe_set_name.txt"), "r")
    set_name = gmpefile.read()
    gmpefile.close()

    # Add scenario-specific fields:
    eventtree = ET.parse(xml_file)
    eventroot = eventtree.getroot()
    for eq in eventroot.iter('earthquake'):
        description = eq.attrib['description']
        directivity = eq.attrib['directivity']
        if 'reference' in eq.attrib.keys():
            reference = eq.attrib['reference']
        else:
            reference = ''
    event['description'] = description
    event['directivity'] = directivity
    event['reference'] = reference

    grd = os.path.join(inputdir, 'pgv_estimates.grd')
    gdict = GMTGrid.getFileGeoDict(grd)[0]

    # Tolerance is a bit hacky but necessary to prevent GMT from barfing
    # because it thinks that the estimates files do not cover the desired
    # area sampled by grind's call with grdsample.
    tol = gdict.dx
    W = gdict.xmin + tol
    E = gdict.xmax - tol
    S = gdict.ymin + tol
    N = gdict.ymax - tol

    # Put into grind.conf (W S E N)
    confdir = os.path.join(eventdir, 'config')
    if not os.path.isdir(confdir):
        os.mkdir(confdir)

    # need to copy default grind.conf
    default_grind_conf = os.path.join(shakehome, 'config', 'grind.conf')
    grind_conf = os.path.join(confdir, 'grind.conf')
    shutil.copyfile(default_grind_conf, grind_conf)

    # Set strictbound and resolution to match estimates.grd files
    with open(grind_conf, 'a') as f:
        f.write('x_grid_interval : %.16f\n' % gdict.dx)
        f.write('y_grid_interval : %.16f\n' % gdict.dy)
        f.write('strictbound : %.9f %.9f %.9f %.9f\n' % (W, S, E, N))

    # Grind
    callgrind = os.path.join(shakebin, 'grind') + \
        ' -event ' + eventid + ' -psa'
    rc, so, se = get_command_output(callgrind)
    log['grind'] = {'rc': rc, 'so': so, 'se': se}

    # Add GMPE set name to info.json
    cmd = os.path.join(shakebin, 'edit_info') + ' -event ' + eventid + \
        ' -tag gmpe_reference' + ' -value ' + set_name
    rc, so, se = get_command_output(cmd)
    log['edit_info'] = {'rc': rc, 'so': so, 'se': se}

    # Tag
    calltag = os.path.join(shakebin, 'tag') + \
        ' -event ' + eventid + ' -name \"' + event['locstring'] + ' - ' + \
        event['description'] + '\"'
    rc, so, se = get_command_output(calltag)
    log['tag'] = {'rc': rc, 'so': so, 'se': se}

    # Copy rock_grid.xml from input to output directory
    rg_scr = os.path.join(inputdir, 'rock_grid.xml')
    rg_dst = os.path.join(eventdir, 'output', 'rock_grid.xml')
    cmd = shutil.copy(rg_scr, rg_dst)

    # Mapping
    if topo is True:
        topostr = '-itopo'
    else:
        topostr = ''
    callmapping = os.path.join(shakebin, 'mapping') + ' -event ' + \
        eventid + ' -timestamp -nohinges ' + topostr
    rc, so, se = get_command_output(callmapping)
    log['mapping'] = {'rc': rc, 'so': so, 'se': se}

    # Genex
    if genex is True:
        callgenex = os.path.join(shakebin, 'genex') + ' -event ' + \
            eventid + ' -metadata -zip -verbose -shape shape -shape hazus'
        rc, so, se = get_command_output(callgenex)
        log['genex'] = {'rc': rc, 'so': so, 'se': se}

    return log
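# A minimal usage sketch for run_one_old_shakemap (the event id is
# hypothetical; ~/scenarios.conf and the event's input directory must
# already be populated for this to run):
def _example_run_old_shakemap():
    log = run_one_old_shakemap('us1000abcd', topo=True, genex=False)
    for step, result in log.items():
        print(step, 'return code:', result['rc'])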
def drawContourMap(self, outfolder, cmin=None, cmax=None):
    if self.contour_colormap is None:
        raise ShakeMapException(
            'MapMaker.setGMTColormap() has not been called.')
    t0 = time.time()
    # resample shakemap to topogrid
    # get the geodict for the topo file
    topodict = GMTGrid.getFileGeoDict(self.topofile)
    # get the geodict for the ShakeMap
    smdict = self.shakemap.getGeoDict()
    # get a geodict that is aligned with topo, but inside shakemap
    sampledict = topodict.getBoundsWithin(smdict)
    self.shakemap = self.shakemap.interpolateToGrid(sampledict)
    gd = self.shakemap.getGeoDict()

    # establish the basemap object
    m = self._setMap(gd)

    # get topo layer and project it
    topogrid = GMTGrid.load(self.topofile, samplegeodict=sampledict,
                            resample=False)
    topodata = topogrid.getData().copy()
    ptopo = self._projectGrid(topodata, m, gd)

    # get contour layer and project it
    imtdata = self.shakemap.getLayer(self.contour_layer).getData().copy()
    pimt = self._projectGrid(imtdata, m, gd)

    # get the draped intensity data
    hillshade = self._getShaded(ptopo)

    # draw the draped intensity data
    m.imshow(hillshade, interpolation='none', zorder=IMG_ZORDER)

    # draw the contours of imt data
    xmin = gd.xmin
    if gd.xmax < gd.xmin:
        xmin -= 360
    lons = np.linspace(xmin, gd.xmax, gd.nx)
    # backwards so it plots right side up
    lats = np.linspace(gd.ymax, gd.ymin, gd.ny)
    x, y = m(*np.meshgrid(lons, lats))
    pimt = gaussian_filter(pimt, 5.0)
    dmin = pimt.min()
    dmax = pimt.max()
    levels = self.getContourLevels(dmin, dmax, self.contour_layer)
    cs = m.contour(x, y, np.flipud(pimt), colors='w', cmap=None,
                   levels=levels, zorder=CONTOUR_ZORDER)
    clabels = plt.clabel(cs, colors='k', fmt='%.1f', fontsize=8.0,
                         zorder=CONTOUR_ZORDER)
    for cl in clabels:
        bbox = dict(boxstyle="round", facecolor='white', edgecolor='w')
        cl.set_bbox(bbox)
        cl.set_zorder(CONTOUR_ZORDER)

    # draw country/state boundaries
    self._drawBoundaries(m)

    # draw lakes
    self._drawLakes(m, gd)

    # draw oceans (pre-processed with islands taken out)
    t1 = time.time()
    self._drawOceans(m, gd)
    t2 = time.time()
    print('%.1f seconds to render oceans.' % (t2 - t1))

    # draw coastlines
    self._drawCoastlines(m, gd)

    # draw meridians, parallels, labels, ticks
    self._drawGraticules(m, gd)

    # draw filled symbols for MMI and instrumented measures
    self._drawStations(m, fill=True, imt=self.contour_layer)

    # draw map scale
    scalex = gd.xmin + (gd.xmax - gd.xmin) / 5.0
    scaley = gd.ymin + (gd.ymax - gd.ymin) / 10.0
    yoff = (0.007 * (m.ymax - m.ymin))
    clon = (gd.xmin + gd.xmax) / 2.0
    clat = (gd.ymin + gd.ymax) / 2.0
    m.drawmapscale(scalex, scaley, clon, clat, length=100,
                   barstyle='fancy', yoffset=yoff, zorder=SCALE_ZORDER)

    # draw fault polygon, if present
    self._drawFault(m)  # get the fault loaded

    # draw epicenter
    hlon = self.shakemap.getEventDict()['lon']
    hlat = self.shakemap.getEventDict()['lat']
    m.plot(hlon, hlat, 'k*', latlon=True, fillstyle='none',
           markersize=22, mew=1.2, zorder=EPICENTER_ZORDER)

    # draw cities
    # reduce the number of cities to those whose labels don't collide
    # set up cities
    if self.city_cols is not None:
        self.cities = self.cities.limitByBounds(
            (gd.xmin, gd.xmax, gd.ymin, gd.ymax))
        self.cities = self.cities.limitByGrid(
            nx=self.city_cols, ny=self.city_rows,
            cities_per_grid=self.cities_per_grid)
        self.cities = self.cities.limitByMapCollision(m)
    self.cities.renderToMap(m.ax, zorder=CITIES_ZORDER)

    # draw title and supertitle
    eventid = self._drawTitle(isContour=True)

    # draw whatever road data is available
    # self._drawRoads(m)

    # save plot to file
    plt.draw()
    outfile = os.path.join(outfolder, 'contour_%s_%s.pdf'
                           % (self.contour_layer, eventid))
    plt.savefig(outfile)
    tn = time.time()
    print('%.1f seconds to render entire map.' % (tn - t0))
    return outfile
def calculate(self):
    """Calculate the model.

    :returns:
      a dictionary containing the model results and model inputs if
      saveinputs was set to True when class was set up, see
      <https://github.com/usgs/groundfailure#api-for-model-output> for a
      description of the structure of this output
    """
    X = eval(self.equation)
    P = 1 / (1 + np.exp(-X))
    if 'vs30max' in self.config[self.model].keys():
        vs30 = self.layerdict['vs30'].getData()
        P[vs30 > float(self.config[self.model]['vs30max'])] = 0.0
    if 'minpgv' in self.config[self.model].keys():
        pgv = self.shakemap.getLayer('pgv').getData()
        P[pgv < float(self.config[self.model]['minpgv'])] = 0.0
    if 'coverage' in self.config[self.model].keys():
        eqn = self.config[self.model]['coverage']['eqn']
        ind = copy.copy(P)
        P = eval(eqn)
    if self.uncert is not None:
        Xmin = eval(self.equationmin)
        Xmax = eval(self.equationmax)
        Pmin = 1 / (1 + np.exp(-Xmin))
        Pmax = 1 / (1 + np.exp(-Xmax))
        if 'vs30max' in self.config[self.model].keys():
            vs30 = self.layerdict['vs30'].getData()
            Pmin[vs30 > float(self.config[self.model]['vs30max'])] = 0.0
            Pmax[vs30 > float(self.config[self.model]['vs30max'])] = 0.0
        if 'minpgv' in self.config[self.model].keys():
            pgv = self.shakemap.getLayer('pgv').getData()
            Pmin[pgv < float(self.config[self.model]['minpgv'])] = 0.0
            Pmax[pgv < float(self.config[self.model]['minpgv'])] = 0.0
        if 'coverage' in self.config[self.model].keys():
            eqnmin = eqn.replace('P', 'Pmin')
            eqnmax = eqn.replace('P', 'Pmax')
            Pmin = eval(eqnmin)
            Pmax = eval(eqnmax)
    if self.slopefile is not None:
        ftype = getFileType(self.slopefile)
        sampledict = self.shakemap.getGeoDict()
        if ftype == 'gmt':
            if GMTGrid.getFileGeoDict(self.slopefile)[0] == sampledict:
                slope = GMTGrid.load(
                    self.slopefile).getData() / self.slopediv
            else:
                slope = GMTGrid.load(
                    self.slopefile, sampledict, resample=True,
                    method='linear',
                    doPadding=True).getData() / self.slopediv
            # Apply slope min/max limits
            print('applying slope thresholds')
            P[slope > self.slopemax] = 0.
            P[slope < self.slopemin] = 0.
            if self.uncert is not None:
                Pmin[slope > self.slopemax] = 0.
                Pmin[slope < self.slopemin] = 0.
                Pmax[slope > self.slopemax] = 0.
                Pmax[slope < self.slopemin] = 0.
        elif ftype == 'esri':
            if GDALGrid.getFileGeoDict(self.slopefile)[0] == sampledict:
                slope = GDALGrid.load(
                    self.slopefile).getData() / self.slopediv
            else:
                slope = GDALGrid.load(
                    self.slopefile, sampledict, resample=True,
                    method='linear',
                    doPadding=True).getData() / self.slopediv
            # Apply slope min/max limits
            print('applying slope thresholds')
            P[slope > self.slopemax] = 0.
            P[slope < self.slopemin] = 0.
            if self.uncert is not None:
                Pmin[slope > self.slopemax] = 0.
                Pmin[slope < self.slopemin] = 0.
                Pmax[slope > self.slopemax] = 0.
                Pmax[slope < self.slopemin] = 0.
        else:
            print('Slope file %s does not appear to be a valid GMT or '
                  'ESRI file, not applying any slope thresholds.'
                  % (self.slopefile))
    else:
        print('No slope file provided, slope thresholds not applied')

    # Stuff into Grid2D object
    temp = self.shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'],
                                temp['shakemap_version'])
    description = {
        'name': self.modelrefs['shortref'],
        'longref': self.modelrefs['longref'],
        'units': 'probability',
        'shakemap': shakedetail,
        'parameters': {'slopemin': self.slopemin,
                       'slopemax': self.slopemax}
    }
    Pgrid = Grid2D(P, self.geodict)
    rdict = collections.OrderedDict()
    rdict['model'] = {
        'grid': Pgrid,
        'label': ('%s Probability') % (self.modeltype.capitalize()),
        'type': 'output',
        'description': description
    }
    if self.uncert is not None:
        rdict['modelmin'] = {
            'grid': Grid2D(Pmin, self.geodict),
            'label': ('%s Probability (-%0.1f std ground motion)'
                      % (self.modeltype.capitalize(), self.numstd)),
            'type': 'output',
            'description': description
        }
        rdict['modelmax'] = {
            'grid': Grid2D(Pmax, self.geodict),
            'label': ('%s Probability (+%0.1f std ground motion)'
                      % (self.modeltype.capitalize(), self.numstd)),
            'type': 'output',
            'description': description
        }
    if self.saveinputs is True:
        for layername, layergrid in list(self.layerdict.items()):
            units = self.units[layername]
            if units is None:
                units = ''
            rdict[layername] = {
                'grid': layergrid,
                'label': '%s (%s)' % (layername, units),
                'type': 'input',
                'description': {'units': units, 'shakemap': shakedetail}
            }
        for gmused in self.gmused:
            if 'pga' in gmused:
                units = '%g'
                getkey = 'pga'
            elif 'pgv' in gmused:
                units = 'cm/s'
                getkey = 'pgv'
            elif 'mmi' in gmused:
                units = 'intensity'
                getkey = 'mmi'
            else:
                # Layer is derived from several input layers, skip
                # outputting this layer
                continue
            if getkey in rdict:
                continue
            layer = self.shakemap.getLayer(getkey)
            rdict[getkey] = {
                'grid': layer,
                'label': '%s (%s)' % (getkey.upper(), units),
                'type': 'input',
                'description': {'units': units, 'shakemap': shakedetail}
            }
            if self.uncert is not None:
                layer1 = np.exp(
                    np.log(layer.getData()) -
                    self.uncert.getLayer('std' + getkey).getData())
                rdict[getkey + 'modelmin'] = {
                    'grid': Grid2D(layer1, self.geodict),
                    'label': '%s - %0.1f std (%s)'
                             % (getkey.upper(), self.numstd, units),
                    'type': 'input',
                    'description': {'units': units,
                                    'shakemap': shakedetail}
                }
                layer2 = np.exp(
                    np.log(layer.getData()) +
                    self.uncert.getLayer('std' + getkey).getData())
                rdict[getkey + 'modelmax'] = {
                    'grid': Grid2D(layer2, self.geodict),
                    'label': '%s + %0.1f std (%s)'
                             % (getkey.upper(), self.numstd, units),
                    'type': 'input',
                    'description': {'units': units,
                                    'shakemap': shakedetail}
                }
    return rdict
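# The core of calculate() is the logistic link P = 1 / (1 + exp(-X)),
# applied elementwise to the linear predictor X assembled from the
# coefficient * layer terms. A standalone sketch with invented values
# (numpy assumed imported as np):
def _example_logistic_link():
    X = np.array([-4.0, 0.0, 4.0])   # linear predictor values
    P = 1 / (1 + np.exp(-X))         # approx [0.018, 0.5, 0.982]
    return P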
def __init__(self, shakefile, config, uncertfile=None, saveinputs=False,
             slopefile=None, slopediv=1., bounds=None, numstd=1):
    """Set up the logistic model

    # ADD BOUNDS TO THIS MODEL
    :param config: configobj (config .ini file read in using configobj)
      defining the model and its inputs. Only one model should be
      described in each config file.
    :type config: dictionary
    :param shakefile: Full file path to shakemap.xml file for the event
      of interest
    :type shakefile: string
    :param uncertfile: Full file path to xml file of shakemap
      uncertainties
    :type uncertfile: string
    :param saveinputs: if True, saves all the input layers as Grid2D
      objects in addition to the model; if False, it will just output
      the model
    :type saveinputs: boolean
    :param slopefile: optional file path to slopefile that will be
      resampled to the other input files for applying thresholds.
      OVERWRITES VALUE IN CONFIG
    :type slopefile: string
    :param slopediv: number to divide slope by to get to degrees
      (usually will be default of 1.)
    :type slopediv: float
    :param numstd: number of +/- standard deviations to use if
      uncertainty is computed (uncertfile is not None)
    """
    mnames = getLogisticModelNames(config)
    if len(mnames) == 0:
        raise Exception(
            'No config file found or problem with config file format')
    if len(mnames) > 1:
        raise Exception(
            'Config file contains more than one model which is no longer '
            'allowed, update your config file to the newer format')
    self.model = mnames[0]
    self.config = config
    cmodel = config[self.model]
    self.modeltype = cmodel['gfetype']
    self.coeffs = validateCoefficients(cmodel)
    # key = layer name, value = file name
    self.layers = validateLayers(cmodel)
    self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
    self.interpolations = validateInterpolations(cmodel, self.layers)
    self.units = validateUnits(cmodel, self.layers)
    self.gmused = [value for term, value in cmodel['terms'].items()
                   if 'pga' in value.lower() or 'pgv' in value.lower()
                   or 'mmi' in value.lower()]
    self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
    self.numstd = numstd
    if cmodel['baselayer'] not in list(self.layers.keys()):
        raise Exception(
            'You must specify a base layer corresponding to one of the '
            'files in the layer section.')
    self.saveinputs = saveinputs
    if slopefile is None:
        try:
            self.slopefile = cmodel['slopefile']
        except Exception:
            print('Could not find slopefile term in config, no slope '
                  'thresholds will be applied\n')
            self.slopefile = None
    else:
        self.slopefile = slopefile
    self.slopediv = slopediv

    # get the geodict for the shakemap
    geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
    griddict, eventdict, specdict, fields, uncertainties = \
        getHeaderData(shakefile)
    # YEAR = eventdict['event_timestamp'].year
    MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]
    # DAY = eventdict['event_timestamp'].day
    # HOUR = eventdict['event_timestamp'].hour

    # now find the layer that is our base layer and get the largest
    # bounds we can guarantee not to exceed shakemap bounds
    basefile = self.layers[cmodel['baselayer']]
    ftype = getFileType(basefile)
    if ftype == 'esri':
        basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
        sampledict = basegeodict.getBoundsWithin(geodict)
    elif ftype == 'gmt':
        basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
        sampledict = basegeodict.getBoundsWithin(geodict)
    else:
        raise Exception('All predictor variable grids must be a valid '
                        'GMT or ESRI file type')

    # now load the shakemap, resampling and padding if necessary
    if ShakeGrid.getFileGeoDict(shakefile, adjust='res') == sampledict:
        self.shakemap = ShakeGrid.load(shakefile, adjust='res')
        flag = 1
    else:
        self.shakemap = ShakeGrid.load(shakefile,
                                       samplegeodict=sampledict,
                                       resample=True, doPadding=True,
                                       adjust='res')
        flag = 0

    # take uncertainties into account
    if uncertfile is not None:
        try:
            if flag == 1:
                self.uncert = ShakeGrid.load(uncertfile, adjust='res')
            else:
                self.uncert = ShakeGrid.load(uncertfile,
                                             samplegeodict=sampledict,
                                             resample=True,
                                             doPadding=True,
                                             adjust='res')
        except Exception:
            print('Could not read uncertainty file, ignoring '
                  'uncertainties')
            self.uncert = None
    else:
        self.uncert = None

    # load the predictor layers into a dictionary
    self.layerdict = {}  # key = layer name, value = grid object
    for layername, layerfile in self.layers.items():
        if isinstance(layerfile, list):
            for lfile in layerfile:
                if timeField == 'MONTH':
                    if lfile.find(MONTH) > -1:
                        layerfile = lfile
                        ftype = getFileType(layerfile)
                        interp = self.interpolations[layername]
                        if ftype == 'gmt':
                            if GMTGrid.getFileGeoDict(
                                    layerfile)[0] == sampledict:
                                lyr = GMTGrid.load(layerfile)
                            else:
                                lyr = GMTGrid.load(layerfile, sampledict,
                                                   resample=True,
                                                   method=interp,
                                                   doPadding=True)
                        elif ftype == 'esri':
                            if GDALGrid.getFileGeoDict(
                                    layerfile)[0] == sampledict:
                                lyr = GDALGrid.load(layerfile)
                            else:
                                lyr = GDALGrid.load(layerfile, sampledict,
                                                    resample=True,
                                                    method=interp,
                                                    doPadding=True)
                        else:
                            msg = ('Layer %s (file %s) does not appear '
                                   'to be a valid GMT or ESRI file.'
                                   % (layername, layerfile))
                            raise Exception(msg)
                        self.layerdict[layername] = lyr
        else:
            # first, figure out what kind of file we have (or is it a
            # directory?)
            ftype = getFileType(layerfile)
            interp = self.interpolations[layername]
            if ftype == 'gmt':
                if GMTGrid.getFileGeoDict(layerfile)[0] == sampledict:
                    lyr = GMTGrid.load(layerfile)
                else:
                    lyr = GMTGrid.load(layerfile, sampledict,
                                       resample=True, method=interp,
                                       doPadding=True)
            elif ftype == 'esri':
                if GDALGrid.getFileGeoDict(layerfile)[0] == sampledict:
                    lyr = GDALGrid.load(layerfile)
                else:
                    lyr = GDALGrid.load(layerfile, sampledict,
                                        resample=True, method=interp,
                                        doPadding=True)
            else:
                msg = ('Layer %s (file %s) does not appear to be a valid '
                       'GMT or ESRI file.' % (layername, layerfile))
                raise Exception(msg)
            self.layerdict[layername] = lyr

    shapes = {}
    for layername, layer in self.layerdict.items():
        shapes[layername] = layer.getData().shape

    self.nuggets = [str(self.coeffs['b0'])]
    ckeys = list(self.terms.keys())
    ckeys.sort()
    for key in ckeys:
        term = self.terms[key]
        coeff = self.coeffs[key]
        self.nuggets.append('(%g * %s)' % (coeff, term))
    self.equation = ' + '.join(self.nuggets)

    if self.uncert is not None:
        self.nugmin = copy.copy(self.nuggets)
        self.nugmax = copy.copy(self.nuggets)
        # Find the terms with the shakemap input and replace for these
        # nuggets
        for k, nug in enumerate(self.nuggets):
            if "self.shakemap.getLayer('pga').getData()" in nug:
                self.nugmin[k] = self.nugmin[k].replace(
                    "self.shakemap.getLayer('pga').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pga')"
                    ".getData()) - self.numstd * self.uncert"
                    ".getLayer('stdpga').getData()))")
                self.nugmax[k] = self.nugmax[k].replace(
                    "self.shakemap.getLayer('pga').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pga')"
                    ".getData()) + self.numstd * self.uncert"
                    ".getLayer('stdpga').getData()))")
            elif "self.shakemap.getLayer('pgv').getData()" in nug:
                self.nugmin[k] = self.nugmin[k].replace(
                    "self.shakemap.getLayer('pgv').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pgv')"
                    ".getData()) - self.numstd * self.uncert"
                    ".getLayer('stdpgv').getData()))")
                self.nugmax[k] = self.nugmax[k].replace(
                    "self.shakemap.getLayer('pgv').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pgv')"
                    ".getData()) + self.numstd * self.uncert"
                    ".getLayer('stdpgv').getData()))")
            elif "self.shakemap.getLayer('mmi').getData()" in nug:
                self.nugmin[k] = self.nugmin[k].replace(
                    "self.shakemap.getLayer('mmi').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('mmi')"
                    ".getData()) - self.numstd * self.uncert"
                    ".getLayer('stdmmi').getData()))")
                self.nugmax[k] = self.nugmax[k].replace(
                    "self.shakemap.getLayer('mmi').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('mmi')"
                    ".getData()) + self.numstd * self.uncert"
                    ".getLayer('stdmmi').getData()))")
        self.equationmin = ' + '.join(self.nugmin)
        self.equationmax = ' + '.join(self.nugmax)
    else:
        self.equationmin = None
        self.equationmax = None

    self.geodict = self.shakemap.getGeoDict()

    try:
        self.slopemin = float(config[self.model]['slopemin'])
        self.slopemax = float(config[self.model]['slopemax'])
    except Exception:
        print('could not find slopemin and/or slopemax in config, '
              'no limits will be applied')
        self.slopemin = 0.
        self.slopemax = 90.
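# A minimal usage sketch for this constructor (assuming the enclosing class
# is named LogisticModel and that configobj's ConfigObj is importable; file
# names are hypothetical and the config must describe exactly one model):
def _example_logistic_model():
    config = ConfigObj('nowicki_2014.ini')
    lm = LogisticModel('grid.xml', config, saveinputs=True, slopediv=100.)
    return lm.calculate()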
def __init__(self, config, shakefile, model, uncertfile=None):
    """Set up the logistic model

    :param config: configobj (config .ini file read in using configobj)
      defining the model and its inputs
    :type config: dictionary
    :param shakefile: Full file path to shakemap.xml file for the event
      of interest
    :type shakefile: string
    :param model: Name of model defined in config that should be run for
      the event of interest
    :type model: string
    :param uncertfile: Full file path to xml file of shakemap
      uncertainties
    :type uncertfile: string
    """
    if model not in getLogisticModelNames(config):
        raise Exception('Could not find a model called "%s" in config %s.'
                        % (model, config))
    # do everything here short of calculations - parse config, assemble
    # eqn strings, load data.
    self.model = model
    cmodel = config['logistic_models'][model]
    self.modeltype = cmodel['gfetype']
    self.coeffs = validateCoefficients(cmodel)
    # key = layer name, value = file name
    self.layers = validateLayers(cmodel)
    self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
    self.interpolations = validateInterpolations(cmodel, self.layers)
    self.units = validateUnits(cmodel, self.layers)
    self.gmused = [value for term, value in cmodel['terms'].items()
                   if 'pga' in value.lower() or 'pgv' in value.lower()
                   or 'mmi' in value.lower()]
    self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
    if 'baselayer' not in cmodel:
        raise Exception('You must specify a base layer file in config.')
    if cmodel['baselayer'] not in list(self.layers.keys()):
        raise Exception('You must specify a base layer corresponding to '
                        'one of the files in the layer section.')

    # get the geodict for the shakemap
    geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
    griddict, eventdict, specdict, fields, uncertainties = \
        getHeaderData(shakefile)
    # YEAR = eventdict['event_timestamp'].year
    MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]
    # DAY = eventdict['event_timestamp'].day
    # HOUR = eventdict['event_timestamp'].hour

    # now find the layer that is our base layer and get the largest
    # bounds we can guarantee not to exceed shakemap bounds
    basefile = self.layers[cmodel['baselayer']]
    ftype = getFileType(basefile)
    if ftype == 'esri':
        basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
        sampledict = basegeodict.getBoundsWithin(geodict)
    elif ftype == 'gmt':
        basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
        sampledict = basegeodict.getBoundsWithin(geodict)
    else:
        raise Exception('All predictor variable grids must be a valid '
                        'GMT or ESRI file type')

    # now load the shakemap, resampling and padding if necessary
    self.shakemap = ShakeGrid.load(shakefile, samplegeodict=sampledict,
                                   resample=True, doPadding=True,
                                   adjust='res')

    # take uncertainties into account
    if uncertfile is not None:
        try:
            self.uncert = ShakeGrid.load(uncertfile,
                                         samplegeodict=sampledict,
                                         resample=True, doPadding=True,
                                         adjust='res')
        except Exception:
            print('Could not read uncertainty file, ignoring '
                  'uncertainties')
            self.uncert = None
    else:
        self.uncert = None

    # load the predictor layers into a dictionary
    self.layerdict = {}  # key = layer name, value = grid object
    for layername, layerfile in self.layers.items():
        if isinstance(layerfile, list):
            for lfile in layerfile:
                if timeField == 'MONTH':
                    if lfile.find(MONTH) > -1:
                        layerfile = lfile
                        ftype = getFileType(layerfile)
                        interp = self.interpolations[layername]
                        if ftype == 'gmt':
                            lyr = GMTGrid.load(layerfile, sampledict,
                                               resample=True,
                                               method=interp,
                                               doPadding=True)
                        elif ftype == 'esri':
                            lyr = GDALGrid.load(layerfile, sampledict,
                                                resample=True,
                                                method=interp,
                                                doPadding=True)
                        else:
                            msg = ('Layer %s (file %s) does not appear '
                                   'to be a valid GMT or ESRI file.'
                                   % (layername, layerfile))
                            raise Exception(msg)
                        self.layerdict[layername] = lyr
        else:
            # first, figure out what kind of file we have (or is it a
            # directory?)
            ftype = getFileType(layerfile)
            interp = self.interpolations[layername]
            if ftype == 'gmt':
                lyr = GMTGrid.load(layerfile, sampledict, resample=True,
                                   method=interp, doPadding=True)
            elif ftype == 'esri':
                lyr = GDALGrid.load(layerfile, sampledict, resample=True,
                                    method=interp, doPadding=True)
            else:
                msg = ('Layer %s (file %s) does not appear to be a valid '
                       'GMT or ESRI file.' % (layername, layerfile))
                raise Exception(msg)
            self.layerdict[layername] = lyr

    shapes = {}
    for layername, layer in self.layerdict.items():
        shapes[layername] = layer.getData().shape

    self.nuggets = [str(self.coeffs['b0'])]
    ckeys = list(self.terms.keys())
    ckeys.sort()
    for key in ckeys:
        term = self.terms[key]
        coeff = self.coeffs[key]
        self.nuggets.append('(%g * %s)' % (coeff, term))
    self.equation = ' + '.join(self.nuggets)

    if self.uncert is not None:
        self.nugmin = copy.copy(self.nuggets)
        self.nugmax = copy.copy(self.nuggets)
        # Find the terms with the shakemap input and replace for these
        # nuggets
        for k, nug in enumerate(self.nuggets):
            if "self.shakemap.getLayer('pga').getData()" in nug:
                self.nugmin[k] = self.nugmin[k].replace(
                    "self.shakemap.getLayer('pga').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pga')"
                    ".getData()) - self.uncert.getLayer('stdpga')"
                    ".getData()))")
                self.nugmax[k] = self.nugmax[k].replace(
                    "self.shakemap.getLayer('pga').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pga')"
                    ".getData()) + self.uncert.getLayer('stdpga')"
                    ".getData()))")
            elif "self.shakemap.getLayer('pgv').getData()" in nug:
                self.nugmin[k] = self.nugmin[k].replace(
                    "self.shakemap.getLayer('pgv').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pgv')"
                    ".getData()) - self.uncert.getLayer('stdpgv')"
                    ".getData()))")
                self.nugmax[k] = self.nugmax[k].replace(
                    "self.shakemap.getLayer('pgv').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('pgv')"
                    ".getData()) + self.uncert.getLayer('stdpgv')"
                    ".getData()))")
            elif "self.shakemap.getLayer('mmi').getData()" in nug:
                self.nugmin[k] = self.nugmin[k].replace(
                    "self.shakemap.getLayer('mmi').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('mmi')"
                    ".getData()) - self.uncert.getLayer('stdmmi')"
                    ".getData()))")
                self.nugmax[k] = self.nugmax[k].replace(
                    "self.shakemap.getLayer('mmi').getData()",
                    "(np.exp(np.log(self.shakemap.getLayer('mmi')"
                    ".getData()) + self.uncert.getLayer('stdmmi')"
                    ".getData()))")
        self.equationmin = ' + '.join(self.nugmin)
        self.equationmax = ' + '.join(self.nugmax)
    else:
        self.equationmin = None
        self.equationmax = None

    self.geodict = self.shakemap.getGeoDict()

    try:
        self.slopemin = float(config['logistic_models'][model]['slopemin'])
        self.slopemax = float(config['logistic_models'][model]['slopemax'])
    except Exception:
        print('could not find slopemin and/or slopemax in config, '
              'no limits will be applied')
        self.slopemin = 0.
        self.slopemax = 90.
def drawContourMap(self, imt, outfolder, cmin=None, cmax=None):
    """
    Render IMT data as contours over topography, with oceans, coastlines,
    etc.

    Args:
        imt (str): Name of the IMT to be contoured (e.g., 'MMI', 'PGV').
        outfolder (str): Path to directory where output map should be
            saved.

    Returns:
        str: Path to output IMT map.
    """
    if self.contour_colormap is None:
        raise Exception('MapMaker.setGMTColormap() has not been called.')
    t0 = time.time()
    # resample shakemap to topogrid
    # get the geodict for the topo file
    topodict = GMTGrid.getFileGeoDict(self.topofile)[0]
    # get the geodict for the ShakeMap
    comp = self.container.getComponents(imt)[0]
    imtdict = self.container.getIMTGrids(imt, comp)
    imtgrid = imtdict['mean']
    smdict = imtgrid.getGeoDict()
    # get a geodict that is aligned with topo, but inside shakemap
    sampledict = topodict.getBoundsWithin(smdict)
    imtgrid = imtgrid.interpolateToGrid(sampledict)
    gd = imtgrid.getGeoDict()

    # establish the basemap object
    m = self._setMap(gd)

    # get topo layer and project it
    topogrid = GMTGrid.load(self.topofile, samplegeodict=sampledict,
                            resample=False)
    topodata = topogrid.getData().copy()
    ptopo = self._projectGrid(topodata, m, gd)

    # get contour layer and project it
    imtdata = imtgrid.getData().copy()
    # convert units if necessary
    if imt == 'MMI':
        pass
    elif imt == 'PGV':
        imtdata = np.exp(imtdata)
    else:
        imtdata = np.exp(imtdata) * 100
    pimt = self._projectGrid(imtdata, m, gd)

    # get the draped intensity data
    hillshade = self._getShaded(ptopo)

    # draw the draped intensity data
    m.imshow(hillshade, interpolation='none', zorder=IMG_ZORDER)

    # draw the contours of imt data
    xmin = gd.xmin
    if gd.xmax < gd.xmin:
        xmin -= 360
    lons = np.linspace(xmin, gd.xmax, gd.nx)
    # backwards so it plots right side up
    lats = np.linspace(gd.ymax, gd.ymin, gd.ny)
    x, y = m(*np.meshgrid(lons, lats))
    pimt = gaussian_filter(pimt, 5.0)
    dmin = pimt.min()
    dmax = pimt.max()
    levels = self.getContourLevels(dmin, dmax, imt)
    cs = m.contour(x, y, np.flipud(pimt), colors='w', cmap=None,
                   levels=levels, zorder=CONTOUR_ZORDER)
    clabels = plt.clabel(cs, colors='k', fmt='%.1f', fontsize=8.0,
                         zorder=CONTOUR_ZORDER)
    for cl in clabels:
        bbox = dict(boxstyle="round", facecolor='white', edgecolor='w')
        cl.set_bbox(bbox)
        cl.set_zorder(CONTOUR_ZORDER)

    # draw country/state boundaries
    self._drawBoundaries(m)

    # draw lakes
    self._drawLakes(m, gd)

    # draw oceans (pre-processed with islands taken out)
    t1 = time.time()
    self._drawOceans(m, gd)
    t2 = time.time()
    self.logger.debug('%.1f seconds to render oceans.' % (t2 - t1))

    # draw coastlines
    self._drawCoastlines(m, gd)

    # draw meridians, parallels, labels, ticks
    self._drawGraticules(m, gd)

    # draw filled symbols for MMI and instrumented measures
    self._drawStations(m, fill=True, imt=imt)

    # draw map scale
    self._drawMapScale(m, gd)

    # draw fault polygon, if present
    self._drawFault(m)  # get the fault loaded

    # draw epicenter
    origin = self.fault.getOrigin()
    hlon = origin.lon
    hlat = origin.lat
    m.plot(hlon, hlat, 'k*', latlon=True, fillstyle='none',
           markersize=22, mew=1.2, zorder=EPICENTER_ZORDER)

    # draw cities
    # reduce the number of cities to those whose labels don't collide
    # set up cities
    if self.city_cols is not None:
        self.cities = self.cities.limitByBounds(
            (gd.xmin, gd.xmax, gd.ymin, gd.ymax))
        self.cities = self.cities.limitByGrid(
            nx=self.city_cols, ny=self.city_rows,
            cities_per_grid=self.cities_per_grid)
        if 'Times New Roman' in self.cities._fontlist:
            font = 'Times New Roman'
        else:
            font = 'DejaVu Sans'
        self.cities = self.cities.limitByMapCollision(m, fontname=font)
    self.cities.renderToMap(m.ax, zorder=CITIES_ZORDER)

    # draw title and supertitle
    self._drawTitle(imt)

    # save plot to file
    fileimt = oq_to_file(imt)
    plt.draw()
    outfile = os.path.join(outfolder, 'contour_%s.pdf' % (fileimt))
    plt.savefig(outfile)
    tn = time.time()
    self.logger.debug('%.1f seconds to render entire map.' % (tn - t0))
    return outfile
def drawIntensityMap(self, outfolder):
    """
    Render the MMI data as intensity draped over topography, with oceans,
    coastlines, etc.

    Args:
        outfolder (str): Path to directory where output map should be
            saved.

    Returns:
        str: Path to output intensity map.
    """
    t0 = time.time()
    # resample shakemap to topogrid
    # get the geodict for the topo file
    topodict = GMTGrid.getFileGeoDict(self.topofile)[0]
    # get the geodict for the ShakeMap
    comp = self.container.getComponents('MMI')[0]
    imtdict = self.container.getIMTGrids('MMI', comp)
    mmigrid = imtdict['mean']
    smdict = mmigrid.getGeoDict()
    # get a geodict that is aligned with topo, but inside shakemap
    sampledict = topodict.getBoundsWithin(smdict)
    mmigrid = mmigrid.interpolateToGrid(sampledict)
    gd = mmigrid.getGeoDict()

    # establish the basemap object
    m = self._setMap(gd)

    # get topo layer and project it
    topogrid = GMTGrid.load(self.topofile, samplegeodict=sampledict,
                            resample=False)
    topodata = topogrid.getData().copy()
    ptopo = self._projectGrid(topodata, m, gd)

    # get intensity layer and project it
    imtdata = mmigrid.getData().copy()
    pimt = self._projectGrid(imtdata, m, gd)

    # get the draped intensity data
    draped_hsv = self._getDraped(pimt, ptopo)  # where will 10.0 come from

    # draw the draped intensity data
    m.imshow(draped_hsv, interpolation='none', zorder=IMG_ZORDER)

    # draw country/state boundaries
    self._drawBoundaries(m)

    # draw whatever road data is available
    # self.logger.debug('Drawing roads...')
    # self._drawRoads(m)
    # self.logger.debug('Done drawing roads...')

    # draw lakes
    self._drawLakes(m, gd)

    # draw oceans (pre-processed with islands taken out)
    t1 = time.time()
    self._drawOceans(m, gd)
    t2 = time.time()
    self.logger.debug('%.1f seconds to render oceans.' % (t2 - t1))

    # draw coastlines
    self._drawCoastlines(m, gd)

    # draw meridians, parallels, labels, ticks
    self._drawGraticules(m, gd)

    # draw map scale
    self._drawMapScale(m, gd)

    # draw fault polygon, if present
    self._drawFault(m)  # get the fault loaded

    # draw epicenter
    origin = self.fault.getOrigin()
    hlon = origin.lon
    hlat = origin.lat
    m.plot(hlon, hlat, 'k*', latlon=True, fillstyle='none',
           markersize=22, mew=1.2, zorder=EPICENTER_ZORDER)

    # draw cities
    # reduce the number of cities to those whose labels don't collide
    # set up cities
    if self.city_cols is not None:
        self.cities = self.cities.limitByBounds(
            (gd.xmin, gd.xmax, gd.ymin, gd.ymax))
        self.cities = self.cities.limitByGrid(
            nx=self.city_cols, ny=self.city_rows,
            cities_per_grid=self.cities_per_grid)
        # self.logger.debug("Available fonts: ", self.cities._fontlist)
        if 'Times New Roman' in self.cities._fontlist:
            font = 'Times New Roman'
        else:
            font = 'DejaVu Sans'
        self.cities = self.cities.limitByMapCollision(m, fontname=font)
    self.cities.renderToMap(m.ax, zorder=CITIES_ZORDER)

    # draw title and supertitle
    self._drawTitle('MMI')

    # draw station and macroseismic locations
    self._drawStations(m)  # need stationlist object

    # save plot to file
    plt.draw()
    outfile = os.path.join(outfolder, 'intensity.pdf')
    plt.savefig(outfile)
    tn = time.time()
    self.logger.debug('%.1f seconds to render entire map.' % (tn - t0))
    return outfile
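# A minimal usage sketch (assuming `mapmaker` is a fully configured instance
# of the enclosing map-making class, with topofile, container, cities, and
# fault already set; the output directory is hypothetical):
def _example_draw_maps(mapmaker):
    intensity_pdf = mapmaker.drawIntensityMap('/tmp/mapout')
    contour_pdf = mapmaker.drawContourMap('PGA', '/tmp/mapout')
    return intensity_pdf, contour_pdf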
def __init__(self, config, shakefile, model):
    if model not in getLogisticModelNames(config):
        raise Exception('Could not find a model called "%s" in config %s.'
                        % (model, config))
    # do everything here short of calculations - parse config, assemble
    # eqn strings, load data.
    self.model = model
    cmodel = config['logistic_models'][model]
    self.coeffs = validateCoefficients(cmodel)
    # key = layer name, value = file name
    self.layers = validateLayers(cmodel)
    self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
    self.interpolations = validateInterpolations(cmodel, self.layers)
    self.units = validateUnits(cmodel, self.layers)
    if 'baselayer' not in cmodel:
        raise Exception('You must specify a base layer file in config.')
    if cmodel['baselayer'] not in list(self.layers.keys()):
        raise Exception('You must specify a base layer corresponding to '
                        'one of the files in the layer section.')

    # get the geodict for the shakemap
    geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
    griddict, eventdict, specdict, fields, uncertainties = \
        getHeaderData(shakefile)
    YEAR = eventdict['event_timestamp'].year
    MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]
    DAY = eventdict['event_timestamp'].day
    HOUR = eventdict['event_timestamp'].hour

    # now find the layer that is our base layer and get the largest
    # bounds we can guarantee not to exceed shakemap bounds
    basefile = self.layers[cmodel['baselayer']]
    ftype = getFileType(basefile)
    if ftype == 'esri':
        basegeodict = GDALGrid.getFileGeoDict(basefile)
        sampledict = basegeodict.getBoundsWithin(geodict)
    elif ftype == 'gmt':
        basegeodict = GMTGrid.getFileGeoDict(basefile)
        sampledict = basegeodict.getBoundsWithin(geodict)
    else:
        raise Exception('All predictor variable grids must be a valid '
                        'GMT or ESRI file type')

    # now load the shakemap, resampling and padding if necessary
    self.shakemap = ShakeGrid.load(shakefile, samplegeodict=sampledict,
                                   resample=True, doPadding=True,
                                   adjust='res')

    # load the predictor layers into a dictionary
    self.layerdict = {}  # key = layer name, value = grid object
    for layername, layerfile in self.layers.items():
        if isinstance(layerfile, list):
            for lfile in layerfile:
                if timeField == 'MONTH':
                    if lfile.find(MONTH) > -1:
                        layerfile = lfile
                        ftype = getFileType(layerfile)
                        interp = self.interpolations[layername]
                        if ftype == 'gmt':
                            lyr = GMTGrid.load(layerfile, sampledict,
                                               resample=True,
                                               method=interp,
                                               doPadding=True)
                        elif ftype == 'esri':
                            lyr = GDALGrid.load(layerfile, sampledict,
                                                resample=True,
                                                method=interp,
                                                doPadding=True)
                        else:
                            msg = ('Layer %s (file %s) does not appear '
                                   'to be a valid GMT or ESRI file.'
                                   % (layername, layerfile))
                            raise Exception(msg)
                        self.layerdict[layername] = lyr
        else:
            # first, figure out what kind of file we have (or is it a
            # directory?)
            ftype = getFileType(layerfile)
            interp = self.interpolations[layername]
            if ftype == 'gmt':
                lyr = GMTGrid.load(layerfile, sampledict, resample=True,
                                   method=interp, doPadding=True)
            elif ftype == 'esri':
                lyr = GDALGrid.load(layerfile, sampledict, resample=True,
                                    method=interp, doPadding=True)
            else:
                msg = ('Layer %s (file %s) does not appear to be a valid '
                       'GMT or ESRI file.' % (layername, layerfile))
                raise Exception(msg)
            self.layerdict[layername] = lyr

    shapes = {}
    for layername, layer in self.layerdict.items():
        shapes[layername] = layer.getData().shape

    self.nuggets = [str(self.coeffs['b0'])]
    ckeys = list(self.terms.keys())
    ckeys.sort()
    for key in ckeys:
        term = self.terms[key]
        coeff = self.coeffs[key]
        self.nuggets.append('(%g * %s)' % (coeff, term))
    self.equation = ' + '.join(self.nuggets)
    self.geodict = self.shakemap.getGeoDict()
def drawIntensityMap(self, outfolder):
    if self.intensity_colormap is None:
        raise ShakeMapException(
            'MapMaker.setGMTColormap() has not been called.')
    t0 = time.time()
    # resample shakemap to topogrid
    # get the geodict for the topo file
    topodict = GMTGrid.getFileGeoDict(self.topofile)
    # get the geodict for the ShakeMap
    smdict = self.shakemap.getGeoDict()
    # get a geodict that is aligned with topo, but inside shakemap
    sampledict = topodict.getBoundsWithin(smdict)
    self.shakemap = self.shakemap.interpolateToGrid(sampledict)
    gd = self.shakemap.getGeoDict()

    # establish the basemap object
    m = self._setMap(gd)

    # get topo layer and project it
    topogrid = GMTGrid.load(self.topofile, samplegeodict=sampledict,
                            resample=False)
    topodata = topogrid.getData().copy()
    ptopo = self._projectGrid(topodata, m, gd)

    # get intensity layer and project it
    imtdata = self.shakemap.getLayer(self.imt_layer).getData().copy()
    pimt = self._projectGrid(imtdata, m, gd)

    # get the draped intensity data
    draped_hsv = self._getDraped(pimt, ptopo)  # where will 10.0 come from

    # draw the draped intensity data
    m.imshow(draped_hsv, interpolation='none', zorder=IMG_ZORDER)

    # draw country/state boundaries
    self._drawBoundaries(m)

    # draw whatever road data is available
    self._drawRoads(m)

    # draw lakes
    self._drawLakes(m, gd)

    # draw oceans (pre-processed with islands taken out)
    t1 = time.time()
    self._drawOceans(m, gd)
    t2 = time.time()
    print('%.1f seconds to render oceans.' % (t2 - t1))

    # draw coastlines
    self._drawCoastlines(m, gd)

    # draw meridians, parallels, labels, ticks
    self._drawGraticules(m, gd)

    # draw map scale
    scalex = gd.xmin + (gd.xmax - gd.xmin) / 5.0
    scaley = gd.ymin + (gd.ymax - gd.ymin) / 10.0
    yoff = (0.007 * (m.ymax - m.ymin))
    clon = (gd.xmin + gd.xmax) / 2.0
    clat = (gd.ymin + gd.ymax) / 2.0
    m.drawmapscale(scalex, scaley, clon, clat, length=100,
                   barstyle='fancy', yoffset=yoff, zorder=SCALE_ZORDER)

    # draw fault polygon, if present
    self._drawFault(m)  # get the fault loaded

    # draw epicenter
    hlon = self.shakemap.getEventDict()['lon']
    hlat = self.shakemap.getEventDict()['lat']
    m.plot(hlon, hlat, 'k*', latlon=True, fillstyle='none',
           markersize=22, mew=1.2, zorder=EPICENTER_ZORDER)

    # draw cities
    # reduce the number of cities to those whose labels don't collide
    # set up cities
    if self.city_cols is not None:
        self.cities = self.cities.limitByBounds(
            (gd.xmin, gd.xmax, gd.ymin, gd.ymax))
        self.cities = self.cities.limitByGrid(
            nx=self.city_cols, ny=self.city_rows,
            cities_per_grid=self.cities_per_grid)
        self.cities = self.cities.limitByMapCollision(m)
    self.cities.renderToMap(m.ax, zorder=CITIES_ZORDER)

    # draw title and supertitle
    eventid = self._drawTitle()

    # draw station and macroseismic locations
    self._drawStations(m)  # need stationlist object

    # save plot to file
    plt.draw()
    outfile = os.path.join(outfolder, 'intensity_%s.pdf' % eventid)
    plt.savefig(outfile)
    tn = time.time()
    print('%.1f seconds to render entire map.' % (tn - t0))
    return outfile
def __init__(self, shakefile, config, uncertfile=None, saveinputs=False,
             slopefile=None, bounds=None, slopemod=None, trimfile=None):
    """
    Sets up the logistic model

    Args:
        shakefile (str): Path to shakemap grid.xml file for the event.
        config: configobj object defining the model and its inputs. Only
            one model should be described in each config file.
        uncertfile (str): Path to uncertainty.xml file.
        saveinputs (bool): Save input layers as Grid2D objects in addition
            to the model? If false (the default), it will just output the
            model.
        slopefile (str): Optional path to slopefile that will be resampled
            to the other input files for applying thresholds. OVERWRITES
            VALUE IN CONFIG.
        bounds (dict): Default of None uses ShakeMap boundaries, otherwise
            a dictionary of boundaries to cut to like

            .. code-block:: python

                bounds = {
                    'xmin': lonmin, 'xmax': lonmax,
                    'ymin': latmin, 'ymax': latmax
                }

        slopemod (str): How slope input should be modified to be in
            degrees: e.g., ``np.arctan(slope) * 180. / np.pi`` or
            ``slope/100.`` (note that this may be in the config file
            already).
        trimfile (str): shapefile of earth's landmasses to use to cut
            offshore areas.
    """
    mnames = getLogisticModelNames(config)
    if len(mnames) == 0:
        raise Exception('No config file found or problem with config '
                        'file format')
    if len(mnames) > 1:
        raise Exception('Config file contains more than one model which '
                        'is no longer allowed, update your config file '
                        'to the newer format')
    self.model = mnames[0]
    self.config = config
    cmodel = config[self.model]
    self.modeltype = cmodel['gfetype']
    self.coeffs = validateCoefficients(cmodel)
    # key = layer name, value = file name
    self.layers = validateLayers(cmodel)
    self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
    self.interpolations = validateInterpolations(cmodel, self.layers)
    self.units = validateUnits(cmodel, self.layers)
    self.gmused = [value for term, value in cmodel['terms'].items()
                   if 'pga' in value.lower() or 'pgv' in value.lower()
                   or 'mmi' in value.lower()]
    self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
    # self.numstd = numstd
    self.clips = validateClips(cmodel, self.layers, self.gmused)
    self.notes = ''
    if cmodel['baselayer'] not in list(self.layers.keys()):
        raise Exception('You must specify a base layer corresponding to '
                        'one of the files in the layer section.')
    self.saveinputs = saveinputs
    if slopefile is None:
        try:
            self.slopefile = cmodel['slopefile']
        except Exception:
            # print('Slopefile not specified in config, no slope '
            #       'thresholds will be applied\n')
            self.slopefile = None
    else:
        self.slopefile = slopefile
    if slopemod is None:
        try:
            self.slopemod = cmodel['slopemod']
        except Exception:
            self.slopemod = None

    # See if trimfile exists
    if trimfile is not None:
        if not os.path.exists(trimfile):
            print('trimfile defined does not exist: %s\nOcean will not '
                  'be trimmed' % trimfile)
            self.trimfile = None
        elif os.path.splitext(trimfile)[1] != '.shp':
            print('trimfile must be a shapefile, ocean will not be '
                  'trimmed')
            self.trimfile = None
        else:
            self.trimfile = trimfile
    else:
        self.trimfile = None

    # Get month of event
    griddict, eventdict, specdict, fields, uncertainties = \
        getHeaderData(shakefile)
    MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]

    # Figure out how/if need to cut anything
    geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
    if bounds is not None:
        # Make sure bounds are within ShakeMap Grid
        if geodict.xmin < geodict.xmax:  # only if signs are not opposite
            if (geodict.xmin > bounds['xmin'] or
                    geodict.xmax < bounds['xmax'] or
                    geodict.ymin > bounds['ymin'] or
                    geodict.ymax < bounds['ymax']):
                print('Specified bounds are outside shakemap area, using '
                      'ShakeMap bounds instead.')
                bounds = None
    if bounds is not None:
        tempgdict = GeoDict.createDictFromBox(bounds['xmin'],
                                              bounds['xmax'],
                                              bounds['ymin'],
                                              bounds['ymax'],
                                              geodict.dx, geodict.dy,
                                              inside=False)
        # If Shakemap geodict crosses 180/-180 line, fix geodict so
        # things don't break
        if geodict.xmin > geodict.xmax:
            if tempgdict.xmin < 0:
                geodict._xmin -= 360.
            else:
                geodict._xmax += 360.
        gdict = geodict.getBoundsWithin(tempgdict)
    else:
        gdict = geodict

    # Now find the layer that is our base layer and get the largest
    # bounds we can guarantee not to exceed shakemap bounds
    basefile = self.layers[cmodel['baselayer']]
    ftype = getFileType(basefile)
    if ftype == 'esri':
        basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
        if basegeodict == gdict:
            sampledict = gdict
        else:
            sampledict = basegeodict.getBoundsWithin(gdict)
    elif ftype == 'gmt':
        basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
        if basegeodict == gdict:
            sampledict = gdict
        else:
            sampledict = basegeodict.getBoundsWithin(gdict)
    else:
        raise Exception('All predictor variable grids must be a valid '
                        'GMT or ESRI file type.')

    # Do we need to subdivide baselayer?
    if 'divfactor' in self.config[self.model].keys():
        divfactor = float(self.config[self.model]['divfactor'])
        if divfactor != 1.:
            # adjust sampledict so everything will be resampled
            newxmin = sampledict.xmin - sampledict.dx / 2. + \
                sampledict.dx / (2. * divfactor)
            newymin = sampledict.ymin - sampledict.dy / 2. + \
                sampledict.dy / (2. * divfactor)
            newxmax = sampledict.xmax + sampledict.dx / 2. - \
                sampledict.dx / (2. * divfactor)
            newymax = sampledict.ymax + sampledict.dy / 2. - \
                sampledict.dy / (2. * divfactor)
            newdx = sampledict.dx / divfactor
            newdy = sampledict.dy / divfactor
            sampledict = GeoDict.createDictFromBox(newxmin, newxmax,
                                                   newymin, newymax,
                                                   newdx, newdy,
                                                   inside=True)

    # Find slope thresholds, if applicable
    self.slopemin = 'none'
    self.slopemax = 'none'
    if self.slopefile is not None:
        try:
            self.slopemin = float(config[self.model]['slopemin'])
            self.slopemax = float(config[self.model]['slopemax'])
        except Exception:
            print('Could not find slopemin and/or slopemax in config. '
                  'No slope thresholds will be applied.')
            self.slopemin = 'none'
            self.slopemax = 'none'

    # Make temporary directory for hdf5 pytables file storage
    self.tempdir = tempfile.mkdtemp()

    # now load the shakemap, resampling and padding if necessary
    temp = ShakeGrid.load(shakefile)  # , adjust='res')
    self.shakedict = temp.getShakeDict()
    self.eventdict = temp.getEventDict()
    self.shakemap = {}

    # Read both PGA and PGV in, may need them for thresholds
    for gm in ['pga', 'pgv']:
        junkfile = os.path.join(self.tempdir, 'temp.bil')
        GDALGrid.copyFromGrid(temp.getLayer(gm)).save(junkfile)
        if gm in self.interpolations.keys():
            intermeth = self.interpolations[gm]
        else:
            intermeth = 'bilinear'
        junkgrid = quickcut(junkfile, sampledict, precise=True,
                            method=intermeth)
        if gm in self.clips:
            junkgrid.setData(np.clip(junkgrid.getData(),
                                     self.clips[gm][0],
                                     self.clips[gm][1]))
        self.shakemap[gm] = TempHdf(
            junkgrid, os.path.join(self.tempdir, '%s.hdf5' % gm))
        os.remove(junkfile)
    del temp

    # get updated geodict
    sampledict = junkgrid.getGeoDict()

    # take uncertainties into account, if available
    if uncertfile is not None:
        self.uncert = {}
        try:
            # Only read in the ones that will be needed
            temp = ShakeGrid.load(uncertfile)
            already = []
            for gm in self.gmused:
                if 'pgv' in gm:
                    gmsimp = 'pgv'
                elif 'pga' in gm:
                    gmsimp = 'pga'
                elif 'mmi' in gm:
                    gmsimp = 'mmi'
                if gmsimp in already:
                    continue
                junkfile = os.path.join(self.tempdir, 'temp.bil')
                GDALGrid.copyFromGrid(
                    temp.getLayer('std%s' % gmsimp)).save(junkfile)
                if gmsimp in self.interpolations.keys():
                    intermeth = self.interpolations[gmsimp]
                else:
                    intermeth = 'bilinear'
                junkgrid = quickcut(junkfile, sampledict, precise=True,
                                    method=intermeth)
                if gmsimp in self.clips:
                    junkgrid.setData(np.clip(junkgrid.getData(),
                                             self.clips[gmsimp][0],
                                             self.clips[gmsimp][1]))
                self.uncert['std' + gmsimp] = TempHdf(
                    junkgrid,
                    os.path.join(self.tempdir, 'std%s.hdf5' % gmsimp))
                already.append(gmsimp)
                os.remove(junkfile)
            del temp
        except Exception:
            print('Could not read uncertainty file, ignoring '
                  'uncertainties')
            self.uncert = None
    else:
        self.uncert = None

    # Load the predictor layers, save as hdf5 temporary files, put file
    # locations into a dictionary.

    # Will be replaced in the next section if a slopefile was defined
    self.nonzero = None

    # key = layer name, value = grid object
    self.layerdict = {}

    didslope = False
    for layername, layerfile in self.layers.items():
        start = timer()
        if isinstance(layerfile, list):
            for lfile in layerfile:
                if timeField == 'MONTH':
                    if lfile.find(MONTH) > -1:
                        layerfile = lfile
                        ftype = getFileType(layerfile)
                        interp = self.interpolations[layername]
                        temp = quickcut(layerfile, sampledict,
                                        precise=True, method=interp)
                        if layername in self.clips:
                            temp.setData(np.clip(
                                temp.getData(),
                                self.clips[layername][0],
                                self.clips[layername][1]))
                        self.layerdict[layername] = TempHdf(
                            temp, os.path.join(self.tempdir,
                                               '%s.hdf5' % layername))
                        del temp
        else:
            interp = self.interpolations[layername]
            temp = quickcut(layerfile, sampledict, precise=True,
                            method=interp)
            if layername in self.clips:
                temp.setData(np.clip(temp.getData(),
                                     self.clips[layername][0],
                                     self.clips[layername][1]))
            if layername == 'rock':
                # Convert unconsolidated sediments to a more reasonable
                # coefficient
                sub1 = temp.getData()
                # Change to mixed sed rock coeff
                sub1[sub1 <= -3.21] = -1.36
                temp.setData(sub1)
                self.notes += ('unconsolidated sediment coefficient '
                               'changed to -1.36 (weaker) from -3.22 to '
                               'better reflect that this unit is not '
                               'actually strong\n')
            self.layerdict[layername] = TempHdf(
                temp, os.path.join(self.tempdir, '%s.hdf5' % layername))
            td = temp.getGeoDict()
            if td != sampledict:
                raise Exception('Geodictionaries of resampled files do '
                                'not match')
            if layerfile == self.slopefile:
                flag = 0
                if self.slopemin == 'none' and self.slopemax == 'none':
                    flag = 1
                if self.slopemod is None:
                    slope1 = temp.getData().astype(float)
                    slope = 0
                else:
                    try:
                        slope = temp.getData().astype(float)
                        slope1 = eval(self.slopemod)
                    except Exception:
                        print('slopemod provided not valid, continuing '
                              'without slope thresholds.')
                        flag = 1
                if flag == 0:
                    nonzero = np.array([(slope1 > self.slopemin) &
                                        (slope1 <= self.slopemax)])
                    self.nonzero = nonzero[0, :, :]
                    del slope1
                    del slope
                else:
                    # Still remove areas where the slope equals exactly
                    # 0.0 to remove offshore liq areas.
                    nonzero = np.array([slope1 != 0.0])
                    self.nonzero = nonzero[0, :, :]
                    del slope1
                didslope = True
            del temp
        print('Loading %s layer: %1.1f sec' % (layername, timer() - start))

    if didslope is False and self.slopefile is not None:
        # Slope didn't get read in yet
        temp = quickcut(self.slopefile, sampledict, precise=True,
                        method='bilinear')
        flag = 0
        if self.slopemin == 'none' and self.slopemax == 'none':
            flag = 1
        if self.slopemod is None:
            slope1 = temp.getData().astype(float)
            slope = 0
        else:
            try:
                slope = temp.getData().astype(float)
                slope1 = eval(self.slopemod)
            except Exception:
                print('slopemod provided not valid, continuing without '
                      'slope thresholds')
                flag = 1
        if flag == 0:
            nonzero = np.array([(slope1 > self.slopemin) &
                                (slope1 <= self.slopemax)])
            self.nonzero = nonzero[0, :, :]
            del slope1
            del slope
        else:
            # Still remove areas where the slope equals exactly 0.0 to
            # remove offshore liq areas.
            nonzero = np.array([slope1 != 0.0])
            self.nonzero = nonzero[0, :, :]
            del slope1

    self.nuggets = [str(self.coeffs['b0'])]
    ckeys = list(self.terms.keys())
    ckeys.sort()
    for key in ckeys:
        term = self.terms[key]
        coeff = self.coeffs[key]
        self.nuggets.append('(%g * %s)' % (coeff, term))
    self.equation = ' + '.join(self.nuggets)
    self.geodict = sampledict
def check_input_extents(config, shakefile=None, bounds=None):
    """Make sure all input files exist and cover the extent desired.

    Args:
        config: ConfigObj of a single model.
        shakefile: Path to ShakeMap grid.xml file (used for bounds). If not
            provided, bounds must be provided.
        bounds: Dictionary of bounds with keys: 'xmin', 'xmax', 'ymin',
            'ymax'.

    Returns:
        tuple containing:
            notcovered: list of files that do not cover the entire area
                defined by bounds or shakefile
            newbounds: new dictionary of bounds of subarea of original
                bounds or shakefile extent that is covered by all input
                files
    """
    if shakefile is None and bounds is None:
        raise Exception('Must define either a shakemap file or bounds')
    modelname = list(config.keys())[0]  # list() needed under Python 3

    # Make dummy geodict to use
    if bounds is None:
        evdict = ShakeGrid.getFileGeoDict(shakefile)
    else:
        evdict = GeoDict.createDictFromBox(
            bounds['xmin'], bounds['xmax'],
            bounds['ymin'], bounds['ymax'],
            0.00001, 0.00001, inside=False)

    # Check extents of all input layers
    notcovered = []
    notcovgdicts = []
    newbounds = None
    for item, value in config[modelname]['layers'].items():
        if 'file' in value:
            filelook = value['file']
            if getFileType(filelook) == 'gmt':
                tmpgd, _ = GMTGrid.getFileGeoDict(filelook)
            else:
                tmpgd, _ = GDALGrid.getFileGeoDict(filelook)
            # See if tmpgd contains evdict
            contains = tmpgd.contains(evdict)
            if not contains:
                notcovered.append(filelook)
                notcovgdicts.append(tmpgd)

    if len(notcovered) > 0:
        # Figure out what bounds COULD be run
        xmins = [gd.xmin for gd in notcovgdicts]
        xmaxs = [gd.xmax for gd in notcovgdicts]
        ymins = [gd.ymin for gd in notcovgdicts]
        ymaxs = [gd.ymax for gd in notcovgdicts]

        # Pull in by a buffer of 0.05 degrees because mapio doesn't like
        # when bounds are exactly the same for getBoundsWithin
        newbounds = dict(xmin=evdict.xmin + 0.05,
                         xmax=evdict.xmax - 0.05,
                         ymin=evdict.ymin + 0.05,
                         ymax=evdict.ymax - 0.05)
        # Which edge is the problem?
        if evdict.xmin < np.max(xmins):
            newbounds['xmin'] = np.max(xmins) + 0.05
        if evdict.xmax > np.min(xmaxs):
            newbounds['xmax'] = np.min(xmaxs) - 0.05
        if evdict.ymin < np.max(ymins):
            newbounds['ymin'] = np.max(ymins) + 0.05
        if evdict.ymax > np.min(ymaxs):
            newbounds['ymax'] = np.min(ymaxs) - 0.05

        # See if this is a possible extent
        try:
            GeoDict.createDictFromBox(
                newbounds['xmin'], newbounds['xmax'],
                newbounds['ymin'], newbounds['ymax'],
                0.00001, 0.00001, inside=False)
        except BaseException:
            print('Cannot make new bounds that will work')
            newbounds = None

    return notcovered, newbounds
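# A hedged usage sketch for check_input_extents; the config object and
# bounds values below are hypothetical, only the call signature comes from
# the function above.
def _demo_check_input_extents(myconfig):
    bounds = {'xmin': -122.5, 'xmax': -121.5, 'ymin': 37.0, 'ymax': 38.0}
    notcovered, newbounds = check_input_extents(myconfig, bounds=bounds)
    if notcovered:
        print('Layers that do not fully cover the requested area:')
        for f in notcovered:
            print('  %s' % f)
        if newbounds is not None:
            print('Largest runnable subarea: %s' % newbounds)
    return notcovered, newbounds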
def holzer_liq(shakefile, config, uncertfile=None, saveinputs=False,
               modeltype=None, displmodel=None, probtype=None, bounds=None):
    """
    Method for computing the probability of liquefaction using the Holzer
    method, using the Wills et al. (2015) Vs30 map of California to define
    the susceptibility classes and the Fan et al. global water table model.
    """
    layers = config['holzer_liq_cal']['layers']
    vs30_file = layers['vs30']['file']
    wtd_file = layers['watertable']['file']
    shkgdict = ShakeGrid.getFileGeoDict(shakefile)
    fgeodict = GMTGrid.getFileGeoDict(vs30_file)[0]

    #---------------------------------------------------------------------
    # Loading info
    #---------------------------------------------------------------------
    shakemap = ShakeGrid.load(shakefile, fgeodict, resample=True,
                              method='linear', doPadding=True)
    PGA = shakemap.getLayer('pga').getData() / 100  # convert to g
    griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
        shakefile)
    mag = eventdict['magnitude']

    #---------------------------------------------------------------------
    # Logistic function parameters from Vs30
    #---------------------------------------------------------------------
    vs30_grid = GMTGrid.load(vs30_file)
    vs30 = vs30_grid.getData()
    a0 = np.zeros_like(vs30)
    b0 = np.zeros_like(vs30)
    c0 = np.zeros_like(vs30)
    a1 = np.zeros_like(vs30)
    b1 = np.zeros_like(vs30)
    c1 = np.zeros_like(vs30)
    for k, v in config['holzer_liq_cal']['parameters'].items():
        ind = np.where(vs30 == float(v[0]))
        a0[ind] = v[1]
        b0[ind] = v[2]
        c0[ind] = v[3]
        a1[ind] = v[4]
        b1[ind] = v[5]
        c1[ind] = v[6]

    #---------------------------------------------------------------------
    # Water table
    #---------------------------------------------------------------------
    wtd_grid = GMTGrid.load(wtd_file, fgeodict, resample=True,
                            method=layers['watertable']['interpolation'],
                            doPadding=True)
    tmp = wtd_grid._data
    tmp = np.nan_to_num(tmp)

    # Compute water weights
    w0, w1 = get_water_weights(tmp)

    #---------------------------------------------------------------------
    # Compute probability of liquefaction
    #---------------------------------------------------------------------
    prob0 = get_prob(PGA, a0, b0, c0, mag)
    prob1 = get_prob(PGA, a1, b1, c1, mag)
    prob = prob0 * w0 + prob1 * w1

    #---------------------------------------------------------------------
    # Turn output and inputs into grids and put in maplayers dictionary
    #---------------------------------------------------------------------
    maplayers = collections.OrderedDict()
    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'],
                                temp['shakemap_version'])
    modelsref = config['holzer_liq_cal']['shortref']
    modellref = config['holzer_liq_cal']['longref']
    modeltype = 'Holzer/Wills'
    maplayers['model'] = {
        'grid': GDALGrid(prob, fgeodict),
        'label': 'Probability',
        'type': 'output',
        'description': {
            'name': modelsref,
            'longref': modellref,
            'units': 'coverage',
            'shakemap': shakedetail,
            'parameters': {'modeltype': modeltype}
        }
    }
    if saveinputs is True:
        maplayers['pga'] = {
            'grid': GDALGrid(PGA, fgeodict),
            'label': 'PGA (g)',
            'type': 'input',
            'description': {'units': 'g', 'shakemap': shakedetail}
        }
        maplayers['vs30'] = {
            'grid': GDALGrid(vs30, fgeodict),
            'label': 'Vs30 (m/s)',
            'type': 'input',
            'description': {'units': 'm/s'}
        }
        maplayers['wtd'] = {
            'grid': GDALGrid(wtd_grid._data, fgeodict),
            'label': 'wtd (m)',
            'type': 'input',
            'description': {'units': 'm'}
        }
    return maplayers
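# The final Holzer probability above is a depth-weighted blend of the
# shallow and deep water-table curves. Assuming the weights returned by
# get_water_weights sum to one (not verified here), the blend is a simple
# convex combination; all values below are illustrative only.
def _demo_water_weight_blend():
    import numpy as np

    prob0 = np.array([0.30, 0.10, 0.05])  # shallow water-table curve
    prob1 = np.array([0.10, 0.02, 0.01])  # deep water-table curve
    w0 = np.array([1.0, 0.5, 0.0])
    w1 = 1.0 - w0
    # -> array([0.3 , 0.06, 0.01])
    return prob0 * w0 + prob1 * w1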
def quickcut(filename, gdict, tempname=None, extrasamp=5., method='bilinear',
             precise=True, cleanup=True, verbose=False, override=False):
    """
    Use gdal to trim a large global file down quickly so mapio can read it
    efficiently. (Cannot read ShakeMap .xml files directly; save as .bil
    first.)

    Args:
        filename (str): File path to original input file (raster).
        gdict (geodict): Geodictionary to cut around and align with.
        tempname (str): File path to desired location of clipped part of
            filename.
        extrasamp (float): Number of extra cells to cut around each edge of
            geodict to have a resampling buffer for future steps.
        method (str): If resampling is necessary, method to use.
        precise (bool): If True, will resample to the gdict as closely as
            possible; if False it will just roughly cut around the area of
            interest without changing resolution.
        cleanup (bool): If True, delete tempname after reading it back in.
        verbose (bool): If True, prints more details.
        override (bool): If True and filename's extent is not fully
            contained by gdict, read in the entire file (only used for
            ShakeMaps).

    Returns:
        New Grid2D layer.

    Note:
        This function uses the subprocess approach because
        ``gdal.Translate`` doesn't hang on the command until the file is
        created, which causes problems in the next steps.
    """
    if gdict.xmax < gdict.xmin:
        raise Exception('quickcut: your geodict xmax is smaller than xmin')

    try:
        filegdict = GDALGrid.getFileGeoDict(filename)
    except Exception:
        try:
            filegdict = GMTGrid.getFileGeoDict(filename)
        except Exception:
            raise Exception('Cannot get geodict for %s' % filename)

    if tempname is None:
        tempdir = tempfile.mkdtemp()
        tempname = os.path.join(tempdir, 'junk.tif')
        deltemp = True
    else:
        tempdir = None
        deltemp = False

    # if os.path.exists(tempname):
    #     os.remove(tempname)
    #     print('Temporary file already there, removing file')

    filegdict = filegdict[0]

    # Get the right methods for mapio (method) and gdal (method2); this
    # must be an if/elif chain so the final else does not clobber the
    # earlier assignments
    if method == 'linear':
        method2 = 'bilinear'
    elif method == 'nearest':
        method2 = 'near'
    elif method == 'bilinear':
        method = 'linear'
        method2 = 'bilinear'
    elif method == 'near':
        method = 'nearest'
        method2 = 'near'
    else:
        method2 = method

    if filegdict != gdict:
        # First cut without resampling
        tempgdict = GeoDict.createDictFromBox(
            gdict.xmin, gdict.xmax, gdict.ymin, gdict.ymax,
            filegdict.dx, filegdict.dy, inside=True)
        try:
            egdict = filegdict.getBoundsWithin(tempgdict)

            ulx = egdict.xmin - extrasamp * egdict.dx
            uly = egdict.ymax + extrasamp * egdict.dy
            lrx = egdict.xmax + (extrasamp + 1) * egdict.dx
            lry = egdict.ymin - (extrasamp + 1) * egdict.dy

            cmd = ('gdal_translate -a_srs EPSG:4326 -of GTiff -projwin '
                   '%1.8f %1.8f %1.8f %1.8f -r %s %s %s'
                   % (ulx, uly, lrx, lry, method2, filename, tempname))
        except Exception as e:
            if override:
                # When a ShakeMap is being loaded, sometimes it won't align
                # right because it's already cut to the area, so just load
                # the whole file
                cmd = ('gdal_translate -a_srs EPSG:4326 -of GTiff -r %s %s %s'
                       % (method2, filename, tempname))
            else:
                raise Exception('Failed to cut layer: %s' % e)

        rc, so, se = get_command_output(cmd)
        if not rc:
            raise Exception(se.decode())
        else:
            if verbose:
                print(so.decode())

        newgrid2d = GDALGrid.load(tempname)
        if precise:
            # Resample to exact geodictionary
            newgrid2d = newgrid2d.interpolate2(gdict, method=method)
        if cleanup:
            os.remove(tempname)
        if deltemp:
            shutil.rmtree(tempdir)
    else:
        ftype = GMTGrid.getFileType(filename)
        if ftype != 'unknown':
            newgrid2d = GMTGrid.load(filename)
        elif filename.endswith('.xml'):
            newgrid2d = ShakeGrid.load(filename)
        else:
            newgrid2d = GDALGrid.load(filename)

    return newgrid2d
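# A usage sketch for quickcut under assumed file paths (both names below are
# hypothetical): trim a global slope grid to a ShakeMap's extent, with the
# default 5-cell resampling buffer, before any heavier processing.
def _demo_quickcut():
    shakegdict = ShakeGrid.getFileGeoDict('grid.xml')
    slopegrid = quickcut('global_slope.grd', shakegdict,
                         extrasamp=5., method='bilinear', precise=True)
    return slopegrid.getGeoDict()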
def sampleFromGrid(grid, xypoints, method='nearest'):
    """
    Sample 2D grid object at each of a set of XY (decimal degrees) points.

    :param grid:
      Grid2D object from which to sample values.
    :param xypoints:
      2D numpy array of XY points, decimal degrees.
    :param method:
      Interpolation method, either 'nearest' or 'linear'.
    :returns:
      1D numpy array of grid values at each of input XY points.
    """
    # Minimal completion (the original body is not shown in the source);
    # assumes mapio's Grid2D.getValue(lat, lon, method=...), which accepts
    # arrays of coordinates.
    return grid.getValue(xypoints[:, 1], xypoints[:, 0], method=method)
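# A short usage sketch for the samplers above; the grid file name is
# hypothetical and xypoints are lon/lat pairs in decimal degrees.
def _demo_sample_grid_file():
    import numpy as np

    xypoints = np.array([[-122.0, 37.5],
                         [-121.8, 37.6]])
    # Returns one sampled value per input point
    return sampleGridFile('vs30.grd', xypoints, method='nearest')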
def hazus_liq(shakefile, config, uncertfile=None, saveinputs=False,
              modeltype=None, displmodel=None, probtype=None, bounds=None):
    """
    Method for computing the probability of liquefaction using the Hazus
    method, using the Wills et al. (2015) Vs30 map of California to define
    the susceptibility classes and the Fan et al. global water table model.
    """
    layers = config['hazus_liq_cal']['layers']
    vs30_file = layers['vs30']['file']
    wtd_file = layers['watertable']['file']
    shkgdict = ShakeGrid.getFileGeoDict(shakefile)
    fgeodict = GMTGrid.getFileGeoDict(vs30_file)[0]

    #---------------------------------------------------------------------
    # Loading
    #---------------------------------------------------------------------
    shakemap = ShakeGrid.load(shakefile, fgeodict, resample=True,
                              method='linear', doPadding=True)
    PGA = shakemap.getLayer('pga').getData() / 100  # convert to g
    griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
        shakefile)
    mag = eventdict['magnitude']

    # Correction factor for moment magnitudes other than M = 7.5
    k_m = 0.0027 * mag**3 - 0.0267 * mag**2 - 0.2055 * mag + 2.9188

    #---------------------------------------------------------------------
    # Susceptibility from Vs30
    #---------------------------------------------------------------------
    vs30_grid = GMTGrid.load(vs30_file)
    vs30 = vs30_grid.getData()
    p_ml = np.zeros_like(vs30)
    a = np.zeros_like(vs30)
    b = np.zeros_like(vs30)
    for k, v in config['hazus_liq_cal']['parameters'].items():
        ind = np.where(vs30 == float(v[0]))
        if v[1] == "VH":
            p_ml[ind] = 0.25
            a[ind] = 9.09
            b[ind] = -0.82
        if v[1] == "H":
            p_ml[ind] = 0.2
            a[ind] = 7.67
            b[ind] = -0.92
        if v[1] == "M":
            p_ml[ind] = 0.1
            a[ind] = 6.67
            b[ind] = -1.0
        if v[1] == "L":
            p_ml[ind] = 0.05
            a[ind] = 5.57
            b[ind] = -1.18
        if v[1] == "VL":
            p_ml[ind] = 0.02
            a[ind] = 4.16
            b[ind] = -1.08

    # Conditional liquefaction probability for a given susceptibility
    # category at a specified PGA
    p_liq_pga = a * PGA + b
    p_liq_pga = p_liq_pga.clip(min=0, max=1)

    #---------------------------------------------------------------------
    # Water table
    #---------------------------------------------------------------------
    wtd_grid = GMTGrid.load(wtd_file, fgeodict, resample=True,
                            method=layers['watertable']['interpolation'],
                            doPadding=True)
    tmp = wtd_grid._data
    tmp = np.nan_to_num(tmp)

    # Convert to feet
    wt_ft = tmp * 3.28084

    # Correction factor for groundwater depths other than five feet
    k_w = 0.022 * wt_ft + 0.93

    #---------------------------------------------------------------------
    # Combine to get conditional liquefaction probability
    #---------------------------------------------------------------------
    p_liq_sc = p_liq_pga * p_ml / k_m / k_w

    #---------------------------------------------------------------------
    # Turn output and inputs into grids and put in maplayers dictionary
    #---------------------------------------------------------------------
    maplayers = collections.OrderedDict()
    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'],
                                temp['shakemap_version'])
    modelsref = config['hazus_liq_cal']['shortref']
    modellref = config['hazus_liq_cal']['longref']
    modeltype = 'Hazus/Wills'
    maplayers['model'] = {
        'grid': GDALGrid(p_liq_sc, fgeodict),
        'label': 'Probability',
        'type': 'output',
        'description': {
            'name': modelsref,
            'longref': modellref,
            'units': 'coverage',
            'shakemap': shakedetail,
            'parameters': {'modeltype': modeltype}
        }
    }
    if saveinputs is True:
        maplayers['pga'] = {
            'grid': GDALGrid(PGA, fgeodict),
            'label': 'PGA (g)',
            'type': 'input',
            'description': {'units': 'g', 'shakemap': shakedetail}
        }
        maplayers['vs30'] = {
            'grid': GDALGrid(vs30, fgeodict),
            'label': 'Vs30 (m/s)',
            'type': 'input',
            'description': {'units': 'm/s'}
        }
        maplayers['wtd'] = {
            'grid': GDALGrid(wtd_grid._data, fgeodict),
            'label': 'wtd (m)',
            'type': 'input',
            'description': {'units': 'm'}
        }
    return maplayers
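# As a sanity check on the two Hazus correction factors above, both land
# close to 1 at their reference conditions (M 7.5 and a five-foot water
# table), consistent with the factors being normalized to those values.
def _demo_hazus_corrections():
    mag = 7.5
    k_m = 0.0027 * mag**3 - 0.0267 * mag**2 - 0.2055 * mag + 2.9188
    wt_ft = 5.0
    k_w = 0.022 * wt_ft + 0.93
    return round(k_m, 3), k_w  # -> (1.015, 1.04)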
def slhrf_liq(shakefile, config, uncertfile=None, saveinputs=False,
              modeltype=None, displmodel=None, probtype=None, bounds=None):
    """
    Method for computing the probability of liquefaction using the SLHRF,
    primarily relying on the Wills et al. (2015) Vs30 map of California and
    HydroSHEDS distance to rivers.
    """
    layers = config['slhrf_liq_cal']['layers']
    vs30_file = layers['vs30']['file']
    elev_file = layers['elev']['file']
    dc_file = layers['dc']['file']
    dr_file = layers['dr']['file']
    fgeodict = GMTGrid.getFileGeoDict(vs30_file)[0]

    #---------------------------------------------------------------------
    # Read in data layers
    #---------------------------------------------------------------------
    shakemap = ShakeGrid.load(shakefile, fgeodict, resample=True,
                              method='linear', doPadding=True)
    PGA = shakemap.getLayer('pga').getData() / 100  # convert to g
    griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
        shakefile)
    mag = eventdict['magnitude']
    vs30_grid = GMTGrid.load(vs30_file)
    vs30 = vs30_grid.getData()
    elev = GDALGrid.load(elev_file, fgeodict, resample=True,
                         method=layers['elev']['interpolation'],
                         doPadding=True).getData()
    dc = GDALGrid.load(dc_file, fgeodict, resample=True,
                       method=layers['dc']['interpolation'],
                       doPadding=True).getData()
    dr = GDALGrid.load(dr_file, fgeodict, resample=True,
                       method=layers['dr']['interpolation'],
                       doPadding=True).getData()
    dw = np.minimum(dr, dc)

    #---------------------------------------------------------------------
    # Evaluate the different factors
    #---------------------------------------------------------------------
    Fgeo = np.zeros_like(vs30)
    for k, v in config['slhrf_liq_cal']['parameters'].items():
        ind = np.where(vs30 == float(v[0]))
        Fgeo[ind] = float(v[1])
    Fz = z_factor(elev)
    Fmag = mag_factor(mag)
    Fpga = pga_factor(PGA)
    Fdw = dw_factor(dw)
    Fnehrp = nehrp_factor(vs30)

    #---------------------------------------------------------------------
    # Combine factors
    #---------------------------------------------------------------------
    SLHRF = Fz * Fmag * Fpga * Fdw * Fgeo * Fnehrp

    # Transform into a 'probability'
    prob = 0.4 * (1 - np.exp(-0.2 * SLHRF**2))

    #---------------------------------------------------------------------
    # Turn output and inputs into grids and put in maplayers dictionary
    #---------------------------------------------------------------------
    maplayers = collections.OrderedDict()
    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'],
                                temp['shakemap_version'])
    modelsref = config['slhrf_liq_cal']['shortref']
    modellref = config['slhrf_liq_cal']['longref']
    modeltype = 'SLHRF/Wills'
    maplayers['model'] = {
        'grid': GDALGrid(prob, fgeodict),
        'label': 'Probability',
        'type': 'output',
        'description': {
            'name': modelsref,
            'longref': modellref,
            'units': 'coverage',
            'shakemap': shakedetail,
            'parameters': {'modeltype': modeltype}
        }
    }
    if saveinputs is True:
        maplayers['slhrf'] = {
            'grid': GDALGrid(SLHRF, fgeodict),
            'label': 'SLHRF',
            'type': 'input',
            'description': {'units': 'none'}
        }
        maplayers['pga'] = {
            'grid': GDALGrid(PGA, fgeodict),
            'label': 'PGA (g)',
            'type': 'input',
            'description': {'units': 'g', 'shakemap': shakedetail}
        }
        maplayers['vs30'] = {
            'grid': GDALGrid(vs30, fgeodict),
            'label': 'Vs30 (m/s)',
            'type': 'input',
            'description': {'units': 'm/s'}
        }
        maplayers['dw'] = {
            'grid': GDALGrid(dw, fgeodict),
            'label': 'dw (km)',
            'type': 'input',
            'description': {'units': 'km'}
        }
        maplayers['elev'] = {
            'grid': GDALGrid(elev, fgeodict),
            'label': 'elev (m)',
            'type': 'input',
            'description': {'units': 'm'}
        }
        maplayers['FPGA'] = {
            'grid': GDALGrid(Fpga, fgeodict),
            'label': 'Fpga',
            'type': 'input',
            'description': {'units': 'none'}
        }
        maplayers['FDW'] = {
            'grid': GDALGrid(Fdw, fgeodict),
            'label': 'Fdw',
            'type': 'input',
            'description': {'units': 'none'}
        }
        maplayers['FGEO'] = {
            'grid': GDALGrid(Fgeo, fgeodict),
            'label': 'Fgeo',
            'type': 'input',
            'description': {'units': 'none'}
        }
        maplayers['FZ'] = {
            'grid': GDALGrid(Fz, fgeodict),
            'label': 'Fz',
            'type': 'input',
            'description': {'units': 'none'}
        }
        maplayers['FNEHRP'] = {
            'grid': GDALGrid(Fnehrp, fgeodict),
            'label': 'Fnehrp',
            'type': 'input',
            'description': {'units': 'none'}
        }
    return maplayers
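# The transform at the end of slhrf_liq maps the unbounded SLHRF score onto
# a probability-like value that saturates at 0.4; a quick numeric check:
def _demo_slhrf_transform():
    import numpy as np

    slhrf = np.array([0.0, 1.0, 2.0, 5.0, 10.0])
    prob = 0.4 * (1 - np.exp(-0.2 * slhrf**2))
    # -> array([0.   , 0.073, 0.22 , 0.397, 0.4  ])
    return np.round(prob, 3)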