def main(args,config):
    """Compare observed station ground motions against modeled ShakeMap values and save a QA PDF for the event."""
    eventid = args.eventID
    shakehome = config.get('SHAKEMAP','shakehome')
    xmlfile = os.path.join(shakehome,'data',eventid,'input',args.dataFile)
    gridfile = os.path.join(shakehome,'data',eventid,'output','grid.xml')
    #list of grid.xml variable names and corresponding data file variable names
    variables = [('PGA','acc'),('PGV','vel'),('PSA03','psa03'),('PSA10','psa10'),('PSA30','psa30')]
    shakemap = ShakeGrid(gridfile,variable='MMI') #variable doesn't matter here; we only need metadata
    gdict = shakemap.getGeoDict()
    atts = shakemap.getAttributes()
    location = atts['event']['event_description']
    etime = atts['event']['event_timestamp']
    epilat = atts['event']['lat']
    epilon = atts['event']['lon']
    nrows = gdict['nrows']
    ncols = gdict['ncols']
    root = minidom.parse(xmlfile)
    f = plt.figure(figsize=(8.5,11))
    pnum = 1
    pgaobs = []
    pgaexp = []
    pgadist = []
    for vartuple in variables:
        gridvar,stationvar = vartuple
        shakemap = ShakeGrid(gridfile,variable=gridvar)
        stations = root.getElementsByTagName('station')
        observed = []
        expected = []
        for i in range(0,len(stations)):
            station = stations[i]
            lat = float(station.getAttribute('lat'))
            lon = float(station.getAttribute('lon'))
            row,col = shakemap.getRowCol(lat,lon)
            #skip stations that fall outside the ShakeMap grid
            if row < 0 or row >= nrows or col < 0 or col >= ncols:
                continue
            pgael = station.getElementsByTagName('comp')[0].getElementsByTagName(stationvar)[0]
            pga = float(pgael.getAttribute('value'))
            gridpga = shakemap.getValue(lat,lon)
            observed.append(pga)
            expected.append(gridpga)
            if gridvar == 'PGA':
                pgaobs.append(pga)
                pgaexp.append(gridpga)
                distance,az1,az2 = gps2DistAzimuth(epilat,epilon,lat,lon)
                pgadist.append(distance/1000.0)
        observed = np.array(observed)
        expected = np.array(expected)
        xmax = observed.max()
        ymax = expected.max()
        dmax = max(xmax,ymax) * 1.05
        v = [0,dmax,0,dmax]
        plt.subplot(3,2,pnum)
        plt.plot(observed,expected,'b.')
        plt.xlabel('Observed %s' % gridvar)
        plt.ylabel('Modeled %s' % gridvar)
        plt.axis(v)
        pnum += 1

    #Add in one final plot - pga differences vs distance, just to see if that's a factor
    pgaobs = np.array(pgaobs)
    pgaexp = np.array(pgaexp)
    pgadist = np.array(pgadist)
    pgadiff = np.power((pgaobs-pgaexp),2)
    mdiff = np.mean(pgadiff)
    stddiff = np.std(pgadiff)
    ymax = mdiff + 2*stddiff
    plt.subplot(3,2,6)
    plt.plot(pgadist,pgadiff,'b.')
    plt.ylabel('pga diff (squared)')
    plt.xlabel('Distance (km)')
    plt.axis([0,pgadist.max(),0,ymax])
    f.suptitle('Event %s %s - %s' % (eventid,etime.strftime('%Y-%m-%d %H:%M:%S'),location))
    plt.savefig('%s_qa.pdf' % eventid)
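# A minimal, self-contained sketch of the QA comparison above, using synthetic
# numpy arrays in place of the ShakeGrid/station XML reads (which are assumed
# to yield per-station observed and grid-interpolated values like these).
# Illustrative only; the real script gets its data from grid.xml and the
# station data file.
def _qa_sketch():
    import numpy as np
    import matplotlib.pyplot as plt

    rng = np.random.default_rng(0)
    observed = rng.uniform(1, 50, size=100)            # stand-in for station PGA values
    expected = observed * rng.normal(1.0, 0.2, 100)    # stand-in for modeled PGA at stations
    distance = rng.uniform(5, 300, size=100)           # stand-in epicentral distances (km)

    fig = plt.figure(figsize=(8.5, 11))
    # observed vs. modeled scatter, square axes scaled to the data
    plt.subplot(2, 1, 1)
    plt.plot(observed, expected, 'b.')
    dmax = max(observed.max(), expected.max()) * 1.05
    plt.axis([0, dmax, 0, dmax])
    plt.xlabel('Observed PGA')
    plt.ylabel('Modeled PGA')
    # squared misfit vs. distance, y-axis clipped at mean + 2*std
    diff2 = (observed - expected) ** 2
    plt.subplot(2, 1, 2)
    plt.plot(distance, diff2, 'b.')
    plt.axis([0, distance.max(), 0, diff2.mean() + 2 * diff2.std()])
    plt.xlabel('Distance (km)')
    plt.ylabel('pga diff (squared)')
    fig.savefig('qa_sketch.pdf')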
def main(args):
    """Sample coverage, ShakeMap, and global predictor grids onto a common grid and write them out, plus a flat .dat table."""
    #read in global config file
    configfile = os.path.join(os.path.expanduser('~'),'.lsprocess','lsprocess.cfg')
    hasconfig = os.path.isfile(configfile)
    if not hasconfig:
        print()
        print('No config file "%s" found.' % configfile)
        print()
        sys.exit(1)
    global_grids,outfolder = readConfig(configfile) #returns a dictionary just like global_config above

    #read in event specific grid file
    try:
        covdict,predictors,ename = parseEvent(args.eventfile)
    except Exception as msg:
        print('There is something wrong with your event file. See errors below.')
        print(msg)
        sys.exit(1)

    #construct output folder from global/event configs
    outfolder = os.path.join(outfolder,ename)
    if not os.path.isdir(outfolder):
        os.mkdir(outfolder)

    #look for bounding box and resolution in event config file, or get from shakemap
    bbox = None
    shakemap = ShakeGrid(predictors['shakemap'][0],'MMI')
    if 'bbox' in covdict:
        bbox = covdict['bbox']
    else:
        #bbox = shakemap.getRange()
        #default to the bounding box of the coverage data
        with fiona.open(covdict['filename']) as src:
            tbbox = src.bounds
            #fiona bounds are (minx,miny,maxx,maxy); reorder to (xmin,xmax,ymin,ymax)
            bbox = (tbbox[0],tbbox[2],tbbox[1],tbbox[3])
    if 'resolution' in covdict:
        resolution = covdict['resolution']
    else:
        resolution = shakemap.getGeoDict()['xdim']

    #get input coverage projection from event config OR from .prj file
    #projstr = covdict['projstr']
    #get format of coverage, check against list of supported fiona formats, read in data
    #we'll do other support later
    #if necessary, project coverage into lat/lon
    #skip projection for now as well

    #determine what the grid shape and (potentially) new bbox is given bbox and resolution
    nrows,ncols,bbox = getShape(bbox,resolution)
    #if the coverage dataset is larger than the ShakeMap, we need to make sure our output grid
    #is contained by the shakemap for interpolation purposes.
    shakebounds = shakemap.getRange()
    shakexdim,shakeydim = (shakemap.geodict['xdim'],shakemap.geodict['ydim'])
    xmin = max(bbox[0],shakebounds[0]+shakexdim*2)
    xmax = min(bbox[1],shakebounds[1]-shakexdim*2)
    ymin = max(bbox[2],shakebounds[2]+shakeydim*2)
    ymax = min(bbox[3],shakebounds[3]-shakeydim*2)
    geodict = {'xdim':resolution,'ydim':resolution,
               'xmin':xmin,'xmax':xmax,
               'ymin':ymin,'ymax':ymax,
               'nrows':nrows,'ncols':ncols}

    #rasterize projected coverage using the defined bounding box and resolution
    shpfile = covdict['filename']
    print('Creating coverage grid...')
    covgrid = makeCoverageGrid(shpfile,geodict)
    outgridfile = os.path.join(outfolder,'coverage.grd')
    print('Saving coverage to %s...' % outgridfile)
    covgrid.save(outgridfile)

    #make a grid of lat,lon values
    row = np.arange(0,nrows)
    col = np.arange(0,ncols)
    rows = repmat(row,ncols,1).T
    cols = repmat(col,nrows,1)
    lat,lon = covgrid.getLatLon(rows,cols)

    #create a dictionary of flattened arrays that we'll dump out to a text file when done
    vardict = {}
    vardict['coverage'] = covgrid.griddata.flatten()
    vardict['lat'] = lat.flatten()
    vardict['lon'] = lon.flatten()

    #subset shakemap and global grids using defined bounding box and resolution
    shakefile = predictors['shakemap'][0]
    variables = predictors['shakemap'][1]
    for var in variables:
        shakemap = ShakeGrid(shakefile,var.upper())
        shakemap.interpolateToGrid(geodict)
        gmtshake = GMTGrid()
        gmtshake.geodict = shakemap.geodict
        gmtshake.griddata = shakemap.griddata
        outshakefile = os.path.join(outfolder,'%s.grd' % var)
        print('Saving %s to %s...' % (var,outshakefile))
        gmtshake.save(outshakefile)
        vardict[var] = gmtshake.griddata.flatten()

    #write netcdf versions of coverage, shakemap, and global grids to output folder
    for gridname,gridfile in global_grids.items():
        if not os.path.isfile(gridfile):
            continue #skip grids that aren't actually present on the file system
        try:
            grid = sampleGrid(gridfile,geodict)
        except Exception as e:
            print('There was an error while sampling the "%s" grid "%s". - "%s"' % (gridname,gridfile,str(e)))
            continue
        outgridfile = os.path.join(outfolder,gridname+'.grd')
        print('Saving %s to %s...' % (gridname,outgridfile))
        grid.save(outgridfile)
        vardict[gridname] = grid.griddata.flatten()

    #create text file with columns of data for all predictor variables
    firstcols = ['lat','lon','coverage']
    outmat = np.zeros((nrows*ncols,len(vardict)))
    for i in range(0,len(firstcols)):
        col = firstcols[i]
        outmat[:,i] = vardict[col]
    colidx = i+1
    colnames = []
    for col in vardict:
        if col in firstcols:
            continue
        outmat[:,colidx] = vardict[col]
        colnames.append(col)
        colidx += 1
    colnames = firstcols + colnames
    m,n = outmat.shape
    datfile = os.path.join(outfolder,'%s.dat' % ename)
    print('Saving all variables to data file %s...' % datfile)
    with open(datfile,'wt') as f:
        f.write(','.join(colnames)+'\n')
        for i in range(0,m):
            line = ','.join('%.4f' % col for col in outmat[i,:])
            f.write(line+'\n')
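# Hypothetical sketch of reading the .dat file written above back into arrays.
# Column names come from the header row that main() writes; 'sampledata.dat'
# is a placeholder path, not a file produced by this package.
def _load_predictor_table(datfile='sampledata.dat'):
    import numpy as np
    table = np.genfromtxt(datfile, delimiter=',', names=True)
    lat = table['lat']
    lon = table['lon']
    coverage = table['coverage']  # rasterized coverage column
    predictors = [n for n in table.dtype.names if n not in ('lat', 'lon', 'coverage')]
    return lat, lon, coverage, predictors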
def main(args):
    """Compute alert (warning) time grids for a set of epicenter realizations, map each one, and summarize them across realizations."""
    globaldict = getGlobalConfig()
    shakehome = globaldict['shakehome']
    popfile = globaldict['popfile']
    if shakehome is None:
        print('Cannot find ShakeMap home folder on this system.')
        sys.exit(1)
    datadir = os.path.join(shakehome,'data',args.event)
    if not os.path.isdir(datadir):
        print('Cannot find event %s on the system' % args.event)
        sys.exit(1)
    #Make sure the timeoutput folder is there (we can't put our time grids in output - that gets
    #wiped out every time shakemap runs)
    outfolder = os.path.join(datadir,'timeoutput')
    if not os.path.isdir(outfolder):
        os.makedirs(outfolder)
    #now look for config file in top-level folder
    configfile = os.path.join(datadir,'alert.conf')
    if not os.path.isfile(configfile):
        print('Cannot find alert config file for %s in the data directory' % args.event)
        sys.exit(1)
    config = ConfigParser.ConfigParser()
    config.read(configfile)
    #get the bounds of the map so we can find cities
    xmin = float(config.get('MAP','xmin'))
    xmax = float(config.get('MAP','xmax'))
    ymin = float(config.get('MAP','ymin'))
    ymax = float(config.get('MAP','ymax'))
    citylist = getCityList(xmin,xmax,ymin,ymax,globaldict['cityfile'])
    #Get the MMI threshold below which alert times will NOT be saved
    mmithresh = float(config.get('MAP','mmithresh'))
    #get the array of epicenters
    lats = [float(p) for p in config.get('FAULT','lats').split()]
    lons = [float(p) for p in config.get('FAULT','lons').split()]
    #write out a new grind.conf file
    writeGrind(config,datadir)
    #instantiate our p/s travel time calculator
    calc = TravelTimeCalculator()
    #where is the grind binary?
    grindbin = os.path.join(shakehome,'bin','grind')
    #specify the event.xml file, get the depth of the event
    eventfile = os.path.join(datadir,'input','event.xml')
    root = parse(eventfile)
    eq = root.getElementsByTagName('earthquake')[0]
    depth = float(eq.getAttribute('depth'))
    root.unlink()
    #get the dimensionality of the grid file and of the pop grid we'll interpolate to
    gridfile = os.path.join(datadir,'output','grid.xml')
    if not os.path.isfile(gridfile):
        grindcmd = '%s -event %s' % (grindbin,args.event)
        res,stdout,stderr = getCommandOutput(grindcmd)
    mmigrid = ShakeGrid(gridfile,variable='MMI')
    popgrid = EsriGrid(popfile)
    popgrid.load(bounds=mmigrid.getRange())
    m,n = popgrid.griddata.shape

    #loop over all the event realizations
    timefiles = []
    timestack = np.zeros((m,n,len(lats)),dtype=np.float32)
    for i in range(0,len(lats)):
        print('Calculating arrival times for scenario %i of %i' % (i+1,len(lats)))
        lat = lats[i]
        lon = lons[i]
        if i == 0:
            lonoff = 0
            latoff = 0
        else:
            lonoff = -1 * (lons[i] - lons[i-1])
            latoff = lats[i] - lats[i-1]
        #modify the event.xml file to have the new lat/lon epicenter
        sourcetext = getEventText(eventfile,lat,lon)
        with open(eventfile,'wt') as f:
            f.write(sourcetext)
        sdict = getSlowestStation(lat,lon,depth,calc)
        ptime = sdict['time']
        stationlat = sdict['lat']
        stationlon = sdict['lon']
        grindcmd = '%s -latoff %f -lonoff %f -event %s' % (grindbin,latoff,lonoff,args.event)
        res,stdout,stderr = getCommandOutput(grindcmd)
        if not res:
            print('Grind command failed: "%s", "%s"' % (stdout,stderr))
            sys.exit(1)
        #Get the grid.xml output, do some time calculations
        mmigrid = ShakeGrid(gridfile,variable='MMI')
        timegrid = np.zeros((m,n),dtype=np.float32)
        for row in range(0,m):
            for col in range(0,n):
                mmilat,mmilon = mmigrid.getLatLon(row,col)
                distance = locations2degrees(lat,lon,mmilat,mmilon)
                tmp,stime = calc.getTravelTimes(distance,depth)
                timegrid[row,col] = stime - ptime
        #debugging plot of the MMI and warning time grids
        f = plt.figure()
        plt.subplot(2,1,1)
        plt.imshow(mmigrid.griddata)
        plt.colorbar()
        plt.subplot(2,1,2)
        plt.imshow(timegrid)
        plt.colorbar()
        plt.savefig(os.path.join(outfolder,'timegrid.png'))
        plt.close(f)
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            exposure,timegrid = getTimeExposure(timegrid,mmigrid,popfile,mmithresh)
        print('Population Warning Times for epicenter %.4f,%.4f' % (lat,lon))
        printExposure(exposure)
        expofile = os.path.join(outfolder,'expo%03i.json' % (i+1))
        with open(expofile,'wt') as f:
            f.write(json.dumps(exposure))
        timefile = os.path.join(outfolder,'timegrid%03i.flt' % (i+1))
        timefiles.append(timefile)
        metadict = {'epilat':lat,'epilon':lon,'eventid':args.event}
        saveTimeGrid(timefile,timegrid,mmigrid.geodict,metadict)
        timestack[:,:,i] = timegrid
        alertgrid = popgrid
        alertgrid.griddata = timegrid
        makeMap(alertgrid,'alertmap_%i' % i,outfolder,popfile,globaldict['popcolormap'],sdict,citylist,[lat],[lon])

    #summarize warning times across all realizations with the requested statistics
    methods = config.get('MAP','output').split(',')
    for method in methods:
        if method == 'median':
            statgrid = np.median(timestack,axis=2)
        elif method == 'mean':
            statgrid = np.nanmean(timestack,axis=2)
        elif method == 'min':
            statgrid = np.nanmin(timestack,axis=2)
        elif method == 'max':
            statgrid = np.nanmax(timestack,axis=2)
        timegrid = popgrid
        timegrid.griddata = statgrid
        makeMap(timegrid,method,outfolder,popfile,globaldict['popcolormap'],sdict,citylist,lats,lons)
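# Sketch of the per-cell statistics taken over the realization stack above:
# timestack is (rows, cols, n_realizations), and each summary grid collapses
# the third axis. Pure numpy, independent of the grid classes used in main().
def _summarize_timestack(timestack, method):
    import numpy as np
    funcs = {'median': np.nanmedian,
             'mean': np.nanmean,
             'min': np.nanmin,
             'max': np.nanmax}
    return funcs[method](timestack, axis=2)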
from Correlation.loop import main
from Correlation.realizations import realizations
from Correlation.plotting import plot

voi = 'PGA'
r = [45]
num_realizations = 100
corr_model = 'JB2009'
vscorr = True
plot_on = False

for R in range(0, np.size(r)):
    radius = r[R]
    # Get shakemap for desired variable, PGA, uncertainty grid and stationdata
    shakemap = ShakeGrid('Inputs/grid.xml', variable='%s' % voi)
    # Uncertainty Data: Units in ln(pctg)
    unc_INTRA = ShakeGrid('Inputs/uncertainty.xml', variable='GMPE_INTRA_STD%s' % voi)
    unc_INTER = ShakeGrid('Inputs/uncertainty.xml', variable='GMPE_INTER_STD%s' % voi)
    # Station Data: Units in pctg
    stationlist = 'Inputs/stationlist.xml'
    stationdata = readStation(stationlist)
    print('Calling initialize')
    # pass both the intra- and inter-event uncertainty grids along with the station data
    variables = initialize(shakemap, unc_INTRA, unc_INTER, stationdata)
    print('Radius: ', radius)
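# The initialize() internals are not shown here. As a point of reference, a
# common way to combine the within-event (intra) and between-event (inter)
# standard deviations read above -- both in ln units -- is in quadrature.
# This helper is a hedged sketch, not part of the Correlation package API.
def total_sigma(sigma_intra, sigma_inter):
    import numpy as np
    return np.sqrt(np.asarray(sigma_intra) ** 2 + np.asarray(sigma_inter) ** 2)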
def getShakeMapParams(self,config,shakefile,geodict):
    """Load the requested ShakeMap layers (and magnitude) into a dictionary keyed by parameter name."""
    #get the shakemap params the user wants
    smparams = config.get('SHAKEMAP','variables').split(',')
    intset = set(SM_TERMS).intersection(smparams)
    if not len(intset):
        print('The allowed ShakeMap variables are: "%s". Your config file has "%s".' % (str(SM_TERMS),str(smparams)))
        sys.exit(1)
    shakedict = {}
    shakemap = None
    #load each requested ground motion layer, interpolate it to the output geodict,
    #and wrap it in a GMT grid
    for param in ('PGA','PGV','MMI'):
        if param not in smparams:
            continue
        shakemap = ShakeGrid(shakefile,variable=param)
        shakemap.interpolateToGrid(geodict)
        tmpgrid = gmt.GMTGrid()
        tmpgrid.loadFromGrid(shakemap)
        shakedict[param] = tmpgrid
    if 'MW' in smparams:
        if shakemap is None:
            shakemap = ShakeGrid(shakefile,variable='MMI')
        attdict = shakemap.getAttributes()
        shakedict['MW'] = attdict['event']['magnitude']
    return shakedict
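# Small standalone sketch of the validation step at the top of
# getShakeMapParams(): the requested ShakeMap variables are intersected with
# the allowed terms, and an empty intersection is treated as a user error.
# 'sm_terms' here is a stand-in for the module-level SM_TERMS constant assumed above.
def _check_shakemap_params(smparams, sm_terms=('PGA', 'PGV', 'MMI', 'MW')):
    requested = set(p.strip().upper() for p in smparams)
    allowed = requested.intersection(sm_terms)
    if not allowed:
        raise ValueError('The allowed ShakeMap variables are: "%s". You asked for "%s".'
                         % (str(sm_terms), str(smparams)))
    return allowed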
def main(args):
    """Run the liquefaction and landslide logistic models for a ShakeMap and render the combined hazard map plus supporting grids."""
    #define location for config file
    homedir = os.path.expanduser("~") #where is the user's home directory?
    configfile = args.configFile
    shakefile = args.shakefile
    if not os.path.isfile(shakefile):
        if isURL(shakefile):
            shakefile = getGridURL(shakefile) #returns a file object
        else:
            print('Could not find "%s" as a file or a url. Returning.' % (shakefile))
            return
    shakemap = ShakeGrid(shakefile)
    #figure out the bounds that are greater than the biggest bounds
    #of any of the grids
    shakerange = shakemap.getRange()
    lonrange = shakerange[1] - shakerange[0]
    latrange = shakerange[3] - shakerange[2]
    xmin = shakerange[0] - lonrange*0.1
    xmax = shakerange[1] + lonrange*0.1
    ymin = shakerange[2] - latrange*0.1
    ymax = shakerange[3] + latrange*0.1
    bigbounds = (xmin,xmax,ymin,ymax)

    shakeheader = shakemap.getAttributes()
    edict = {'mag':shakeheader['event']['magnitude'],
             'time':shakeheader['event']['event_timestamp'],
             'loc':shakeheader['event']['event_description'],
             'epicenter':(shakeheader['event']['lat'],shakeheader['event']['lon']),
             'version':int(shakeheader['shakemap_grid']['shakemap_version']),
             'eventid':shakeheader['shakemap_grid']['event_id']}
    config = ConfigParser.RawConfigParser()
    config.read(configfile)
    network = shakeheader['shakemap_grid']['shakemap_originator']
    eventcode = shakeheader['shakemap_grid']['shakemap_id']
    if eventcode.startswith(network):
        eventid = eventcode
    else:
        eventid = network + eventcode
    outfolder = os.path.join(config.get('OUTPUT','folder'),eventid)
    if not os.path.isdir(outfolder):
        os.makedirs(outfolder)
    slopefile = config.get('MAPDATA','slope')
    slopegrid = GMTGrid(slopefile,bounds=shakemap.getRange())
    slopeout = os.path.join(outfolder,'slope.grd')
    cityfile = config.get('MAPDATA','cityfile')
    #get all of the colors that people want
    colors = {}
    for option in config.options('MAPDATA'):
        if option.endswith('color'):
            colors[option] = config.get('MAPDATA',option)
    #if they have roads configured, go find the appropriate roads segments
    hasRoads = config.has_option('MAPDATA','roadfolder')
    roadslist = []
    if hasRoads and args.roads:
        roadroot = config.get('MAPDATA','roadfolder')
        xmin,xmax,ymin,ymax = shakemap.getRange()
        for folder in os.listdir(roadroot):
            roadfolder = os.path.join(roadroot,folder)
            shpfiles = glob.glob(os.path.join(roadfolder,'*.shp'))
            if len(shpfiles):
                shpfile = shpfiles[0]
                f = fiona.open(shpfile)
                shapes = list(f.items(bbox=(xmin,ymin,xmax,ymax)))
                for shapeid,shapedict in shapes:
                    roadslist.append(shapedict)
                f.close()
    #get the thresholds for liquefaction/landslide model
    slopemin = float(config.get('MAPDATA','slopemin'))*100
    slopemax = float(config.get('MAPDATA','slopemax'))*100

    probdict = {}
    gridbounds = [999,-999,999,-999] #this will hold the smallest bounding box enclosing both models
    for model in getModelNames(configfile):
        lm = LogisticModel(configfile,shakefile,model)
        colormaps = getColorMaps(configfile)
        print('Equation for %s model:' % model)
        print()
        print(lm.getEquation())
        print()
        P = lm.calculate()
        probgrid = GMTGrid()
        probgrid.griddata = P.copy()
        probgrid.geodict = lm.layerdict[list(lm.layerdict.keys())[0]].geodict.copy()
        #resample the slope grid to the model grid
        slopegrid2 = GMTGrid()
        slopegrid2.loadFromGrid(slopegrid)
        slopegrid2.interpolateToGrid(probgrid.geodict)
        #zero out probabilities outside the slope range appropriate to each model
        if model == 'liquefaction':
            ithresh = slopegrid2.griddata > slopemax
        else:
            ithresh = slopegrid2.griddata < slopemin
        probgrid.griddata[ithresh] = 0.0
        xmin,xmax,ymin,ymax = probgrid.getRange()
        if xmin < gridbounds[0]:
            gridbounds[0] = xmin
        if xmax > gridbounds[1]:
            gridbounds[1] = xmax
        if ymin < gridbounds[2]:
            gridbounds[2] = ymin
        if ymax > gridbounds[3]:
            gridbounds[3] = ymax
        probdict[model] = probgrid
        probfile = os.path.join(outfolder,'%s.grd' % model)
        print('Saving %s model output to %s' % (model,probfile))
        probgrid.save(probfile)
        #renderPanel(lm,colormaps,outfolder,edict)
        # for layername,layergrid in lm.layerdict.iteritems():
        #     layerfile = os.path.join(outfolder,layername+'.grd')
        #     print 'Saving input grid %s to %s...' % (layername,layerfile)
        #     layergrid.save(layerfile)
        #     renderLayer(layergrid,layername,outfolder,edict,model,colormaps)

    topofile = config.get('MAPDATA','topo')
    #bigbounds = shakemap.getRange()
    xdim = shakemap.geodict['xdim']
    ydim = shakemap.geodict['ydim']
    #bigbounds = (bigbounds[0]-xdim*4,bigbounds[1]+xdim*4,bigbounds[2]-ydim*4,bigbounds[3]+ydim*4)
    topogrid = GMTGrid(topofile,bounds=bigbounds)
    topogrid = adjustTopoGrid(topogrid,bigbounds) #make this grid as big as bigbounds if we hit an upper or lower bound
    topoout = os.path.join(outfolder,'topography.grd')
    print('Saving topography to %s' % topoout)
    topogrid.save(topoout)
    print('Saving slope to %s' % slopeout)
    slopegrid.save(slopeout)
    isScenario = shakeheader['shakemap_grid']['shakemap_event_type'].lower() == 'scenario'
    if args.noscenario:
        isScenario = False
    timestr = renderDate(shakeheader['event']['event_timestamp'])
    location = shakeheader['event']['event_description']
    #hillshfile = config.get('MAPDATA','hillshadefile')
    #hillshgrid = GMTGrid(hillshfile,bounds=bigbounds)
    makeDualMap(probdict['liquefaction'],probdict['landslide'],topogrid,slopegrid,edict,outfolder,
                isScenario=isScenario,roadslist=roadslist,colors=colors,cityfile=cityfile)
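# Sketch of the slope-threshold masking applied to each model grid above:
# liquefaction probabilities are zeroed where slope exceeds slopemax, and
# landslide probabilities are zeroed where slope is below slopemin. Plain
# numpy boolean indexing on arrays that stand in for the resampled grids.
def _apply_slope_threshold(prob, slope, model, slopemin, slopemax):
    import numpy as np
    prob = np.array(prob, copy=True)
    if model == 'liquefaction':
        prob[slope > slopemax] = 0.0
    else:  # landslide
        prob[slope < slopemin] = 0.0
    return prob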