Code example #1
File: travel.py  Project: mhearne-usgs/alertmap
def readTimeGrid(timefile):
    stkeys = ['TOTALROWBYTES','NBITS','LAYOUT','YDIM','NCOLS',
              'BANDROWBYTES','PIXELTYPE','XDIM','NROWS',
              'NBANDS','ULXMAP','ULYMAP','BYTEORDER']
    src = rasterio.open(timefile,'r',driver='EHdr')
    timedata, = src.read()
    m,n = timedata.shape
    aff = src.affine
    xdim = aff[0]
    xmin = aff[2]
    ydim = -aff[4]
    ymax = aff[5]
    src.close()
    timegrid = GMTGrid()
    timegrid.griddata = timedata
    timegrid.geodict = {'nrows':m,'ncols':n,'nbands':1,'bandnames':['Alert Time'],
                        'xmin':xmin,'xmax':xmin+n*xdim,'ymin':ymax-m*ydim,'ymax':ymax,
                        'xdim':xdim,'ydim':ydim}
    
    timepath,timefile = os.path.split(timefile)
    timebase,timext = os.path.splitext(timefile)
    timehdr = os.path.join(timepath,timebase+'.hdr')
    timedict = readTimeHeader(timehdr)
    for key in stkeys:
        timedict.pop(key)
    for key,value in timedict.items():
        if isinstance(value,str):
            timedict[key] = value.replace('"','')
    return (timegrid,timedict)
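
A minimal usage sketch for readTimeGrid; the file name below is a placeholder, and the function assumes a matching .hdr header (read by readTimeHeader) sits next to the raster:

# Hypothetical ESRI BIL/EHdr raster of alert times; 'alert_times.bil' is a placeholder.
timegrid, timedict = readTimeGrid('alert_times.bil')

# The geodict carries the grid geometry recovered from the rasterio affine transform.
gd = timegrid.geodict
print('Grid is %i rows x %i cols, spanning %.3f to %.3f in longitude' %
      (gd['nrows'], gd['ncols'], gd['xmin'], gd['xmax']))

# Non-standard header keys are returned with their surrounding quotes stripped.
print(sorted(timedict.keys()))
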
Code example #2
File: map.py  Project: kallstadt-usgs/secondary
def renderPanel(logmodel, colormaps, outfolder, edict):
    nparray = "<type 'numpy.ndarray'>"
    # first, figure out how many layers we have
    layerdict = logmodel.layerdict
    outfiles = []
    for smterm in model.SM_TERMS:
        for term in logmodel.terms.values():
            if term.find(smterm) > -1 and not isinstance(logmodel.shakedict[smterm], float):
                layerdict[smterm] = logmodel.shakedict[smterm]

    for layername, layergrid in layerdict.items():
        fig = plt.figure(figsize=(8, 8))
        ax = plt.gca()
        renderLayer(layername, layergrid, outfolder, edict, fig, ax, logmodel.model, colormaps)
        outfile = os.path.join(outfolder, "%s_%s.pdf" % (layername, logmodel.model))
        print "Saving input layer %s to %s" % (layername, outfile)
        plt.savefig(outfile)
        outfiles.append(outfile)

    outfile = os.path.join(outfolder, "%s_model.pdf" % logmodel.model)
    fig = plt.figure(figsize=(8, 8))
    ax = plt.gca()
    P = logmodel.calculate()
    pgrid = GMTGrid()
    pgrid.griddata = P.copy()
    pgrid.geodict = layergrid.geodict.copy()
    renderLayer(logmodel.model, pgrid, outfolder, edict, fig, ax, logmodel.model, colormaps)
    print "Saving %s model to %s" % (logmodel.model, outfile)
    outfiles.append(outfile)
    return outfiles
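
A hedged sketch of how renderPanel might be driven, mirroring the commented-out renderPanel call and the LogisticModel/getColorMaps setup in the sechaz.py example later on this page; the paths are placeholders, and which edict fields renderLayer actually needs is not shown here, so the dict below simply copies the fields built in sechaz.py:

# Assumed setup, following the sechaz.py example below; paths are placeholders.
configfile = '/path/to/secondary_config.ini'
shakefile = '/path/to/grid.xml'
lm = LogisticModel(configfile, shakefile, 'landslide')
colormaps = getColorMaps(configfile)
# Build the event-summary dict from the ShakeMap header, as sechaz.py does.
shakeheader = ShakeGrid(shakefile).getAttributes()
edict = {'mag': shakeheader['event']['magnitude'],
         'time': shakeheader['event']['event_timestamp'],
         'loc': shakeheader['event']['event_description'],
         'epicenter': (shakeheader['event']['lat'], shakeheader['event']['lon']),
         'version': int(shakeheader['shakemap_grid']['shakemap_version']),
         'eventid': shakeheader['shakemap_grid']['event_id']}
pdffiles = renderPanel(lm, colormaps, '/tmp/panels', edict)
print('Wrote %i panel PDFs' % len(pdffiles))
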
Code example #3
File: lsprocess.py  Project: mhearne-usgs/lsprocess
def makeCoverageGrid(covshp,geodict):
    shapes = fiona.open(covshp)
    geoms = []
    for shape in shapes:
        geoms.append(shape['geometry'])
    shapes.close()
    outshape = (geodict['nrows'],geodict['ncols'])
    transform = Affine.from_gdal(geodict['xmin'],geodict['xdim'],0.0,geodict['ymax'],0.0,-geodict['ydim'])
    img = features.rasterize(geoms,out_shape=outshape,fill=0,
                             transform=transform,all_touched=True,
                             default_value=1)
    covgrid = GMTGrid()
    covgrid.geodict = geodict
    covgrid.griddata = np.int8(img.copy())
    return covgrid
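
A short sketch of calling makeCoverageGrid with a hand-built geodict; the shapefile name and extents are placeholders, and the geodict keys follow the convention used throughout these examples:

# Hypothetical 0.01-degree grid over a 1x1 degree box; all values are illustrative.
geodict = {'xmin': -120.0, 'xmax': -119.0, 'ymin': 36.0, 'ymax': 37.0,
           'xdim': 0.01, 'ydim': 0.01, 'nrows': 100, 'ncols': 100}
covgrid = makeCoverageGrid('landslide_coverage.shp', geodict)  # placeholder shapefile
# Cells touched by any polygon are 1 (default_value), everything else is 0 (fill).
print('Covered cells: %i of %i' % (int(covgrid.griddata.sum()), covgrid.griddata.size))
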
Code example #4
File: alertmap.py  Project: ibrahim85/alertmap
def getTimeExposure(timegriddata,mmigrid,popfile,mmithresh):
    timegrid = GMTGrid()
    timegrid.griddata = timegriddata.copy()
    timegrid.geodict = mmigrid.geodict.copy()
    popgrid = EsriGrid(popfile)
    popgrid.load(bounds=timegrid.getRange())
    timegrid.interpolateToGrid(popgrid.geodict)
    timegrid.griddata[mmigrid.griddata < mmithresh] = np.NaN
    times = np.arange(MINTIME,MAXTIME+DTIME,DTIME)
    exposure = []
    mintime = MINTIME
    ireal = np.isfinite(timegrid.griddata)
    for time in times[1:]:
        ipop = ((timegrid.griddata >= mintime) & (timegrid.griddata < time) & np.isfinite(timegrid.griddata))
        exposum = int(np.sum(popgrid.griddata[ipop]))
        exposure.append({'mintime':mintime,'maxtime':time,'exposure':exposum})
        mintime = time
    return (exposure,timegrid.griddata)
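
A hedged sketch of calling getTimeExposure; mmigrid and timegriddata are assumed to come from the ShakeMap and travel-time steps elsewhere in alertmap, and the population file path and MMI threshold below are placeholders:

# mmigrid: a GMTGrid of ShakeMap MMI values; timegriddata: a numpy array of alert
# times (seconds) on the same grid -- both assumed to be produced earlier in alertmap.
exposure, timedata = getTimeExposure(timegriddata, mmigrid,
                                     '/data/population.flt',  # placeholder population grid
                                     6.0)                      # placeholder MMI threshold
for row in exposure:
    print('%(mintime)g-%(maxtime)g s: %(exposure)i people' % row)
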
Code example #5
File: travel.py  Project: ibrahim85/alertmap
def readTimeGrid(timefile):
    stkeys = [
        'TOTALROWBYTES', 'NBITS', 'LAYOUT', 'YDIM', 'NCOLS', 'BANDROWBYTES',
        'PIXELTYPE', 'XDIM', 'NROWS', 'NBANDS', 'ULXMAP', 'ULYMAP', 'BYTEORDER'
    ]
    src = rasterio.open(timefile, 'r', driver='EHdr')
    timedata, = src.read()
    m, n = timedata.shape
    aff = src.affine
    xdim = aff[0]
    xmin = aff[2]
    ydim = -aff[4]
    ymax = aff[5]
    src.close()
    timegrid = GMTGrid()
    timegrid.griddata = timedata
    timegrid.geodict = {
        'nrows': m,
        'ncols': n,
        'nbands': 1,
        'bandnames': ['Alert Time'],
        'xmin': xmin,
        'xmax': xmin + n * xdim,
        'ymin': ymax - m * ydim,
        'ymax': ymax,
        'xdim': xdim,
        'ydim': ydim
    }

    timepath, timefile = os.path.split(timefile)
    timebase, timext = os.path.splitext(timefile)
    timehdr = os.path.join(timepath, timebase + '.hdr')
    timedict = readTimeHeader(timehdr)
    for key in stkeys:
        timedict.pop(key)
    for key, value in timedict.items():
        if isinstance(value, str):
            timedict[key] = value.replace('"', '')
    return (timegrid, timedict)
Code example #6
File: lsprocess.py  Project: mhearne-usgs/lsprocess
def main(args):
    #read in global config file
    configfile = os.path.join(os.path.expanduser('~'),'.lsprocess','lsprocess.cfg')
    hasconfig = os.path.isfile(configfile)
    if not hasconfig:
        print()
        print('No config file "%s" found.' % configfile)
        print()
        sys.exit(1)
    global_grids,outfolder = readConfig(configfile) #returns a dictionary just like global_config above
    
    
    #read in event specific grid file
    try:
        covdict,predictors,ename = parseEvent(args.eventfile)
    except Exception as msg:
        print('There is something wrong with your event file.  See errors below.')
        print(msg)
        sys.exit(1)
    
    #construct output folder from global/event configs
    outfolder = os.path.join(outfolder,ename)
    if not os.path.isdir(outfolder):
        os.mkdir(outfolder)
    
    #look for bounding box and resolution in event config file, or get from shakemap
    bbox = None
    shakemap = ShakeGrid(predictors['shakemap'][0],'MMI')
    if 'bbox' in covdict:
        bbox = covdict['bbox']
    else:
        #bbox = shakemap.getRange()
        #default to the bounding box of the coverage data
        with fiona.open(covdict['filename']) as src:
            tbbox = src.bounds
            bbox = (tbbox[0],tbbox[2],tbbox[1],tbbox[3])
            
    if 'resolution' in covdict:
        resolution = covdict['resolution']
    else:
        resolution = shakemap.getGeoDict()['xdim']
    
    #get input coverage projection from event config OR from .prj file
    #projstr = covdict['projstr']
    
    #get format of coverage, check against list of supported fiona formats, read in data
    #we'll do other support later
    
    #if necessary, project coverage into lat/lon
    #skip projection for now as well

    #determine what the grid shape and (potentially) new bbox is given bbox and resolution
    nrows,ncols,bbox = getShape(bbox,resolution)
    #if the coverage dataset is larger than the ShakeMap, we need to make sure our output grid
    #is contained by the shakemap for interpolation purposes.
    shakebounds = shakemap.getRange()
    shakexdim,shakeydim = (shakemap.geodict['xdim'],shakemap.geodict['ydim'])
    xmin = max(bbox[0],shakebounds[0]+shakexdim*2)
    xmax = min(bbox[1],shakebounds[1]-shakexdim*2)
    ymin = max(bbox[2],shakebounds[2]+shakeydim*2)
    ymax = min(bbox[3],shakebounds[3]-shakeydim*2)
    geodict = {'xdim':resolution,'ydim':resolution,
               'xmin':xmin,'xmax':xmax,
               'ymin':ymin,'ymax':ymax,
               'nrows':nrows,'ncols':ncols}
    
    #rasterize projected coverage defined bounding box and resolution
    shpfile = covdict['filename']
    print('Creating coverage grid...')
    covgrid = makeCoverageGrid(shpfile,geodict)
    outgridfile = os.path.join(outfolder,'coverage.grd')
    print('Saving coverage to %s...' % outgridfile)
    covgrid.save(outgridfile)

    #make a grid of lat,lon values
    row = np.arange(0,nrows)
    col = np.arange(0,ncols)
    rows = repmat(row,ncols,1).T
    cols = repmat(col,nrows,1)
    lat,lon = covgrid.getLatLon(rows,cols)

    #create a list of arrays that we'll dump out to a text file when done
    vardict = {}
    vardict['coverage'] = covgrid.griddata.flatten()
    vardict['lat'] = lat.flatten()
    vardict['lon'] = lon.flatten()
        
    #subset shakemap and global grids using defined bounding box and resolution
    shakefile = predictors['shakemap'][0]
    variables = predictors['shakemap'][1]
    for var in variables:
        shakemap = ShakeGrid(shakefile,var.upper())
        shakemap.interpolateToGrid(geodict)
        gmtshake = GMTGrid()
        gmtshake.geodict = shakemap.geodict
        gmtshake.griddata = shakemap.griddata
        outshakefile = os.path.join(outfolder,'%s.grd' % var)
        print('Saving %s to %s...' % (var,outshakefile))
        gmtshake.save(outshakefile)
        vardict[var] = gmtshake.griddata.flatten()
        
    #write netcdf versions of coverage, shakemap, and global grids to output folder
    for gridname,gridfile in global_grids.items():
        if not os.path.isfile(gridfile):
            continue
        try:
            grid = sampleGrid(gridfile,geodict)
        except Exception as e:
            print('There was an error while sampling the "%s" grid "%s". - "%s"' % (gridname,gridfile,str(e)))
            continue

        outgridfile = os.path.join(outfolder,gridname+'.grd')
        print('Saving %s to %s...' % (gridname,outgridfile))
        grid.save(outgridfile)
        vardict[gridname] = grid.griddata.flatten()
        
    #create text file with columns of data for all predictor variables
    firstcols = ['lat','lon','coverage']
    outmat = np.zeros((nrows*ncols,len(vardict)))
    for i in range(0,len(firstcols)):
        col = firstcols[i]
        outmat[:,i] = vardict[col]
    colidx = i+1
    colnames = []
    for col,column in vardict.items():
        if col in firstcols:
            continue
        outmat[:,colidx] = vardict[col]
        colnames.append(col)
        colidx += 1

    colnames = firstcols + colnames
    m,n = outmat.shape
    datfile = os.path.join(outfolder,'%s.dat' % ename)
    print('Saving all variables to data file %s...' % datfile)
    f = open(datfile,'wt')
    f.write(','.join(colnames)+'\n')
    for i in range(0,m):
        line = ','.join('%.4f' % col for col in outmat[i,:])
        f.write(line+'\n')
    f.close()
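
main() above only reads args.eventfile, so a minimal driver might look like the sketch below; the argparse wiring is an assumption and is not part of the excerpt:

import argparse

if __name__ == '__main__':
    # Only the eventfile attribute is used by main(); everything else here is assumed.
    parser = argparse.ArgumentParser(description='Sample predictor grids for one landslide event.')
    parser.add_argument('eventfile', help='event-specific config file parsed by parseEvent()')
    main(parser.parse_args())
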
Code example #7
File: sechaz.py  Project: kallstadt-usgs/secondary
def main(args):
    #define location for config file
    homedir = os.path.expanduser("~") #where is the user's home directory?
    configfile = args.configFile
    
    shakefile = args.shakefile

    if not os.path.isfile(shakefile):
        if isURL(shakefile):
            shakefile = getGridURL(shakefile) #returns a file object
        else:
            print('Could not find "%s" as a file or a url.  Returning.' % (shakefile))
            return
    
    shakemap = ShakeGrid(shakefile)
    #figure out the bounds that are greater than the biggest bounds
    #of any of the grids
    shakerange = shakemap.getRange()
    lonrange = shakerange[1] - shakerange[0]
    latrange = shakerange[3] - shakerange[2]
    xmin = shakerange[0] - lonrange*0.1
    xmax = shakerange[1] + lonrange*0.1
    ymin = shakerange[2] - latrange*0.1
    ymax = shakerange[3] + latrange*0.1
    bigbounds = (xmin,xmax,ymin,ymax)
    #
    shakeheader = shakemap.getAttributes()
    edict = {'mag':shakeheader['event']['magnitude'],
             'time':shakeheader['event']['event_timestamp'],
             'loc':shakeheader['event']['event_description'],
             'epicenter':(shakeheader['event']['lat'],shakeheader['event']['lon']),
             'version':int(shakeheader['shakemap_grid']['shakemap_version']),
             'eventid':shakeheader['shakemap_grid']['event_id']}
    config = ConfigParser.RawConfigParser()
    config.read(configfile)
    network = shakeheader['shakemap_grid']['shakemap_originator']
    eventcode =  shakeheader['shakemap_grid']['shakemap_id']
    if eventcode.startswith(network):
        eventid = eventcode
    else:
        eventid = network + eventcode
    outfolder = os.path.join(config.get('OUTPUT','folder'),eventid)
    if not os.path.isdir(outfolder):
        os.makedirs(outfolder)

    slopefile = config.get('MAPDATA','slope')
    slopegrid = GMTGrid(slopefile,bounds=shakemap.getRange())
    slopeout = os.path.join(outfolder,'slope.grd')

    cityfile = config.get('MAPDATA','cityfile')
    
    #get all of the colors that people want
    colors = {}
    for option in config.options('MAPDATA'):
        if option.endswith('color'):
            colors[option] = config.get('MAPDATA',option)

    #if they have roads configured, go find the appropriate roads segments
    hasRoads = config.has_option('MAPDATA','roadfolder')
    roadslist = []
    if hasRoads and args.roads:
        roadroot = config.get('MAPDATA','roadfolder')
        xmin,xmax,ymin,ymax = shakemap.getRange()
        for folder in os.listdir(roadroot):
            roadfolder = os.path.join(roadroot,folder)
            shpfiles = glob.glob(os.path.join(roadfolder,'*.shp'))
            if len(shpfiles):
                shpfile = shpfiles[0]
                f = fiona.open(shpfile)
                shapes = list(f.items(bbox=(xmin,ymin,xmax,ymax)))
                for shapeid,shapedict in shapes:
                    roadslist.append(shapedict)
                f.close()

    #get the thresholds for liquefaction/landslide model
    slopemin = float(config.get('MAPDATA','slopemin'))*100
    slopemax = float(config.get('MAPDATA','slopemax'))*100
    
    probdict = {}
    gridbounds = [999,-999,999,-999] #this will hold the smallest bounding box enclosing both models
    for model in getModelNames(configfile):
        lm = LogisticModel(configfile,shakefile,model)
        colormaps = getColorMaps(configfile)
        print('Equation for %s model:' % model)
        print()
        print(lm.getEquation())
        print()
        P = lm.calculate()
        probgrid = GMTGrid()
        probgrid.griddata = P.copy()
        probgrid.geodict = lm.layerdict[list(lm.layerdict.keys())[0]].geodict.copy()

        #resample the slope grid to model
        slopegrid2 = GMTGrid()
        slopegrid2.loadFromGrid(slopegrid)
        slopegrid2.interpolateToGrid(probgrid.geodict)

        if model == 'liquefaction':
            ithresh = slopegrid2.griddata > slopemax
        else:
            ithresh = slopegrid2.griddata < slopemin

        probgrid.griddata[ithresh] = 0.0
        
        xmin,xmax,ymin,ymax = probgrid.getRange()
        if xmin < gridbounds[0]:
            gridbounds[0] = xmin
        if xmax > gridbounds[1]:
            gridbounds[1] = xmax
        if ymin < gridbounds[2]:
            gridbounds[2] = ymin
        if ymax > gridbounds[3]:
            gridbounds[3] = ymax
        probdict[model] = probgrid
        probfile = os.path.join(outfolder,'%s.grd' % model)
        print('Saving %s model output to %s' % (model,probfile))
        probgrid.save(probfile)
        #renderPanel(lm,colormaps,outfolder,edict)
        # for layername,layergrid in lm.layerdict.iteritems():
        #     layerfile = os.path.join(outfolder,layername+'.grd')
        #     print 'Saving input grid %s to %s...' % (layername,layerfile)
        #     layergrid.save(layerfile)
        #     renderLayer(layergrid,layername,outfolder,edict,model,colormaps)

    topofile = config.get('MAPDATA','topo')
    #bigbounds = shakemap.getRange()
    xdim = shakemap.geodict['xdim']
    ydim = shakemap.geodict['ydim']
    #bigbounds = (bigbounds[0]-xdim*4,bigbounds[1]+xdim*4,bigbounds[2]-ydim*4,bigbounds[3]+ydim*4)
    topogrid = GMTGrid(topofile,bounds=bigbounds)
    topogrid = adjustTopoGrid(topogrid,bigbounds) #make this grid as big as bigbounds if we hit an upper or lower bound
    topoout = os.path.join(outfolder,'topography.grd')
    print('Saving topography to %s' % topoout)
    topogrid.save(topoout)
    
    print('Saving slope to %s' % slopeout)
    slopegrid.save(slopeout)

    isScenario = shakeheader['shakemap_grid']['shakemap_event_type'].lower() == 'scenario'
    if args.noscenario:
        isScenario = False
    timestr = renderDate(shakeheader['event']['event_timestamp'])
    location = shakeheader['event']['event_description']
    #hillshfile = config.get('MAPDATA','hillshadefile')
    #hillshgrid = GMTGrid(hillshfile,bounds=bigbounds)
    makeDualMap(probdict['liquefaction'], probdict['landslide'], topogrid, slopegrid,
                edict, outfolder, isScenario=isScenario, roadslist=roadslist,
                colors=colors, cityfile=cityfile)
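
main() above reads args.configFile, args.shakefile, args.roads and args.noscenario, so a command-line driver along the following lines would work; the flag spellings and help text are assumptions:

import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Make liquefaction/landslide probability maps from a ShakeMap.')
    parser.add_argument('shakefile', help='path or URL of a ShakeMap grid.xml file')
    parser.add_argument('-c', '--configFile', help='model/map configuration file')
    parser.add_argument('-r', '--roads', action='store_true', help='draw road segments if configured')
    parser.add_argument('-n', '--noscenario', action='store_true', help='suppress scenario labeling')
    main(parser.parse_args())
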