Code example #1
# Imports assumed by these examples: os from the standard library, and the
# grid readers from the usgs/MapIO package.
import os

from mapio.gdal import GDALGrid
from mapio.gmt import GMTGrid


def getFileType(filename):
    """
    Determine whether the input path is a directory, a GMT grid, or an
    ESRI grid.

    Args:
        filename (str): Path to candidate filename.

    Returns:
        str: 'dir', 'gmt', 'esri', or 'unknown'.
    """
    # TODO MOVE TO MAPIO.
    if os.path.isdir(filename):
        return 'dir'
    ftype = GMTGrid.getFileType(filename)
    if ftype != 'unknown':
        return 'gmt'
    # Skip over ESRI header files
    if filename.endswith('.hdr'):
        return 'unknown'
    try:
        GDALGrid.getFileGeoDict(filename)
        return 'esri'
    except:
        pass
    return 'unknown'
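A minimal usage sketch for the detector above; the paths are hypothetical and the MapIO package must be installed:

# Hypothetical paths, swap in real files to try this.
for candidate in ['slope.grd', 'slope.bil', 'slope.hdr', '/tmp']:
    print(candidate, getFileType(candidate))
# Expect 'gmt' for a GMT NetCDF grid, 'esri' for an ESRI binary grid,
# 'unknown' for the bare .hdr header, and 'dir' for a directory.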
Code example #2
def sampleGridFile(gridfile, xypoints, method='nearest'):
    """Sample grid file (ESRI or GMT format) at each of a set of XY (decimal degrees) points.

    :param gridfile:
      Name of ESRI or GMT grid format file from which to sample values.
    :param xypoints:
      2D numpy array of XY points, decimal degrees.
    :param method:
      Interpolation method, either 'nearest' or 'linear'.
    :returns:
      1D numpy array of grid values at each of input XY points.
    """
    xmin = np.min(xypoints[:, 0])
    xmax = np.max(xypoints[:, 0])
    ymin = np.min(xypoints[:, 1])
    ymax = np.max(xypoints[:, 1])
    gridtype = None
    try:
        fdict = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception as error:
        try:
            fdict = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except:
            pass
    if gridtype is None:
        raise Exception(
            'File "%s" does not appear to be either a GMT grid or an ESRI grid.'
            % gridfile)
    xmin = xmin - fdict.dx * 3
    xmax = xmax + fdict.dx * 3
    ymin = ymin - fdict.dy * 3
    ymax = ymax + fdict.dy * 3
    #bounds = (xmin, xmax, ymin, ymax)
    if gridtype == 'gmt':
        fgeodict = GMTGrid.getFileGeoDict(gridfile)
    else:
        fgeodict = GDALGrid.getFileGeoDict(gridfile)
    dx, dy = (fgeodict.dx, fgeodict.dy)
    sdict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    if gridtype == 'gmt':
        grid = GMTGrid.load(gridfile,
                            samplegeodict=sdict,
                            resample=False,
                            method=method,
                            doPadding=True)
    else:
        grid = GDALGrid.load(gridfile,
                             samplegeodict=sdict,
                             resample=False,
                             method=method,
                             doPadding=True)

    return sampleFromGrid(grid, xypoints)
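A short usage sketch for sampleGridFile, assuming the sampleFromGrid helper used above is also in scope; 'slope.bil' is a hypothetical ESRI-format grid and the points are arbitrary:

import numpy as np

# Columns are lon (x) then lat (y), in decimal degrees.
xypoints = np.array([[-118.2, 34.1],
                     [-118.4, 34.3]])
values = sampleGridFile('slope.bil', xypoints, method='nearest')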
Code example #3
File: grid2d_test.py Project: hschovanec-usgs/MapIO
def test_project():
    data = np.array([[0, 0, 1, 0, 0], [0, 0, 1, 0, 0], [1, 1, 1, 1, 1],
                     [0, 0, 1, 0, 0], [0, 0, 1, 0, 0]],
                    dtype=np.int32)
    geodict = {
        'xmin': 50,
        'xmax': 50.4,
        'ymin': 50,
        'ymax': 50.4,
        'dx': 0.1,
        'dy': 0.1,
        'nx': 5,
        'ny': 5
    }
    gd = GeoDict(geodict)
    grid = GDALGrid(data, gd)
    projstr = "+proj=utm +zone=40 +north +ellps=WGS84 +datum=WGS84 +units=m +no_defs "
    newgrid = grid.project(projstr, method='nearest')

    tdir = tempfile.mkdtemp()
    try:
        outfile = os.path.join(tdir, 'output.bil')
        grid.save(outfile)
        with rasterio.open(outfile) as src:
            aff = get_affine(src)
            data = src.read(1)
            src_crs = CRS().from_string(GeoDict.DEFAULT_PROJ4).to_dict()
            dst_crs = CRS().from_string(projstr).to_dict()
            nrows, ncols = data.shape
            left = aff.xoff
            top = aff.yoff
            right, bottom = aff * (ncols - 1, nrows - 1)
            dst_transform, width, height = calculate_default_transform(
                src_crs, dst_crs, ncols, nrows, left, bottom, right, top)
            destination = np.zeros((height, width))
            reproject(data,
                      destination,
                      src_transform=aff,
                      src_crs=src_crs,
                      dst_transform=dst_transform,
                      dst_crs=dst_crs,
                      src_nodata=src.nodata,
                      dst_nodata=np.nan,
                      resampling=Resampling.nearest)
            x = 1
    except:
        pass
    finally:
        shutil.rmtree(tdir)
Code example #4
def getFileType(filename):
    if os.path.isdir(filename):
        return 'dir'
    ftype = GMTGrid.getFileType(filename)
    if ftype != 'unknown':
        return 'gmt'
    #skip over ESRI header files
    if filename.endswith('.hdr'):
        return 'unknown'
    try:
        GDALGrid.getFileGeoDict(filename)
        return 'esri'
    except:
        pass
    return 'unknown'
Code example #5
def main():
    """
    Sample the Vs30 grid at station locations, compare old and new Vs30
    values in a plot, and save the updated table to CSV.
    """
    Vs30grid = GDALGrid.load(VS30_FILE)
    slats = np.array(SHAKE_DF['sta_lat'])
    slons = np.array(SHAKE_DF['sta_lon'])

    new_vs30 = Vs30grid.getValue(slats, slons, method='nearest')

    fig, ax = plt.subplots(1)
    ax.loglog(SHAKE_DF['vs30'], new_vs30, 'ko', fillstyle='none')
    # ax.set(xscale="log", yscale="log")
    lim = [100, 2000]
    ax.plot(lim, lim, 'k--')
    ax.set_xlim(lim)
    ax.set_ylim(lim)
    ax.set_xlabel('Old Vs30')
    ax.set_ylabel('New Vs30')
    fig_path = os.path.join('..', 'figs', 'vs30_compare.png')
    fig.savefig(fig_path, dpi=300)

    n_nan = len(np.where(np.isnan(new_vs30))[0])
    print('There are %s nans.' % n_nan)

    SHAKE_DF['CA Vs30'] = new_vs30
    new_file = 'shakeGrid_add_vs30.csv'
    SHAKE_DF.to_csv(new_file, index=False)
Code example #6
File: sample.py Project: mhearne-usgs/lsprocess
def getNoDataGrid(predictors,xmin,xmax,ymin,ymax):
    txmin = xmin
    txmax = xmax
    tymin = ymin
    tymax = ymax
    mindx = 9999999999
    mindy = 9999999999
    #figure out bounds enclosing all files
    for predname,predfile in predictors.items():
        if not os.path.isfile(predfile):
            continue
        ftype = getFileType(predfile)
        if ftype == 'shapefile':
            f = fiona.open(predfile,'r')
            bxmin,bymin,bxmax,bymax = f.bounds
            f.close()
            if bxmin < txmin:
                txmin = bxmin
            if bxmax > txmax:
                txmax = bxmax
            if bymin < tymin:
                tymin = bymin
            if bymax > tymax:
                tymax = bymax
        elif ftype == 'grid':
            gridtype = getGridType(predfile)
            if gridtype is None:
                raise Exception('File "%s" does not appear to be either a GMT grid or an ESRI grid.' % predfile)
            fdict = getFileGeoDict(predfile,gridtype)
            if fdict.dx < mindx:
                mindx = fdict.dx
            if fdict.dy < mindy:
                mindy = fdict.dy
            if fdict.xmin < txmin:
                txmin = fdict.xmin
            if fdict.xmax > txmax:
                txmax = fdict.xmax
            if fdict.ymin < tymin:
                tymin = fdict.ymin
            if fdict.ymax > tymax:
                tymax = fdict.ymax
    sdict = GeoDict.createDictFromBox(txmin,txmax,tymin,tymax,mindx,mindy)
    nanarray = np.zeros((sdict.ny,sdict.nx),dtype=np.int8)
    for predname,predfile in predictors.items():
        if not os.path.isfile(predfile):
            continue
        ftype = getFileType(predfile)
        if ftype == 'shapefile':
            shapes = list(fiona.open(predfile,'r'))
            grid = Grid2D.rasterizeFromGeometry(shapes,sdict)
        else:
            gridtype = getGridType(predfile)
            if gridtype == 'gmt':
                grid = GMTGrid.load(predfile,samplegeodict=sdict,resample=True,method='nearest',doPadding=True)
            else:
                grid = GDALGrid.load(predfile,samplegeodict=sdict,resample=True,method='nearest',doPadding=True)
        nangrid = np.isnan(grid.getData())
        nanarray = nanarray | nangrid
    nangrid = Grid2D(data=nanarray,geodict=sdict)
    return nangrid
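A sketch of how getNoDataGrid might be called, assuming the module's other helpers (getFileType, getGridType, getFileGeoDict) are in scope; the predictor files are hypothetical:

predictors = {'slope': 'slope.bil',      # hypothetical ESRI grid
              'geology': 'geology.shp'}  # hypothetical shapefile
nanmask = getNoDataGrid(predictors, -118.5, -118.0, 34.0, 34.5)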
Code example #7
def warp(infile, outfile, s_srs, t_srs, method='bilinear', fmt='EHdr'):
    """
    Calls gdalwarp to reproject a raster
    USAGE s_srs, t_srs = warp(infile, outfile, s_srs, t_srs, method='bilinear',
    fmt='EHdr')

    :param infile: Single input file
    :param outfile: Single output file with extension
    :param s_srs: Projection of input file (EPSG or PROJ.4 string), if None,
      uses transverse mercator
    :param t_srs: Projection of output file (EPSG or PROJ.4 string), if None,
      uses transverse mercator
    :param method: method to use in warping, from gdalwarp's options,
      'bilinear', 'nearest' etc., bilinear should be used for slopes to avoid
      weird artifacts
    :param fmt: Format of output, 'EHdr' or 'GMT'
    :returns:
       s_srs: s_srs that was used
       t_srs: t_srs that was used
    """
    if s_srs is None or t_srs is None:
        temp, first_column_duplicated = GDALGrid.getFileGeoDict(infile)
        clat = temp.ymin + (temp.ymax - temp.ymin) / 2.0
        clon = temp.xmin + (temp.xmax - temp.xmin) / 2.0
    if s_srs is None:
        s_srs = '"+proj=tmerc +lat_0=%s +lon_0=%s +x_0=0 +y_0=0 +units=m +no_defs"' % (
            clat, clon)
    if t_srs is None:
        t_srs = '"+proj=tmerc +lat_0=%s +lon_0=%s +x_0=0 +y_0=0 +units=m +no_defs"' % (
            clat, clon)
    build = 'gdalwarp -overwrite -s_srs %s -t_srs %s -r %s -of %s %s %s' % (
        s_srs, t_srs, method, fmt, infile, outfile)
    #run code
    os.system(build)
    return s_srs, t_srs
Code example #8
def warp(infile, outfile, s_srs, t_srs, method='bilinear', fmt='EHdr'):
    """
    Call gdalwarp to reproject a raster
    USAGE s_srs, t_srs = warp(infile, outfile, s_srs, t_srs, method='bilinear',
    fmt='EHdr')
    :param infile: Single input file
    :param outfile: Single output file with extension
    :param s_srs: Projection of input file (EPSG or PROJ.4 string), if None, 
      uses transverse mercator
    :param t_srs: Projection of output file (EPSG or PROJ.4 string), if None, 
      uses transverse mercator
    :param method: method to use in warping, from gdalwarp's options, 
      'bilinear', 'nearest' etc., bilinear should be used for slopes to avoid 
      weird artifacts
    :param fmt: Format of output, 'EHdr' or 'GMT'
    :returns s_srs: s_srs that was used
    :returns t_srs: t_srs that was used
    """
    if s_srs is None or t_srs is None:
        temp = GDALGrid.getFileGeoDict(infile)
        clat = temp.ymin + (temp.ymax-temp.ymin)/2.0
        clon = temp.xmin + (temp.xmax-temp.xmin)/2.0
    if s_srs is None:
        s_srs = '"+proj=tmerc +lat_0=%s +lon_0=%s +x_0=0 +y_0=0 +units=m +no_defs"' % (clat, clon)
    if t_srs is None:
        t_srs = '"+proj=tmerc +lat_0=%s +lon_0=%s +x_0=0 +y_0=0 +units=m +no_defs"' % (clat, clon)
    build = 'gdalwarp -overwrite -s_srs %s -t_srs %s -r %s -of %s %s %s' % (s_srs, t_srs, method, fmt, infile, outfile)
    #run code
    os.system(build)
    return s_srs, t_srs
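Both warp variants shell out through os.system, which discards gdalwarp's exit status. A sketch of the same call via subprocess (an alternative, not what either project does) could replace the last three lines of warp:

import subprocess

# With an argument list there is no shell, so the extra quotes wrapped
# around the PROJ.4 strings above are not needed here.
cmd = ['gdalwarp', '-overwrite', '-s_srs', s_srs, '-t_srs', t_srs,
       '-r', method, '-of', fmt, infile, outfile]
subprocess.run(cmd, check=True)  # raises CalledProcessError on failure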
Code example #9
File: sample.py Project: usgs/groundfailure
def getFileType(filename):
    """
    Determine whether input file is a shapefile or a grid (ESRI or GMT).

    :param filename:
      String path to candidate filename.
    :returns:
      String, one of 'shapefile','grid','unknown'.
    """

    fname, fext = os.path.splitext(filename)
    dbf = fname + '.dbf'
    ftype = 'unknown'
    if os.path.isfile(dbf):
        ftype = 'shapefile'
    else:
        try:
            fdict = GMTGrid.getFileGeoDict(filename)
            ftype = 'grid'
        except Exception as error:
            try:
                fdict = GDALGrid.getFileGeoDict(filename)
                ftype = 'grid'
            except:
                pass
    return ftype
Code example #10
File: sample.py Project: emthompson-usgs/lsprocess
def sampleGridFile(gridfile,xypoints,method='nearest'):
    """
    Sample grid file (ESRI or GMT format) at each of a set of XY (decimal degrees) points.
    :param gridfile:
      Name of ESRI or GMT grid format file from which to sample values.
    :param xypoints:
      2D numpy array of XY points, decimal degrees.
    :param method:
      Interpolation method, either 'nearest' or 'linear'.
    :returns:
      1D numpy array of grid values at each of input XY points.
    """
    xmin = np.min(xypoints[:,0])
    xmax = np.max(xypoints[:,0])
    ymin = np.min(xypoints[:,1])
    ymax = np.max(xypoints[:,1])
    gridtype = None
    try:
        fdict = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception as error:
        try:
            fdict = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except:
            pass
Code example #11
File: sites.py Project: ynthdhj/shakemap
    def _load(vs30File, samplegeodict=None, resample=False, method='linear',
              doPadding=False, padValue=np.nan):
        try:
            vs30grid = GMTGrid.load(vs30File,
                                    samplegeodict=samplegeodict,
                                    resample=resample,
                                    method=method,
                                    doPadding=doPadding,
                                    padValue=padValue)
        except Exception as msg1:
            try:
                vs30grid = GDALGrid.load(vs30File,
                                         samplegeodict=samplegeodict,
                                         resample=resample,
                                         method=method,
                                         doPadding=doPadding,
                                         padValue=padValue)
            except Exception as msg2:
                msg = 'Load failure of %s - error messages: "%s"\n "%s"' % (
                    vs30File, str(msg1), str(msg2))
                raise ShakeLibException(msg)

        if vs30grid.getData().dtype != np.float64:
            vs30grid.setData(vs30grid.getData().astype(np.float64))

        return vs30grid
Code example #12
def _load(vs30File,
          samplegeodict=None,
          resample=False,
          method='linear',
          doPadding=False,
          padValue=np.nan):
    try:
        vs30grid = GMTGrid.load(vs30File,
                                samplegeodict=samplegeodict,
                                resample=resample,
                                method=method,
                                doPadding=doPadding,
                                padValue=padValue)
    except Exception as msg1:
        try:
            vs30grid = GDALGrid.load(vs30File,
                                     samplegeodict=samplegeodict,
                                     resample=resample,
                                     method=method,
                                     doPadding=doPadding,
                                     padValue=padValue)
        except Exception as msg2:
            msg = 'Load failure of %s - error messages: "%s"\n "%s"' % (
                vs30File, str(msg1), str(msg2))
            raise ShakeMapException(msg)

    if vs30grid.getData().dtype != np.float64:
        vs30grid.setData(vs30grid.getData().astype(np.float64))

    return vs30grid
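A usage sketch for _load; the file name is hypothetical, and with no samplegeodict the whole grid is read:

vs30 = _load('global_vs30.grd')  # hypothetical GMT or ESRI grid
print(vs30.getGeoDict().nx, vs30.getGeoDict().ny)
print(vs30.getData().dtype)  # float64, guaranteed by the cast above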
Code example #13
File: sample.py Project: mhearne-usgs/lsprocess
def sampleGridFile(gridfile,xypoints,method='nearest'):
    """
    Sample grid file (ESRI or GMT format) at each of a set of XY (decimal degrees) points.
    :param gridfile:
      Name of ESRI or GMT grid format file from which to sample values.
    :param xypoints:
      2D numpy array of XY points, decimal degrees.
    :param method:
      Interpolation method, either 'nearest' or 'linear'.
    :returns:
      1D numpy array of grid values at each of input XY points.
    """
    if not len(xypoints):
        return np.array([])
    xmin = np.min(xypoints[:,0])
    xmax = np.max(xypoints[:,0])
    ymin = np.min(xypoints[:,1])
    ymax = np.max(xypoints[:,1])
    gridtype = None
    try:
        fdict,tmp = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception as error:
        try:
            fdict,tmp = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except:
            pass
    if gridtype is None:
        raise Exception('File "%s" does not appear to be either a GMT grid or an ESRI grid.' % gridfile)
    xmin = xmin - fdict.dx*3
    xmax = xmax + fdict.dx*3
    ymin = ymin - fdict.dy*3
    ymax = ymax + fdict.dy*3
    bounds = (xmin,xmax,ymin,ymax)
    if gridtype == 'gmt':
        fgeodict,tmp = GMTGrid.getFileGeoDict(gridfile)
    else:
        fgeodict,tmp = GDALGrid.getFileGeoDict(gridfile)
    dx,dy = (fgeodict.dx,fgeodict.dy)
    sdict = GeoDict.createDictFromBox(xmin,xmax,ymin,ymax,dx,dy)
    if gridtype == 'gmt':
        grid = GMTGrid.load(gridfile,samplegeodict=sdict,resample=True,method=method,doPadding=True)
    else:
        grid = GDALGrid.load(gridfile,samplegeodict=sdict,resample=True,method=method,doPadding=True)

    return sampleFromGrid(grid,xypoints)
Code example #14
File: grid2d_test.py Project: usgs/MapIO
def test_project():
    data = np.array([[0,0,1,0,0],
                     [0,0,1,0,0],
                     [1,1,1,1,1],
                     [0,0,1,0,0],
                     [0,0,1,0,0]],dtype=np.int32)
    geodict = {'xmin':50,'xmax':50.4,'ymin':50,'ymax':50.4,'dx':0.1,'dy':0.1,'nx':5,'ny':5}
    gd = GeoDict(geodict)
    grid = GDALGrid(data,gd)
    projstr = "+proj=utm +zone=40 +north +ellps=WGS84 +datum=WGS84 +units=m +no_defs "
    newgrid = grid.project(projstr,method='nearest')

    tdir = tempfile.mkdtemp()
    try:
        outfile = os.path.join(tdir,'output.bil')
        grid.save(outfile)
        with rasterio.open(outfile) as src:
            aff = src.transform
            data = src.read(1)
            src_crs = CRS().from_string(GeoDict.DEFAULT_PROJ4).to_dict()
            dst_crs = CRS().from_string(projstr).to_dict()
            nrows,ncols = data.shape
            left = aff.xoff
            top = aff.yoff
            right,bottom = aff * (ncols-1, nrows-1)
            dst_transform,width,height = calculate_default_transform(src_crs,dst_crs,
                                                                     ncols,nrows,
                                                                     left,bottom,
                                                                     right,top)
            destination = np.zeros((height,width))
            reproject(data,
                      destination,
                      src_transform=aff,
                      src_crs=src_crs,
                      dst_transform=dst_transform,
                      dst_crs=dst_crs,
                      src_nodata=src.nodata,
                      dst_nodata=np.nan,
                      resampling=Resampling.nearest)
            x = 1
    except:
        pass
    finally:
        shutil.rmtree(tdir)
Code example #15
File: sites.py Project: klin-usgs/shakemap
def _getFileGeoDict(fname):
    geodict = None
    try:
        geodict = GMTGrid.getFileGeoDict(fname)
    except Exception as msg1:
        try:
            geodict = GDALGrid.getFileGeoDict(fname)
        except Exception as msg2:
            msg = 'File geodict failure with %s - error messages: "%s"\n "%s"' % (fname,str(msg1),str(msg2))
            raise ShakeMapException(msg)
    return geodict
Code example #16
File: sample.py Project: mhearne-usgs/lsprocess
def getGridType(gridfile):
    gridtype = None
    try:
        fdict = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception as error:
        try:
            fdict = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except:
            pass
    return gridtype
Code example #17
def _getFileGeoDict(fname):
    geodict = None
    try:
        geodict = GMTGrid.getFileGeoDict(fname)
    except Exception as msg1:
        try:
            geodict = GDALGrid.getFileGeoDict(fname)
        except Exception as msg2:
            msg = 'File geodict failure with %s - error messages: "%s"\n "%s"' % (
                fname, str(msg1), str(msg2))
            raise ShakeMapException(msg)
    return geodict
Code example #18
def get_file_type(file):
    """Internal method to figure out which file type (GMT or GDAL) the population/country code 
    grid files are.
    """
    if GMTGrid.getFileType(file) == 'unknown':
        try:
            gdict = GDALGrid.getFileGeoDict(file)
            return GDALGrid
        except:
            pass
    else:
        return GMTGrid
    return None
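Because this helper returns the grid class itself (or None), callers can load a file without branching on format; a sketch with a hypothetical population grid:

gridclass = get_file_type('lspop2016.flt')  # hypothetical file
if gridclass is not None:
    popgrid = gridclass.load('lspop2016.flt')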
Code example #19
    def execute(self):
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('raster module can only operate on '
                                      'gridded data, not sets of points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        config = ConfigObj(config_file)

        # create GIS-readable .flt files of imt and uncertainty
        self.logger.info('Creating GIS grids...')
        layers = config['products']['raster']['layers']
        for layer in layers:
            fileimt = oq_to_file(layer)
            imtdict = container.getIMTGrids(layer, 'Larger')
            mean_grid = imtdict['mean']
            std_grid = imtdict['std']
            mean_gdal = GDALGrid.copyFromGrid(mean_grid)
            std_gdal = GDALGrid.copyFromGrid(std_grid)
            mean_fname = os.path.join(datadir, '%s_mean.flt' % fileimt)
            std_fname = os.path.join(datadir, '%s_std.flt' % fileimt)
            self.logger.info('Saving %s...' % mean_fname)
            mean_gdal.save(mean_fname)
            self.logger.info('Saving %s...' % std_fname)
            std_gdal.save(std_fname)
Code example #20
File: logisticmodel.py Project: usgs/groundfailure
def getFileType(filename):
    """Determine whether input file is a shapefile or a grid (ESRI or GMT).
    EVENTUALLY WILL BE MOVED TO MAPIO

    :param filename:
      String path to candidate filename.
    :returns:
      String, one of 'shapefile','grid','unknown'.

    """
    if os.path.isdir(filename):
        return 'dir'
    ftype = GMTGrid.getFileType(filename)
    if ftype != 'unknown':
        return 'gmt'
    #skip over ESRI header files
    if filename.endswith('.hdr'):
        return 'unknown'
    try:
        GDALGrid.getFileGeoDict(filename)
        return 'esri'
    except:
        pass
    return 'unknown'
Code example #21
File: logisticmodel.py Project: usgs/groundfailure
    def calculate(self, saveinputs=False, slopefile=None, slopediv=1.):
        """Calculate the model

        :param saveinputs: if True, saves all the input layers as Grid2D objects in addition to the model;
          if False, outputs only the model
        :type saveinputs: boolean
        :param slopefile: optional file path to slopefile that will be resampled to the other input files for applying thresholds
        :type slopefile: string
        :param slopediv: number to divide slope by to get to degrees (usually will be default
          of 1.)
        :type slopediv: float

        :returns:
            a dictionary containing the model results and model inputs if saveinputs was set to
            True, see <https://github.com/usgs/groundfailure#api-for-model-output> for a
            description of the structure of this output

        """
        X = eval(self.equation)
        P = 1/(1 + np.exp(-X))
        if self.uncert is not None:
            Xmin = eval(self.equationmin)
            Xmax = eval(self.equationmax)
            Pmin = 1/(1 + np.exp(-Xmin))
            Pmax = 1/(1 + np.exp(-Xmax))
        if slopefile is not None:
            ftype = getFileType(slopefile)
            sampledict = self.shakemap.getGeoDict()
            if ftype == 'gmt':
                slope = GMTGrid.load(slopefile, sampledict, resample=True, method='linear', doPadding=True).getData()/slopediv
                # Apply slope min/max limits
                print('applying slope thresholds')
                P[slope > self.slopemax] = 0.
                P[slope < self.slopemin] = 0.
                if self.uncert is not None:
                    Pmin[slope > self.slopemax] = 0.
                    Pmin[slope < self.slopemin] = 0.
                    Pmax[slope > self.slopemax] = 0.
                    Pmax[slope < self.slopemin] = 0.
            elif ftype == 'esri':
                slope = GDALGrid.load(slopefile, sampledict, resample=True, method='linear', doPadding=True).getData()/slopediv
                # Apply slope min/max limits
                print('applying slope thresholds')
                P[slope > self.slopemax] = 0.
                P[slope < self.slopemin] = 0.
                if self.uncert is not None:
                    Pmin[slope > self.slopemax] = 0.
                    Pmin[slope < self.slopemin] = 0.
                    Pmax[slope > self.slopemax] = 0.
                    Pmax[slope < self.slopemin] = 0.
            else:
                print('Slope file %s does not appear to be a valid GMT or ESRI file, not applying any slope thresholds.' % (slopefile))
        else:
            print('No slope file provided, slope thresholds not applied')
        # Stuff into Grid2D object
        temp = self.shakemap.getShakeDict()
        shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])
        description = {'name': self.modelrefs['shortref'], 'longref': self.modelrefs['longref'], 'units': 'probability',
                       'shakemap': shakedetail, 'parameters': {'slopemin': self.slopemin, 'slopemax': self.slopemax}}
        Pgrid = Grid2D(P, self.geodict)
        rdict = collections.OrderedDict()
        rdict['model'] = {'grid': Pgrid,
                          'label': ('%s Probability') % (self.modeltype.capitalize()),
                          'type': 'output',
                          'description': description}
        if self.uncert is not None:
            rdict['modelmin'] = {'grid': Grid2D(Pmin, self.geodict),
                                 'label': ('%s Probability (-1 std ground motion)') % (self.modeltype.capitalize()),
                                 'type': 'output',
                                 'description': description}
            rdict['modelmax'] = {'grid': Grid2D(Pmax, self.geodict),
                                 'label': ('%s Probability (+1 std ground motion)') % (self.modeltype.capitalize()),
                                 'type': 'output',
                                 'description': description}

        if saveinputs is True:
            for layername, layergrid in list(self.layerdict.items()):
                units = self.units[layername]
                rdict[layername] = {'grid': layergrid,
                                    'label': '%s (%s)' % (layername, units),
                                    'type': 'input',
                                    'description': {'units': units, 'shakemap': shakedetail}}
            for gmused in self.gmused:
                if 'pga' in gmused:
                    units = '%g'
                    getkey = 'pga'
                elif 'pgv' in gmused:
                    units = 'cm/s'
                    getkey = 'pgv'
                elif 'mmi' in gmused:
                    units = 'intensity'
                    getkey = 'mmi'
                else:
                    # Layer is derived from several inputs; skip it
                    continue
                layer = self.shakemap.getLayer(getkey)
                rdict[gmused] = {'grid': layer,
                                 'label': '%s (%s)' % (getkey.upper(), units),
                                 'type': 'input',
                                 'description': {'units': units, 'shakemap': shakedetail}}
                if self.uncert is not None:
                    layer1 = np.exp(np.log(layer.getData()) - self.uncert.getLayer('std'+getkey).getData())
                    rdict[gmused + '-1std'] = {'grid': Grid2D(layer1, self.geodict),
                                               'label': '%s (%s)' % (getkey.upper()+' -1 std', units),
                                               'type': 'input',
                                               'description': {'units': units, 'shakemap': shakedetail}}
                    layer2 = np.exp(np.log(layer.getData()) + self.uncert.getLayer('std'+getkey).getData())
                    rdict[gmused + '+1std'] = {'grid': Grid2D(layer2, self.geodict),
                                               'label': '%s (%s)' % (getkey.upper()+' +1 std', units),
                                               'type': 'input',
                                               'description': {'units': units, 'shakemap': shakedetail}}

        return rdict
Code example #22
    def calculate(self):
        """Calculate the model

        :returns:
            a dictionary containing the model results and model inputs if saveinputs was set to
            True when class was set up, see <https://github.com/usgs/groundfailure#api-for-model-output> for a
            description of the structure of this output

        """
        X = eval(self.equation)
        P = 1 / (1 + np.exp(-X))
        if 'vs30max' in self.config[self.model].keys():
            vs30 = self.layerdict['vs30'].getData()
            P[vs30 > float(self.config[self.model]['vs30max'])] = 0.0
        if 'minpgv' in self.config[self.model].keys():
            pgv = self.shakemap.getLayer('pgv').getData()
            P[pgv < float(self.config[self.model]['minpgv'])] = 0.0
        if 'coverage' in self.config[self.model].keys():
            eqn = self.config[self.model]['coverage']['eqn']
            ind = copy.copy(P)
            P = eval(eqn)
        if self.uncert is not None:
            Xmin = eval(self.equationmin)
            Xmax = eval(self.equationmax)
            Pmin = 1 / (1 + np.exp(-Xmin))
            Pmax = 1 / (1 + np.exp(-Xmax))
            if 'vs30max' in self.config[self.model].keys():
                vs30 = self.layerdict['vs30'].getData()
                Pmin[vs30 > float(self.config[self.model]['vs30max'])] = 0.0
                Pmax[vs30 > float(self.config[self.model]['vs30max'])] = 0.0
            if 'minpgv' in self.config[self.model].keys():
                pgv = self.shakemap.getLayer('pgv').getData()
                Pmin[pgv < float(self.config[self.model]['minpgv'])] = 0.0
                Pmax[pgv < float(self.config[self.model]['minpgv'])] = 0.0
            if 'coverage' in self.config[self.model].keys():
                eqnmin = eqn.replace('P', 'Pmin')
                eqnmax = eqn.replace('P', 'Pmax')
                Pmin = eval(eqnmin)
                Pmax = eval(eqnmax)
        if self.slopefile is not None:
            ftype = getFileType(self.slopefile)
            sampledict = self.shakemap.getGeoDict()
            if ftype == 'gmt':
                if GMTGrid.getFileGeoDict(self.slopefile)[0] == sampledict:
                    slope = GMTGrid.load(
                        self.slopefile).getData() / self.slopediv
                else:
                    slope = GMTGrid.load(
                        self.slopefile,
                        sampledict,
                        resample=True,
                        method='linear',
                        doPadding=True).getData() / self.slopediv
                # Apply slope min/max limits
                print('applying slope thresholds')
                P[slope > self.slopemax] = 0.
                P[slope < self.slopemin] = 0.
                if self.uncert is not None:
                    Pmin[slope > self.slopemax] = 0.
                    Pmin[slope < self.slopemin] = 0.
                    Pmax[slope > self.slopemax] = 0.
                    Pmax[slope < self.slopemin] = 0.
            elif ftype == 'esri':
                if GDALGrid.getFileGeoDict(self.slopefile)[0] == sampledict:
                    slope = GDALGrid.load(
                        self.slopefile).getData() / self.slopediv
                else:
                    slope = GDALGrid.load(
                        self.slopefile,
                        sampledict,
                        resample=True,
                        method='linear',
                        doPadding=True).getData() / self.slopediv
                # Apply slope min/max limits
                print('applying slope thresholds')
                P[slope > self.slopemax] = 0.
                P[slope < self.slopemin] = 0.
                if self.uncert is not None:
                    Pmin[slope > self.slopemax] = 0.
                    Pmin[slope < self.slopemin] = 0.
                    Pmax[slope > self.slopemax] = 0.
                    Pmax[slope < self.slopemin] = 0.
            else:
                print(
                    'Slope file %s does not appear to be a valid GMT or ESRI '
                    'file, not applying any slope thresholds.'
                    % (self.slopefile))
        else:
            print('No slope file provided, slope thresholds not applied')
        # Stuff into Grid2D object
        temp = self.shakemap.getShakeDict()
        shakedetail = '%s_ver%s' % (temp['shakemap_id'],
                                    temp['shakemap_version'])
        description = {
            'name': self.modelrefs['shortref'],
            'longref': self.modelrefs['longref'],
            'units': 'probability',
            'shakemap': shakedetail,
            'parameters': {
                'slopemin': self.slopemin,
                'slopemax': self.slopemax
            }
        }
        Pgrid = Grid2D(P, self.geodict)
        rdict = collections.OrderedDict()
        rdict['model'] = {
            'grid': Pgrid,
            'label': ('%s Probability') % (self.modeltype.capitalize()),
            'type': 'output',
            'description': description
        }
        if self.uncert is not None:
            rdict['modelmin'] = {
                'grid': Grid2D(Pmin, self.geodict),
                'label': '%s Probability (-%0.1f std ground motion)' %
                         (self.modeltype.capitalize(), self.numstd),
                'type': 'output',
                'description': description
            }
            rdict['modelmax'] = {
                'grid': Grid2D(Pmax, self.geodict),
                'label': '%s Probability (+%0.1f std ground motion)' %
                         (self.modeltype.capitalize(), self.numstd),
                'type': 'output',
                'description': description
            }

        if self.saveinputs is True:
            for layername, layergrid in list(self.layerdict.items()):
                units = self.units[layername]
                if units is None:
                    units = ''
                rdict[layername] = {
                    'grid': layergrid,
                    'label': '%s (%s)' % (layername, units),
                    'type': 'input',
                    'description': {
                        'units': units,
                        'shakemap': shakedetail
                    }
                }
            for gmused in self.gmused:
                if 'pga' in gmused:
                    units = '%g'
                    getkey = 'pga'
                elif 'pgv' in gmused:
                    units = 'cm/s'
                    getkey = 'pgv'
                elif 'mmi' in gmused:
                    units = 'intensity'
                    getkey = 'mmi'
                else:
                    continue
                    # Layer is derived from several input layers, skip outputting this layer
                if getkey in rdict:
                    continue
                layer = self.shakemap.getLayer(getkey)
                rdict[getkey] = {
                    'grid': layer,
                    'label': '%s (%s)' % (getkey.upper(), units),
                    'type': 'input',
                    'description': {
                        'units': units,
                        'shakemap': shakedetail
                    }
                }
                if self.uncert is not None:
                    layer1 = np.exp(
                        np.log(layer.getData()) -
                        self.uncert.getLayer('std' + getkey).getData())
                    rdict[getkey + 'modelmin'] = {
                        'grid': Grid2D(layer1, self.geodict),
                        'label': '%s - %0.1f std (%s)' %
                                 (getkey.upper(), self.numstd, units),
                        'type': 'input',
                        'description': {'units': units,
                                        'shakemap': shakedetail}
                    }
                    layer2 = np.exp(
                        np.log(layer.getData()) +
                        self.uncert.getLayer('std' + getkey).getData())
                    rdict[getkey + 'modelmax'] = {
                        'grid': Grid2D(layer2, self.geodict),
                        'label': '%s + %0.1f std (%s)' %
                                 (getkey.upper(), self.numstd, units),
                        'type': 'input',
                        'description': {'units': units,
                                        'shakemap': shakedetail}
                    }
        return rdict
Code example #23
File: spatial.py Project: mhearne-usgs/groundfailure
def quickcut(filename,
             gdict,
             tempname=None,
             extrasamp=5.,
             method='bilinear',
             precise=True,
             cleanup=True,
             verbose=False,
             override=False):
    """
    Use gdal to trim a large global file down quickly so mapio can read it
    efficiently. (Cannot read ShakeMap .xml files; save as .bil first.)

    Args:
        filename (str): File path to original input file (raster).
        gdict (geodict): Geodictionary to cut around and align with.
        tempname (str): File path to desired location of clipped part of
            filename.
        extrasamp (int): Number of extra cells to cut around each edge of
            geodict to have resampling buffer for future steps.
        method (str): If resampling is necessary, method to use.
        precise (bool): If true, will resample to the gdict as closely as
            possible, if False it will just roughly cut around the area of
            interest without changing resolution
        cleanup (bool): if True, delete tempname after reading it back in
        verbose (bool): if True, prints more details
        override (bool): if True, if filename extent is not fully contained by
            gdict, read in the entire file (only used for ShakeMaps)

    Returns: New grid2D layer

    Note: This function uses the subprocess approach because ``gdal.Translate``
        doesn't hang on the command until the file is created which causes
        problems in the next steps.
    """
    if gdict.xmax < gdict.xmin:
        raise Exception('quickcut: your geodict xmax is smaller than xmin')

    try:
        filegdict = GDALGrid.getFileGeoDict(filename)
    except:
        try:
            filegdict = GMTGrid.getFileGeoDict(filename)
        except:
            raise Exception('Cannot get geodict for %s' % filename)

    if tempname is None:
        tempdir = tempfile.mkdtemp()
        tempname = os.path.join(tempdir, 'junk.tif')
        deltemp = True
    else:
        tempdir = None
        deltemp = False

    # if os.path.exists(tempname):
    #     os.remove(tempname)
    #     print('Temporary file already there, removing file')

    filegdict = filegdict[0]

    # Get the right methods for mapio (method) and gdal (method2)
    if method == 'linear':
        method2 = 'bilinear'
    elif method == 'nearest':
        method2 = 'near'
    elif method == 'bilinear':
        method = 'linear'
        method2 = 'bilinear'
    elif method == 'near':
        method = 'nearest'
        method2 = 'near'
    else:
        method2 = method

    if filegdict != gdict:
        # First cut without resampling
        tempgdict = GeoDict.createDictFromBox(gdict.xmin,
                                              gdict.xmax,
                                              gdict.ymin,
                                              gdict.ymax,
                                              filegdict.dx,
                                              filegdict.dy,
                                              inside=True)

        try:
            egdict = filegdict.getBoundsWithin(tempgdict)

            ulx = egdict.xmin - extrasamp * egdict.dx
            uly = egdict.ymax + extrasamp * egdict.dy
            lrx = egdict.xmax + (extrasamp + 1) * egdict.dx
            lry = egdict.ymin - (extrasamp + 1) * egdict.dy

            cmd = 'gdal_translate -a_srs EPSG:4326 -of GTiff -projwin %1.8f \
            %1.8f %1.8f %1.8f -r %s %s %s' % (ulx, uly, lrx, lry, method2,
                                              filename, tempname)
        except Exception as e:
            if override:
                # When ShakeMap is being loaded, sometimes they won't align
                # right because it's already cut to the area, so just load
                # the whole file
                cmd = 'gdal_translate -a_srs EPSG:4326 -of GTiff -r %s %s %s' \
                      % (method2, filename, tempname)
            else:
                raise Exception('Failed to cut layer: %s' % e)

        rc, so, se = get_command_output(cmd)
        if not rc:
            raise Exception(se.decode())
        else:
            if verbose:
                print(so.decode())

        newgrid2d = GDALGrid.load(tempname)
        if precise:
            # Resample to exact geodictionary
            newgrid2d = newgrid2d.interpolate2(gdict, method=method)
        if cleanup:
            os.remove(tempname)

        if deltemp:
            shutil.rmtree(tempdir)

    else:
        ftype = GMTGrid.getFileType(filename)
        if ftype != 'unknown':
            newgrid2d = GMTGrid.load(filename)
        elif filename.endswith('.xml'):
            newgrid2d = ShakeGrid.load(filename)
        else:
            newgrid2d = GDALGrid.load(filename)

    return newgrid2d
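A usage sketch for quickcut, assuming MapIO, the GDAL command-line tools, and the project's get_command_output helper are all available; the bounds and file name are hypothetical:

from mapio.geodict import GeoDict

# 0.01-degree sampling grid over a small hypothetical area of interest.
gdict = GeoDict.createDictFromBox(-118.5, -118.0, 34.0, 34.5, 0.01, 0.01)
slope = quickcut('global_slope.tif', gdict, method='bilinear')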
Code example #24
def computeHagg(grid2D, proj='moll', probthresh=0., shakefile=None,
                shakethreshtype='pga', shakethresh=0., stdgrid2D=None,
                stdtype='full', maxP=1., sill1=None, range1=None):
    """
    Computes the Aggregate Hazard (Hagg), which is equal to the
    probability * area of each grid cell. For models that compute areal
    coverage, this is equivalent to the total predicted area affected in km2.

    Args:
        grid2D: grid2D object of model output.
        proj: projection to use to obtain equal area, 'moll'  mollweide, or
            'laea' lambert equal area.
        probthresh: Probability threshold, any values less than this will not
            be included in aggregate hazard estimation.
        shakefile: Optional, path to shakemap file to use for ground motion
            threshold.
        shakethreshtype: Optional, Type of ground motion to use for
            shakethresh, 'pga', 'pgv', or 'mmi'.
        shakethresh: Optional, Float or list of shaking thresholds in %g for
            pga, cm/s for pgv, float for mmi.
        stdgrid2D: grid2D object of model standard deviations (optional)
        stdtype (str): assumption of spatial correlation used to compute
            the stdev of the statistics, 'max', 'min', 'mean' of max and min,
            or 'full' (default) which estimates the range of correlation and
            accounts for covariance. Will return 'mean' if
            range and sill cannot be estimated.
        maxP (float): the maximum possible probability of the model
        sill1 (float): If known, the sill of the variogram of grid2D, will be
            estimated if None and stdtype='full'
        range1 (float): If known, the range of the variogram of grid2D, will
            be estimated if None and stdtype='full'

    Returns:
        dict: Dictionary with keys:
            hagg_#g where # is the shakethresh
            hagg_std_# if stdgrid2D is supplied (stdev of hagg)
            hlim_#, the maximum exposure value possible with the
            applied thresholds and given maxP value
            cell_area_km2 grid cell area
            p_hagg_# beta distribution shape factor p (sometimes called alpha)
            q_hagg_# beta distribution shape factor q (sometimes called beta)
    """
    bounds = grid2D.getBounds()
    lat0 = np.mean((bounds[2], bounds[3]))
    lon0 = np.mean((bounds[0], bounds[1]))
    projs = ('+proj=%s +lat_0=%f +lon_0=%f +x_0=0 +y_0=0 +ellps=WGS84 '
             '+units=km +no_defs' % (proj, lat0, lon0))
    geodict = grid2D.getGeoDict()

    if shakefile is not None:
        if shakethresh < 0.:
            raise Exception('shaking threshold must be equal or greater '
                            'than zero')
        # resample shakemap to grid2D
        temp = ShakeGrid.load(shakefile)
        shk = temp.getLayer(shakethreshtype)
        shk = shk.interpolate2(geodict)
        if shk.getGeoDict() != geodict:
            raise Exception('shakemap was not resampled to exactly the same '
                            'geodict as the model')

    if probthresh < 0.:
        raise Exception('probability threshold must be equal or greater '
                        'than zero')

    grid = grid2D.project(projection=projs, method='bilinear')
    geodictRS = grid.getGeoDict()
    
    cell_area_km2 = geodictRS.dx * geodictRS.dy
    
    model = grid.getData().copy()

    Hagg = {}

    if shakefile is not None:
        shkgrid = shk.project(projection=projs)
        shkdat = shkgrid.getData()
        model[shkdat < shakethresh] = float('nan')
    else:
        shakethresh = 0.
        shkdat = None

    mu = np.nansum(model[model >= probthresh] * cell_area_km2)
    Hagg['hagg_%1.2fg' % (shakethresh/100.,)] = mu
    Hagg['cell_area_km2'] = cell_area_km2
    N = np.nansum([model >= probthresh])
    #Hagg['N_%1.2fg' % (shakethresh/100.,)] = N
    hlim = cell_area_km2*N*maxP
    Hagg['hlim_%1.2fg' % (shakethresh/100.,)] = hlim

    if stdgrid2D is not None:
        stdgrid = GDALGrid.copyFromGrid(stdgrid2D) # Make a copy
        stdgrid = stdgrid.project(projection=projs, method='bilinear')
        std = stdgrid.getData().copy()
        if np.nanmax(std) > 0. and np.nanmax(model) >= probthresh:
            totalmin = cell_area_km2 * np.sqrt(np.nansum((std[model >= probthresh])**2.))
            totalmax = np.nansum(std[model >= probthresh] * cell_area_km2)
            if stdtype == 'full':
                if sill1 is None or range1 is None:
                    range1, sill1 = semivario(grid.getData().copy(), probthresh,
                                              shakethresh=shakethresh,
                                              shakegrid=shkdat)
                if range1 is None:
                    # Use mean
                    Hagg['hagg_std_%1.2fg' % (shakethresh/100.,)] = (totalmax+totalmin)/2.
                else:
                    # Zero out std at cells where the model probability was below
                    # the threshold because we aren't including those cells in Hagg
                    stdz = std.copy()
                    stdz[model < probthresh] = 0.
                    svar1 = svar(stdz, range1, sill1, scale=cell_area_km2)
                    Hagg['hagg_std_%1.2fg' % (shakethresh/100.,)] = np.sqrt(svar1)
                    #Hagg['hagg_range_%1.2fg' % (shakethresh/100.,)] = range1
                    #Hagg['hagg_sill_%1.2fg' % (shakethresh/100.,)] = sill1 
            elif stdtype == 'max':
                Hagg['hagg_std_%1.2fg' % (shakethresh/100.,)] = totalmax
            elif stdtype == 'min':
                Hagg['hagg_std_%1.2fg' % (shakethresh/100.,)] = totalmin
            else:
                Hagg['hagg_std_%1.2fg' % (shakethresh/100.,)] = (totalmax+totalmin)/2.

            var = Hagg['hagg_std_%1.2fg' % (shakethresh/100.,)]**2.
            # Beta distribution shape factors
            Hagg['p_hagg_%1.2fg' % (shakethresh/100.,)] = (mu/hlim)*((hlim*mu-mu**2)/var-1)
            Hagg['q_hagg_%1.2fg' % (shakethresh/100.,)] = (1-mu/hlim)*((hlim*mu-mu**2)/var-1)
        else:
            print('No model values above threshold, skipping uncertainty '
                  'and filling with zeros')
            Hagg['hagg_std_%1.2fg' % (shakethresh/100.,)] = 0.
            Hagg['p_hagg_%1.2fg' % (shakethresh/100.,)] = 0.
            Hagg['q_hagg_%1.2fg' % (shakethresh/100.,)] = 0.
    else:
        print('No uncertainty provided, filling with zeros')
        Hagg['hagg_std_%1.2fg' % (shakethresh/100.,)] = 0.
        Hagg['p_hagg_%1.2fg' % (shakethresh/100.,)] = 0.
        Hagg['q_hagg_%1.2fg' % (shakethresh/100.,)] = 0.

    return Hagg
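A self-contained sketch exercising computeHagg on synthetic data; with no shakefile and no uncertainty grid, the semivariogram helpers are never reached:

import numpy as np
from mapio.geodict import GeoDict
from mapio.grid2d import Grid2D

gd = GeoDict.createDictFromBox(-118.0, -117.5, 34.0, 34.5, 0.05, 0.05)
probs = np.random.uniform(0.0, 0.2, (gd.ny, gd.nx))
model = Grid2D(data=probs, geodict=gd)
hagg = computeHagg(model, probthresh=0.05)
print(hagg['hagg_0.00g'], hagg['cell_area_km2'])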
Code example #25
File: contourmap.py Project: kmarano-usgs/pager
def draw_contour(shakefile,
                 popfile,
                 oceanfile,
                 oceangridfile,
                 cityfile,
                 basename,
                 borderfile=None,
                 is_scenario=False):
    """Create a contour map showing population (greyscale) underneath contoured MMI.

    :param shakefile:
      String path to ShakeMap grid.xml file.
    :param popfile:
      String path to GDALGrid-compliant file containing population data.
    :param oceanfile:
      String path to file containing ocean vector data in a format compatible with fiona.
    :param oceangridfile:
      String path to file containing ocean grid data .
    :param cityfile:
      String path to file containing GeoNames cities data.
    :param basename:
      String path containing desired output PDF base name, i.e., /home/pager/exposure.  ".pdf" and ".png" files will
      be made.
    :returns:
      Tuple containing: 
        - Name of PNG file created, or None if PNG output not specified.
        - Cities object containing the cities that were rendered on the contour map.
    """
    #load the shakemap - for the time being, we're interpolating the
    #population data to the shakemap, which would be important
    #if we were doing math with the pop values.  We're not, so I think it's ok.
    shakegrid = ShakeGrid.load(shakefile, adjust='res')
    gd = shakegrid.getGeoDict()

    #Retrieve the epicenter - this will get used on the map
    clat = shakegrid.getEventDict()['lat']
    clon = shakegrid.getEventDict()['lon']

    #Load the population data, sample to shakemap
    popgrid = GDALGrid.load(popfile, samplegeodict=gd, resample=True)

    #load the ocean grid file (has 1s in ocean, 0s over land)
    #having this file saves us almost 30 seconds!
    oceangrid = GDALGrid.load(oceangridfile, samplegeodict=gd, resample=True)

    #load the cities data, limit to cities within shakemap bounds
    allcities = Cities.fromDefault()
    cities = allcities.limitByBounds((gd.xmin, gd.xmax, gd.ymin, gd.ymax))

    #define the map
    #first cope with stupid 180 meridian
    height = (gd.ymax - gd.ymin) * 111.191
    if gd.xmin < gd.xmax:
        width = (gd.xmax - gd.xmin) * np.cos(np.radians(clat)) * 111.191
        xmin, xmax, ymin, ymax = (gd.xmin, gd.xmax, gd.ymin, gd.ymax)
    else:
        xmin, xmax, ymin, ymax = (gd.xmin, gd.xmax, gd.ymin, gd.ymax)
        xmax += 360
        width = (
            (gd.xmax + 360) - gd.xmin) * np.cos(np.radians(clat)) * 111.191

    aspect = width / height

    #if the aspect is not 1, then trim bounds in x or y direction as appropriate
    if width > height:
        dw = (width - height) / 2.0  #this is width in km
        xmin = xmin + dw / (np.cos(np.radians(clat)) * 111.191)
        xmax = xmax - dw / (np.cos(np.radians(clat)) * 111.191)
        width = (xmax - xmin) * np.cos(np.radians(clat)) * 111.191
    if height > width:
        dh = (height - width) / 2.0  #this is height in km
        ymin = ymin + dh / 111.191
        ymax = ymax - dh / 111.191
        height = (ymax - ymin) * 111.191

    aspect = width / height
    figheight = FIGWIDTH / aspect
    bbox = (xmin, ymin, xmax, ymax)
    bounds = (xmin, xmax, ymin, ymax)
    figsize = (FIGWIDTH, figheight)

    #Create the MercatorMap object, which holds a separate but identical
    #axes object used to determine collisions between city labels.
    mmap = MercatorMap(bounds, figsize, cities, padding=0.5)
    fig = mmap.figure
    ax = mmap.axes
    #this needs to be done here so that city label collision detection will work
    fig.canvas.draw()

    clon = xmin + (xmax - xmin) / 2
    clat = ymin + (ymax - ymin) / 2
    geoproj = mmap.geoproj
    proj = mmap.proj

    #project our population grid to the map projection
    projstr = proj.proj4_init
    popgrid_proj = popgrid.project(projstr)
    popdata = popgrid_proj.getData()
    newgd = popgrid_proj.getGeoDict()

    # Use our GMT-inspired palette class to create population and MMI colormaps
    popmap = ColorPalette.fromPreset('pop')
    mmimap = ColorPalette.fromPreset('mmi')

    #set the image extent to that of the data
    img_extent = (newgd.xmin, newgd.xmax, newgd.ymin, newgd.ymax)
    plt.imshow(popdata,
               origin='upper',
               extent=img_extent,
               cmap=popmap.cmap,
               vmin=popmap.vmin,
               vmax=popmap.vmax,
               zorder=POP_ZORDER,
               interpolation='nearest')

    #draw 10m res coastlines
    ax.coastlines(resolution="10m", zorder=COAST_ZORDER)

    #draw country borders using natural earth data set
    if borderfile is not None:
        borders = ShapelyFeature(
            Reader(borderfile).geometries(), ccrs.PlateCarree())
        ax.add_feature(borders,
                       zorder=COAST_ZORDER,
                       edgecolor='black',
                       linewidth=2,
                       facecolor='none')

    #clip the ocean data to the shakemap
    bbox = (gd.xmin, gd.ymin, gd.xmax, gd.ymax)
    oceanshapes = _clip_bounds(bbox, oceanfile)

    ax.add_feature(ShapelyFeature(oceanshapes, crs=geoproj),
                   facecolor=WATERCOLOR,
                   zorder=OCEAN_ZORDER)

    #It turns out that when presented with a map that crosses the 180 meridian,
    #the matplotlib/cartopy contouring routine thinks that the 180 meridian is a map boundary
    #and only plots one side of the contour.  Contouring the geographic MMI data and then
    #projecting the resulting contour vectors does the trick.  Sigh.

    #define contour grid spacing
    contoury = np.linspace(ymin, ymax, gd.ny)
    contourx = np.linspace(xmin, xmax, gd.nx)

    #smooth the MMI data for contouring
    mmi = shakegrid.getLayer('mmi').getData()
    smoothed_mmi = gaussian_filter(mmi, FILTER_SMOOTH)

    #create masked versions of the MMI data from the ocean grid:
    #oceanmask hides ocean cells (used for the solid land contours) and
    #landmask hides land cells (used for the dashed ocean contours)
    landmask = np.ma.masked_where(oceangrid._data == 0.0, smoothed_mmi)
    oceanmask = np.ma.masked_where(oceangrid._data == 1.0, smoothed_mmi)

    #contour the data
    land_contour = plt.contour(contourx,
                               contoury,
                               np.flipud(oceanmask),
                               linewidths=3.0,
                               linestyles='solid',
                               zorder=LANDC_ZORDER,
                               cmap=mmimap.cmap,
                               vmin=mmimap.vmin,
                               vmax=mmimap.vmax,
                               levels=np.arange(0.5, 10.5, 1.0),
                               transform=geoproj)

    ocean_contour = plt.contour(contourx,
                                contoury,
                                np.flipud(landmask),
                                linewidths=2.0,
                                linestyles='dashed',
                                zorder=OCEANC_ZORDER,
                                cmap=mmimap.cmap,
                                vmin=mmimap.vmin,
                                vmax=mmimap.vmax,
                                levels=np.arange(0.5, 10.5, 1.0),
                                transform=geoproj)

    #the idea here is to plot invisible MMI contours at integer levels and then label them.
    #clabel won't let us customize the label text directly, which in this case is ok,
    #because grabbing the label positions gives us an easy way to draw the MMI labels
    #as roman numerals ourselves.
    cs_land = plt.contour(contourx,
                          contoury,
                          np.flipud(oceanmask),
                          linewidths=0.0,
                          levels=np.arange(0, 11),
                          zorder=CLABEL_ZORDER,
                          transform=geoproj)

    clabel_text = ax.clabel(cs_land,
                            np.arange(0, 11),
                            colors='k',
                            zorder=CLABEL_ZORDER,
                            fmt='%.0f',
                            fontsize=40)
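    # Assumption: MMI_LABELS is a module-level dict mapping the numeric label
    # strings produced by clabel (e.g. '5') to Roman numerals (e.g. 'V')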
    for clabel in clabel_text:
        x, y = clabel.get_position()
        label_str = clabel.get_text()
        roman_label = MMI_LABELS[label_str]
        th = plt.text(x,
                      y,
                      roman_label,
                      zorder=CLABEL_ZORDER,
                      ha='center',
                      va='center',
                      color='black',
                      weight='normal',
                      size=16)
        th.set_path_effects([
            path_effects.Stroke(linewidth=2.0, foreground='white'),
            path_effects.Normal()
        ])

    cs_ocean = plt.contour(contourx,
                           contoury,
                           np.flipud(landmask),
                           linewidths=0.0,
                           levels=np.arange(0, 11),
                           zorder=CLABEL_ZORDER,
                           transform=geoproj)

    clabel_text = ax.clabel(cs_ocean,
                            np.arange(0, 11),
                            colors='k',
                            zorder=CLABEL_ZORDER,
                            fmt='%.0f',
                            fontsize=40)
    for clabel in clabel_text:
        x, y = clabel.get_position()
        label_str = clabel.get_text()
        roman_label = MMI_LABELS[label_str]
        th = plt.text(x,
                      y,
                      roman_label,
                      zorder=CLABEL_ZORDER,
                      ha='center',
                      va='center',
                      color='black',
                      weight='normal',
                      size=16)
        th.set_path_effects([
            path_effects.Stroke(linewidth=2.0, foreground='white'),
            path_effects.Normal()
        ])

    #draw meridians and parallels using Cartopy's functions for that
    gl = ax.gridlines(draw_labels=True,
                      linewidth=2,
                      color=(0.9, 0.9, 0.9),
                      alpha=0.5,
                      linestyle='-',
                      zorder=GRID_ZORDER)
    gl.xlabels_top = False
    gl.xlabels_bottom = False
    gl.ylabels_left = False
    gl.ylabels_right = False
    gl.xlines = True
    step = 1

    #let's floor/ceil the edges to nearest half a degree
    gxmin = np.floor(xmin * 2) / 2
    gxmax = np.ceil(xmax * 2) / 2
    gymin = np.floor(ymin * 2) / 2
    gymax = np.ceil(ymax * 2) / 2

    xlocs = np.linspace(gxmin, gxmax + 0.5, num=5)
    ylocs = np.linspace(gymin, gymax + 0.5, num=5)

    gl.xlocator = mticker.FixedLocator(xlocs)
    gl.ylocator = mticker.FixedLocator(ylocs)
    gl.xformatter = LONGITUDE_FORMATTER
    gl.yformatter = LATITUDE_FORMATTER
    gl.xlabel_style = {'size': 15, 'color': 'black'}
    gl.ylabel_style = {'size': 15, 'color': 'black'}

    #TODO - figure out x/y axes data coordinates corresponding to 10% from left
    #and 10% from top
    #use geoproj and proj
    dleft = 0.01
    dtop = 0.97
    proj_str = proj.proj4_init
    merc_to_dd = pyproj.Proj(proj_str)
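    # merc_to_dd is a pyproj projection built from the map's proj4 string;
    # calling it with inverse=True below converts projected x/y back to
    # lon/lat in decimal degrees (hence the _dd suffix)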

    #use built-in transforms to get from axes units to data units
    display_to_data = ax.transData.inverted()
    axes_to_display = ax.transAxes

    #these are x,y coordinates in projected space
    yleft, t1 = display_to_data.transform(
        axes_to_display.transform((dleft, 0.5)))
    t2, xtop = display_to_data.transform(axes_to_display.transform(
        (0.5, dtop)))
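    # naming note: 'yleft' is the projected x coordinate of the column where
    # the latitude (y-axis) labels will be drawn, and 'xtop' is the projected
    # y coordinate of the row where the longitude (x-axis) labels will go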

    #these are coordinates in lon,lat space
    yleft_dd, t1_dd = merc_to_dd(yleft, t1, inverse=True)
    t2_dd, xtop_dd = merc_to_dd(t2, xtop, inverse=True)

    #drawing our own tick labels INSIDE the plot, as Cartopy doesn't seem to support this.
    yrange = ymax - ymin
    xrange = xmax - xmin
    for xloc in gl.xlocator.locs:
        outside = xloc < xmin or xloc > xmax
        #don't draw labels when we're too close to either edge
        near_edge = (xloc - xmin) < (xrange * 0.1) or (xmax - xloc) < (xrange *
                                                                       0.1)
        if outside or near_edge:
            continue
        if xloc < 0:
            xtext = r'$%.1f^\circ$W' % (abs(xloc))
        else:
            xtext = r'$%.1f^\circ$E' % (abs(xloc))
        ax.text(xloc,
                xtop_dd,
                xtext,
                fontsize=14,
                zorder=GRID_ZORDER,
                ha='center',
                fontname=DEFAULT_FONT,
                transform=ccrs.Geodetic())

    for yloc in gl.ylocator.locs:
        outside = yloc < gd.ymin or yloc > gd.ymax
        #don't draw labels when we're too close to either edge
        near_edge = (yloc - gd.ymin) < (yrange * 0.1) or (gd.ymax - yloc) < (
            yrange * 0.1)
        if outside or near_edge:
            continue
        if yloc < 0:
            ytext = r'$%.1f^\circ$S' % (abs(yloc))
        else:
            ytext = r'$%.1f^\circ$N' % (abs(yloc))
        ax.text(yleft_dd,
                yloc,
                ytext,
                fontsize=14,
                zorder=GRID_ZORDER,
                va='center',
                fontname=DEFAULT_FONT,
                transform=ccrs.Geodetic())

    #draw cities
    mapcities = mmap.drawCities(shadow=True, zorder=CITIES_ZORDER)

    #draw the figure border thickly
    #TODO - figure out how to draw map border
    # bwidth = 3
    # ax.spines['top'].set_visible(True)
    # ax.spines['left'].set_visible(True)
    # ax.spines['bottom'].set_visible(True)
    # ax.spines['right'].set_visible(True)
    # ax.spines['top'].set_linewidth(bwidth)
    # ax.spines['right'].set_linewidth(bwidth)
    # ax.spines['bottom'].set_linewidth(bwidth)
    # ax.spines['left'].set_linewidth(bwidth)

    #Get the corner of the map with the lowest population
    corner_rect, filled_corner = _get_open_corner(popgrid, ax)
    clat2 = round_to_nearest(clat, 1.0)
    clon2 = round_to_nearest(clon, 1.0)

    #draw a little globe in the corner showing in small-scale where the earthquake is located.
    proj = ccrs.Orthographic(central_latitude=clat2, central_longitude=clon2)
    ax2 = fig.add_axes(corner_rect, projection=proj)
    ax2.add_feature(cartopy.feature.OCEAN,
                    zorder=0,
                    facecolor=WATERCOLOR,
                    edgecolor=WATERCOLOR)
    ax2.add_feature(cartopy.feature.LAND, zorder=0, edgecolor='black')
    ax2.plot([clon2], [clat2],
             'w*',
             linewidth=1,
             markersize=16,
             markeredgecolor='k',
             markerfacecolor='r')
    ax2.gridlines()
    ax2.set_global()
    ax2.outline_patch.set_edgecolor('black')
    ax2.outline_patch.set_linewidth(2)

    #Draw the map scale in the unoccupied lower corner.
    corner = 'lr'
    if filled_corner == 'lr':
        corner = 'll'
    draw_scale(ax, corner, pady=0.05, padx=0.05)

    #Draw the epicenter as a black star
    plt.sca(ax)
    plt.plot(clon,
             clat,
             'k*',
             markersize=16,
             zorder=EPICENTER_ZORDER,
             transform=geoproj)

    if is_scenario:
        plt.text(clon,
                 clat,
                 'SCENARIO',
                 fontsize=64,
                 zorder=WATERMARK_ZORDER,
                 transform=geoproj,
                 alpha=0.2,
                 color='red',
                 horizontalalignment='center')

    #create pdf and png output file names
    pdf_file = basename + '.pdf'
    png_file = basename + '.png'

    #save to pdf and png
    plt.savefig(pdf_file)
    plt.savefig(png_file)

    return (pdf_file, png_file, mapcities)
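
# A minimal standalone sketch (not part of the original source) of the
# dateline-aware width calculation used above; the 111.191 km-per-degree
# factor and the +360 shift come from the function itself.
import numpy as np

def map_width_km(xmin, xmax, clat):
    """Approximate east-west map width in km, handling 180-meridian crossings."""
    if xmin < xmax:
        dlon = xmax - xmin
    else:  # bounds straddle the 180 meridian
        dlon = (xmax + 360) - xmin
    return dlon * np.cos(np.radians(clat)) * 111.191

# A map spanning 170E to 170W (20 degrees of longitude) at latitude 40N:
# map_width_km(170, -170, 40) -> roughly 1703 km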
Code example #28
File: newmark.py  Project: mhearne-usgs/groundfailure
def godt2008(shakefile, config, uncertfile=None, saveinputs=False, regressionmodel='J_PGA', bounds=None, slopediv=100., codiv=10.):
    """ This function runs the Godt et al. (2008) global method for a given ShakeMap. The Factor of Safety
    is calculated using infinite slope analysis assumuing dry conditions. The method uses threshold newmark
    displacement and estimates areal coverage by doing the calculations for each slope quantile
    TO DO - add 'all' - averages Dn from all four equations, add term to convert PGA and PGV to Ia and use other equations, add Ambraseys and Menu (1988) option

    :param shakefile: url or filepath to shakemap xml file
    :type shakefile: string
    :param config: ConfigObj of config file containing inputs required for running the model
    :type config: ConfigObj
    :param uncertfile: optional url or filepath to shakemap uncertainty xml file (ground motion uncertainty is not yet implemented)
    :type uncertfile: string
    :param saveinputs: Whether or not to return the model input layers; False (default) returns only the model output (one layer)
    :type saveinputs: boolean
    :param regressionmodel:
        Newmark displacement regression model to use
        'J_PGA' (default) - PGA-based model from Jibson (2007) - equation 6
        'J_PGA_M' - PGA and M-based model from Jibson (2007) - equation 7
        'RS_PGA_M' - PGA and M-based model from Rathje and Saygili (2009)
        'RS_PGA_PGV' - PGA and PGV-based model from Saygili and Rathje (2008) - equation 6
    :type regressionmodel: string
    :param probtype: Method used to estimate probability. Entering 'jibson2000' uses equation 5 from Jibson et al. (2000) to estimate probability from Newmark displacement. 'threshold' uses a specified threshold of Newmark displacement (defined in config file) and assumes anything greater than this threshold fails
    :type probtype: string
    :param bounds: Optional dictionary of boundaries to run the model over, with keys 'xmin', 'xmax', 'ymin', 'ymax'; must fall within the ShakeMap extent
    :type bounds: dictionary
    :param slopediv: Divide slope by this number to get slope in degrees (Verdin datasets need to be divided by 100)
    :type slopediv: float
    :param codiv: Divide cohesion by this number to get reasonable numbers (for the Godt method, need to divide by 10 because that is how it was calibrated, but values are reasonable without dividing for regular analysis)
    :type codiv: float

    :returns maplayers:  Dictionary containing output and input layers (if saveinputs=True) along with metadata formatted like maplayers['layer name']={'grid': mapio grid2D object, 'label': 'label for colorbar and top line of subtitle', 'type': 'output or input to model', 'description': 'detailed description of layer for subtitle, potentially including source information'}
    :type maplayers: OrderedDict

    :raises NameError: when unable to parse the config correctly (probably a formatting issue in the configfile) or when unable to find the shakefile (Shakemap URL or filepath) - these cause program to end
    """

    # Empty refs
    slopesref = 'unknown'
    slopelref = 'unknown'
    cohesionlref = 'unknown'
    cohesionsref = 'unknown'
    frictionsref = 'unknown'
    frictionlref = 'unknown'
    modellref = 'unknown'
    modelsref = 'unknown'

    if uncertfile is not None:
        print('ground motion uncertainty option not implemented yet')

    # Parse config
    try:  # May want to add error handling so if refs aren't given, just includes unknown
        slopefilepath = config['mechanistic_models']['godt_2008']['layers']['slope']['filepath']
        slopeunits = config['mechanistic_models']['godt_2008']['layers']['slope']['units']
        cohesionfile = config['mechanistic_models']['godt_2008']['layers']['cohesion']['file']
        cohesionunits = config['mechanistic_models']['godt_2008']['layers']['cohesion']['units']
        frictionfile = config['mechanistic_models']['godt_2008']['layers']['friction']['file']
        frictionunits = config['mechanistic_models']['godt_2008']['layers']['friction']['units']

        thick = float(config['mechanistic_models']['godt_2008']['parameters']['thick'])
        uwt = float(config['mechanistic_models']['godt_2008']['parameters']['uwt'])
        nodata_cohesion = float(config['mechanistic_models']['godt_2008']['parameters']['nodata_cohesion'])
        nodata_friction = float(config['mechanistic_models']['godt_2008']['parameters']['nodata_friction'])
        dnthresh = float(config['mechanistic_models']['godt_2008']['parameters']['dnthresh'])
        fsthresh = float(config['mechanistic_models']['godt_2008']['parameters']['fsthresh'])
        acthresh = float(config['mechanistic_models']['godt_2008']['parameters']['acthresh'])
    except Exception as e:
        raise NameError('Could not parse configfile, %s' % e)

    # TODO: add error catching on units, make sure they are what they should be for this model

    try:  # Try to fetch source information from config
        modelsref = config['mechanistic_models']['godt_2008']['shortref']
        modellref = config['mechanistic_models']['godt_2008']['longref']
        slopesref = config['mechanistic_models']['godt_2008']['layers']['slope']['shortref']
        slopelref = config['mechanistic_models']['godt_2008']['layers']['slope']['longref']
        cohesionsref = config['mechanistic_models']['godt_2008']['layers']['cohesion']['shortref']
        cohesionlref = config['mechanistic_models']['godt_2008']['layers']['cohesion']['longref']
        frictionsref = config['mechanistic_models']['godt_2008']['layers']['friction']['shortref']
        frictionlref = config['mechanistic_models']['godt_2008']['layers']['friction']['longref']
    except:
        print('Was not able to retrieve all references from config file. Continuing')

    # Load in shakefile
    if not os.path.isfile(shakefile):
        if isURL(shakefile):
            shakefile = getGridURL(shakefile)  # returns a file object
        else:
            raise NameError('Could not find "%s" as a file or a valid url' % (shakefile))

    shkgdict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
    if bounds is not None:  # Make sure bounds are within ShakeMap Grid
        if shkgdict.xmin > bounds['xmin'] or shkgdict.xmax < bounds['xmax'] or shkgdict.ymin > bounds['ymin'] or shkgdict.ymax < bounds['ymax']:
            print('Specified bounds are outside shakemap area, using ShakeMap bounds instead')
            bounds = None
    if bounds is not None:
        tempgdict = GeoDict({'xmin': bounds['xmin'], 'ymin': bounds['ymin'], 'xmax': bounds['xmax'], 'ymax': bounds['ymax'], 'dx': shkgdict.dx, 'dy': shkgdict.dy, 'nx': shkgdict.nx, 'ny': shkgdict.ny}, adjust='res')
        gdict = shkgdict.getBoundsWithin(tempgdict)
        shakemap = ShakeGrid.load(shakefile, samplegeodict=gdict, adjust='bounds')
    else:
        shakemap = ShakeGrid.load(shakefile, adjust='res')
    shkgdict = shakemap.getGeoDict()  # Get updated geodict
    M = shakemap.getEventDict()['magnitude']

    # Read in all the slope quantile files, dividing by slopediv to convert to slope in degrees (Verdin input files are stored as slope*100)
    slopes = []
    slopefiles = ['slope_min.bil', 'slope10.bil', 'slope30.bil', 'slope50.bil',
                  'slope70.bil', 'slope90.bil', 'slope_max.bil']
    for slopefile in slopefiles:
        slopegrid = GDALGrid.load(os.path.join(slopefilepath, slopefile),
                                  samplegeodict=shkgdict, resample=True,
                                  method='linear')
        slopes.append(slopegrid.getData()/slopediv)
    slopestack = np.dstack(slopes)

    # Change any zero slopes to a very small number to avoid dividing by zero later
    slopestack[slopestack == 0] = 1e-8

    # Read in the cohesion and friction files and duplicate layers so they are same shape as slope structure
    cohesion = np.repeat(GDALGrid.load(cohesionfile, samplegeodict=shakemap.getGeoDict(), resample=True, method='nearest').getData()[:, :, np.newaxis]/codiv, 7, axis=2)
    cohesion[cohesion == -999.9] = nodata_cohesion
    cohesion[cohesion == 0] = nodata_cohesion
    friction = np.repeat(GDALGrid.load(frictionfile, samplegeodict=shakemap.getGeoDict(), resample=True, method='nearest').getData().astype(float)[:, :, np.newaxis], 7, axis=2)
    friction[friction == -9999] = nodata_friction
    friction[friction == 0] = nodata_friction

    # Compute the static factor of safety from infinite-slope analysis (dry conditions)
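    # i.e. FS = c'/(gamma*t*sin(alpha)) + tan(phi)/tan(alpha), where c' is
    # cohesion, gamma is unit weight (uwt), t is thickness (thick), alpha is
    # the slope angle, and phi is the friction angle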
    FS = cohesion/(uwt*thick*np.sin(slopestack*(np.pi/180.))) + np.tan(friction*(np.pi/180.))/np.tan(slopestack*(np.pi/180.))
    FS[FS < fsthresh] = fsthresh

    # Compute critical acceleration, in g
    Ac = (FS-1)*np.sin(slopestack*(np.pi/180.)).astype(float)  # This gives ac in g, equations that multiply by g give ac in m/s2
    Ac[Ac < acthresh] = acthresh

    # Get PGA in g (PGA is %g in ShakeMap, convert to g)
    PGA = np.repeat(shakemap.getLayer('pga').getData()[:, :, np.newaxis]/100., 7, axis=2).astype(float)

    if 'PGV' in regressionmodel:  # Load in PGV also, in cm/sec
        PGV = np.repeat(shakemap.getLayer('pgv').getData()[:, :, np.newaxis], 7, axis=2).astype(float)

    np.seterr(invalid='ignore')  # Ignore errors so still runs when Ac > PGA, just leaves nan instead of crashing

    if regressionmodel == 'J_PGA':
        Dn = J_PGA(Ac, PGA)
    elif regressionmodel == 'J_PGA_M':
        Dn = J_PGA_M(Ac, PGA, M)
    elif regressionmodel == 'RS_PGA_M':
        Dn = RS_PGA_M(Ac, PGA, M)
    elif regressionmodel == 'RS_PGA_PGV':
        Dn = RS_PGA_PGV(Ac, PGA, PGV)

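    # Count how many of the 7 slope quantiles exceed the displacement
    # threshold, then convert that count to an areal-coverage value via the
    # fixed bins below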
    PROB = Dn.copy()
    PROB[PROB < dnthresh] = 0.
    PROB[PROB >= dnthresh] = 1.
    PROB = np.sum(PROB, axis=2)
    PROB[PROB == 1.] = 0.01
    PROB[PROB == 2.] = 0.10
    PROB[PROB == 3.] = 0.30
    PROB[PROB == 4.] = 0.50
    PROB[PROB == 5.] = 0.70
    PROB[PROB == 6.] = 0.90
    PROB[PROB == 7.] = 0.99

    # Turn output and inputs into grids and put in maplayers dictionary
    maplayers = collections.OrderedDict()

    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])

    description = {'name': modelsref, 'longref': modellref, 'units': 'coverage', 'shakemap': shakedetail, 'parameters': {'regressionmodel': regressionmodel, 'thickness_m': thick, 'unitwt_kNm3': uwt, 'dnthresh_cm': dnthresh, 'acthresh_g': acthresh, 'fsthresh': fsthresh}}

    maplayers['model'] = {'grid': GDALGrid(PROB, shakemap.getGeoDict()), 'label': 'Areal coverage', 'type': 'output', 'description': description}

    if saveinputs is True:
        maplayers['pga'] = {'grid': GDALGrid(PGA[:, :, 0], shakemap.getGeoDict()), 'label': 'PGA (g)', 'type': 'input', 'description': {'units': 'g', 'shakemap': shakedetail}}
        if 'PGV' in regressionmodel:
            maplayers['pgv'] = {'grid': GDALGrid(PGV[:, :, 0], shakemap.getGeoDict()), 'label': 'PGV (cm/s)', 'type': 'input', 'description': {'units': 'cm/s', 'shakemap': shakedetail}}
        maplayers['minFS'] = {'grid': GDALGrid(np.min(FS, axis=2), shakemap.getGeoDict()), 'label': 'Min Factor of Safety', 'type': 'input', 'description': {'units': 'unitless'}}
        maplayers['max slope'] = {'grid': GDALGrid(slopestack[:, :, -1], shakemap.getGeoDict()), 'label': r'Maximum slope ($^\circ$)', 'type': 'input', 'description': {'units': 'degrees', 'name': slopesref, 'longref': slopelref}}
        maplayers['cohesion'] = {'grid': GDALGrid(cohesion[:, :, 0], shakemap.getGeoDict()), 'label': 'Cohesion (kPa)', 'type': 'input', 'description': {'units': 'kPa (adjusted)', 'name': cohesionsref, 'longref': cohesionlref}}
        maplayers['friction angle'] = {'grid': GDALGrid(friction[:, :, 0], shakemap.getGeoDict()), 'label': r'Friction angle ($^\circ$)', 'type': 'input', 'description': {'units': 'degrees', 'name': frictionsref, 'longref': frictionlref}}

    return maplayers
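
# Hedged usage sketch (not from the original source): the file names and
# config contents are illustrative placeholders that follow the
# ['mechanistic_models']['godt_2008'] layout parsed above.
from configobj import ConfigObj

config = ConfigObj('godt_config.ini')  # hypothetical config file
maplayers = godt2008('grid.xml', config, regressionmodel='J_PGA_M',
                     saveinputs=True)
prob = maplayers['model']['grid'].getData()  # areal coverage values (0.01-0.99)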
Code example #29
File: sample.py  Project: emthompson-usgs/lsprocess
        try:
            fdict = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except:
            pass
    if gridtype is None:
        raise Exception('File "%s" does not appear to be either a GMT grid or an ESRI grid.' % gridfile)
    xmin = xmin - fdict.dx*3
    xmax = xmax + fdict.dx*3
    ymin = ymin - fdict.dy*3
    ymax = ymax + fdict.dy*3
    bounds = (xmin,xmax,ymin,ymax)
    if gridtype == 'gmt':
        fgeodict = GMTGrid.getFileGeoDict(gridfile)
    else:
        fgeodict = GDALGrid.getFileGeoDict(gridfile)
    dx,dy = (fgeodict.dx,fgeodict.dy)
    sdict = GeoDict.createDictFromBox(xmin,xmax,ymin,ymax,dx,dy)
    if gridtype == 'gmt':
        grid = GMTGrid.load(gridfile,samplegeodict=sdict,resample=False,method=method,doPadding=True)
    else:
        grid = GDALGrid.load(gridfile,samplegeodict=sdict,resample=False,method=method,doPadding=True)

    return sampleFromGrid(grid,xypoints)
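
# Hedged usage sketch (not from the original source): `grid` stands in for a
# mapio Grid2D object loaded elsewhere; xypoints columns are lon, lat in
# decimal degrees.
import numpy as np
xypoints = np.array([[-120.4, 35.2],
                     [-120.5, 35.3]])
values = sampleFromGrid(grid, xypoints, method='nearest')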

def sampleFromGrid(grid,xypoints,method='nearest'):
    """
    Sample 2D grid object at each of a set of XY (decimal degrees) points.
    :param grid:
      Grid2D object at which to sample data.
    :param xypoints:
Code example #30
File: makemaps.py  Project: usgs/groundfailure
def modelMap(grids, shakefile=None, suptitle=None, inventory_shapefile=None,
             plotorder=None, maskthreshes=None, colormaps=None, boundaries=None,
             zthresh=0, scaletype='continuous', lims=None, logscale=False,
             ALPHA=0.7, maproads=True, mapcities=True, isScenario=False,
             roadfolder=None, topofile=None, cityfile=None, oceanfile=None,
             roadcolor='#6E6E6E', watercolor='#B8EEFF', countrycolor='#177F10',
             outputdir=None, savepdf=True, savepng=True, showplots=False,
             roadref='unknown', cityref='unknown', oceanref='unknown',
             printparam=False, ds=True, dstype='mean', upsample=False):
    """
    This function creates maps of mapio grid layers (e.g. liquefaction or
    landslide models with their input layers)
    All grids must use the same bounds
    TO DO change so that all input layers do not have to have the same bounds,
    test plotting multiple probability layers, and add option so that if PDF and
    PNG aren't output, opens plot on screen using plt.show()

    :param grids: Dictionary of N layers and metadata formatted like:
        maplayers['layer name']={
        'grid': mapio grid2D object,
        'label': 'label for colorbar and top line of subtitle',
        'type': 'output or input to model',
        'description': 'detailed description of layer for subtitle'}.
      Layer names must be unique.
    :type grids: Dictionary or ordered dictionary - import collections;
      grids = collections.OrderedDict()
    :param shakefile: optional ShakeMap file (url or full file path) to extract information for labels and folder names
    :type shakefile: string
    :param suptitle: This will be displayed at the top of the plots and in the
      figure names
    :type suptitle: string
    :param plotorder: List of keys describing the order to plot the grids, if
      None and grids is an ordered dictionary, it will use the order of the
      dictionary, otherwise it will choose order which may be somewhat random
      but it will always put a probability grid first
    :type plotorder: list
    :param maskthreshes: N x 1 array or list of lower thresholds for masking
      corresponding to order in plotorder or order of OrderedDict if plotorder
      is None. If grids is not an ordered dict and plotorder is not specified,
      this will not work right. If None (default), nothing will be masked
    :param colormaps: List of strings of matplotlib colormaps (e.g. cm.autumn_r)
      corresponding to plotorder or order of dictionary if plotorder is None.
      The list can contain both strings and None e.g. colormaps = ['cm.autumn',
      None, None, 'cm.jet'] and None's will default to default colormap
    :param boundaries: None to show entire study area, 'zoom' to zoom in on the
      area of action (only works if there is a probability layer) using zthresh
      as a threshold, or a dictionary defining lats and lons in the form of
      boundaries.xmin = minlon, boundaries.xmax = maxlon, boundaries.ymin =
      min lat, boundaries.ymax = max lat
    :param zthresh: threshold for computing zooming bounds, only used if
      boundaries = 'zoom'
    :type zthresh: float
    :param scaletype: Type of scale for plotting, 'continuous' or 'binned' -
      will be reflected in colorbar
    :type scaletype: string
    :param lims: None or Nx1 list of tuples or numpy arrays corresponding to
      plotorder defining the limits for saturating the colorbar (vmin, vmax) if
      scaletype is continuous or the bins to use (clev) if scaletype if binned.
      The list can contain tuples, arrays, and Nones, e.g. lims = [(0., 10.),
      None, (0.1, 1.5), np.linspace(0., 1.5, 15)]. When None is specified, the
      program will estimate the limits, when an array is specified but the scale
      type is continuous, vmin will be set to min(array) and vmax will be set
      to max(array)
    :param logscale: None or Nx1 list of Trues and Falses corresponding to
      plotorder defining whether to use a linear or log scale (log10) for
      plotting the layer. This will be reflected in the labels
    :param ALPHA: Transparency for mapping, if there is a hillshade that will
      plot below each layer, it is recommended to set this to at least 0.7
    :type ALPHA: float
    :param maproads: Whether to show roads or not, default True, but requires
      that roadfile is specified and valid to work
    :type maproads: boolean
    :param mapcities: Whether to show cities or not, default True, but requires
      that cityfile is specified and valid to work
    :type mapcities: boolean
    :param isScenario: Whether this is a scenario (True) or a real event (False)
      (default False)
    :type isScenario: boolean
    :param roadfolder: Full file path to folder containing road shapefiles
    :type roadfolder: string
    :param topofile: Full file path to topography grid (GDAL compatible) - this
      is only needed to make a hillshade if a premade hillshade is not specified
    :type topofile: string
    :param cityfile: Full file path to Pager file containing city & population
      information
    :type cityfile: string
    :param roadcolor: Color to use for roads, if plotted, default #6E6E6E
    :type roadcolor: Hex color or other matplotlib compatible way of defining
      color
    :param watercolor: Color to use for oceans, lakes, and rivers, default
      #B8EEFF
    :type watercolor: Hex color or other matplotlib compatible way of defining
      color
    :param countrycolor: Color for country borders, default #177F10
    :type countrycolor: Hex color or other matplotlib compatible way of defining
      color
    :param outputdir: File path for outputting figures, if edict is defined, a
      subfolder based on the event id will be created in this folder. If None,
      will use current directory
    :param savepdf: True to save pdf figure, False to not
    :param savepng: True to save png figure, False to not
    :param ds: True to allow downsampling for display (necessary when arrays
      are quite large), False to not allow
    :param dstype: What function to use in downsampling, options are 'min',
      'max', 'med', or 'mean'
    :param upsample: True to upsample the layer to the DEM resolution for better
      looking hillshades

    :returns:
        * PDF and/or PNG of map
        * Downsampled and trimmed version of input grids. If no modification
          was needed for plotting, this will be identical to grids but
          without the metadata

    """

    if suptitle is None:
        suptitle = ' '

    plt.ioff()

    defaultcolormap = cm.jet

    if shakefile is not None:
        edict = ShakeGrid.load(shakefile, adjust='res').getEventDict()
        temp = ShakeGrid.load(shakefile, adjust='res').getShakeDict()
        edict['eventid'] = temp['shakemap_id']
        edict['version'] = temp['shakemap_version']
    else:
        edict = None

    # Get output file location
    if outputdir is None:
        print('No output location given, using current directory for outputs\n')
        outputdir = os.getcwd()
    if edict is not None:
        outfolder = os.path.join(outputdir, edict['event_id'])
    else:
        outfolder = outputdir
    if not os.path.isdir(outfolder):
        os.makedirs(outfolder)

    # Get plotting order, if not specified
    if plotorder is None:
        plotorder = list(grids.keys())

    # Get boundaries to use for all plots
    cut = True
    if boundaries is None:
        cut = False
        keytemp = list(grids.keys())
        boundaries = grids[keytemp[0]]['grid'].getGeoDict()
    elif boundaries == 'zoom':
        # Find probability layer (will just take the maximum bounds if there is
        # more than one)
        keytemp = list(grids.keys())
        key1 = [key for key in keytemp if 'model' in key.lower()]
        if len(key1) == 0:
            print('Could not find model layer to use for zoom, using default boundaries')
            keytemp = list(grids.keys())
            boundaries = grids[keytemp[0]]['grid'].getGeoDict()
        else:
            lonmax = -1.e10
            lonmin = 1.e10
            latmax = -1.e10
            latmin = 1.e10
            for key in key1:
                # get lat lons of areas affected and add, if no areas affected,
                # switch to shakemap boundaries
                temp = grids[key]['grid']
                xmin, xmax, ymin, ymax = temp.getBounds()
                lons = np.linspace(xmin, xmax, temp.getGeoDict().nx)
                lats = np.linspace(ymax, ymin, temp.getGeoDict().ny)  # backwards so it plots right
                row, col = np.where(temp.getData() > float(zthresh))
                # keep the running min/max so multiple model layers accumulate
                lonmin = min(lonmin, lons[col].min())
                lonmax = max(lonmax, lons[col].max())
                latmin = min(latmin, lats[row].min())
                latmax = max(latmax, lats[row].max())
            boundaries1 = {'dx': 100, 'dy': 100., 'nx': 100., 'ny': 100}  # dummy fillers, only really care about bounds
            if xmin < lonmin-0.15*(lonmax-lonmin):
                boundaries1['xmin'] = lonmin-0.1*(lonmax-lonmin)
            else:
                boundaries1['xmin'] = xmin
            if xmax > lonmax+0.15*(lonmax-lonmin):
                boundaries1['xmax'] = lonmax+0.1*(lonmax-lonmin)
            else:
                boundaries1['xmax'] = xmax
            if ymin < latmin-0.15*(latmax-latmin):
                boundaries1['ymin'] = latmin-0.1*(latmax-latmin)
            else:
                boundaries1['ymin'] = ymin
            if ymax > latmax+0.15*(latmax-latmin):
                boundaries1['ymax'] = latmax+0.1*(latmax-latmin)
            else:
                boundaries1['ymax'] = ymax
            boundaries = GeoDict(boundaries1, adjust='res')
    else:
        # SEE IF BOUNDARIES ARE SAME AS BOUNDARIES OF LAYERS
        keytemp = list(grids.keys())
        tempgdict = grids[keytemp[0]]['grid'].getGeoDict()
        if np.abs(tempgdict.xmin-boundaries['xmin']) < 0.05 and \
           np.abs(tempgdict.ymin-boundaries['ymin']) < 0.05 and \
           np.abs(tempgdict.xmax-boundaries['xmax']) < 0.05 and \
           np.abs(tempgdict.ymax - boundaries['ymax']) < 0.05:
            print('Input boundaries are almost the same as specified boundaries, no cutting needed')
            boundaries = tempgdict
            cut = False
        else:
            try:
                if boundaries['xmin'] > boundaries['xmax'] or \
                   boundaries['ymin'] > boundaries['ymax']:
                    print('Input boundaries are not usable, using default boundaries')
                    keytemp = list(grids.keys())
                    boundaries = grids[keytemp[0]]['grid'].getGeoDict()
                    cut = False
                else:
                    # Build dummy GeoDict
                    boundaries = GeoDict({'xmin': boundaries['xmin'],
                                          'xmax': boundaries['xmax'],
                                          'ymin': boundaries['ymin'],
                                          'ymax': boundaries['ymax'],
                                          'dx': 100.,
                                          'dy': 100.,
                                          'ny': 100.,
                                          'nx': 100.},
                                         adjust='res')
            except:
                print('Input boundaries are not usable, using default boundaries')
                keytemp = list(grids.keys())
                boundaries = grids[keytemp[0]]['grid'].getGeoDict()
                cut = False

    # Pull out bounds for various uses
    bxmin, bxmax, bymin, bymax = boundaries.xmin, boundaries.xmax, boundaries.ymin, boundaries.ymax

    # Determine if need a single panel or multi-panel plot and if multi-panel,
    # how many and how it will be arranged
    fig = plt.figure()
    numpanels = len(grids)
    if numpanels == 1:
        rowpan = 1
        colpan = 1
        # create the figure and axes instances.
        fig.set_figwidth(5)
    elif numpanels == 2 or numpanels == 4:
        rowpan = int(np.ceil(numpanels/2.))
        colpan = 2
        fig.set_figwidth(13)
    else:
        rowpan = int(np.ceil(numpanels/3.))
        colpan = 3
        fig.set_figwidth(15)
    if rowpan == 1:
        fig.set_figheight(rowpan*6.0)
    else:
        fig.set_figheight(rowpan*5.3)

    # Need to update naming to reflect the shakemap version once can get
    # getHeaderData to work, add edict['version'] back into title, maybe
    # shakemap id also?
    fontsizemain = 14.
    fontsizesub = 12.
    fontsizesmallest = 10.
    if rowpan == 1.:
        fontsizemain = 12.
        fontsizesub = 10.
        fontsizesmallest = 8.
    if edict is not None:
        if isScenario:
            title = edict['event_description']
        else:
            timestr = edict['event_timestamp'].strftime('%b %d %Y')
            title = 'M%.1f %s v%i - %s' % (edict['magnitude'], timestr, edict['version'], edict['event_description'])
        plt.suptitle(title+'\n'+suptitle, fontsize=fontsizemain)
    else:
        plt.suptitle(suptitle, fontsize=fontsizemain)

    clear_color = [0, 0, 0, 0.0]

    # Cut all of them and release extra memory

    xbuff = (bxmax-bxmin)/10.
    ybuff = (bymax-bymin)/10.
    cutxmin = bxmin-xbuff
    cutymin = bymin-ybuff
    cutxmax = bxmax+xbuff
    cutymax = bymax+ybuff
    if cut is True:
        newgrids = collections.OrderedDict()
        for k, layer in enumerate(plotorder):
            templayer = grids[layer]['grid']
            try:
                newgrids[layer] = {'grid': templayer.cut(cutxmin, cutxmax, cutymin, cutymax, align=True)}
            except Exception as e:
                print(('Cutting failed, %s, continuing with full layers' % e))
                newgrids = grids
                continue
        del templayer
        gc.collect()
    else:
        newgrids = grids
    tempgdict = newgrids[list(grids.keys())[0]]['grid'].getGeoDict()

    # Upsample layers to same as topofile if desired for better looking hillshades
    if upsample is True and topofile is not None:
        try:
            topodict = GDALGrid.getFileGeoDict(topofile)
            if topodict.dx >= tempgdict.dx or topodict.dy >= tempgdict.dy:
                print('Upsampling not possible, resolution of results already smaller than DEM')
            else:
                tempgdict1 = GeoDict({'xmin': tempgdict.xmin-xbuff,
                                      'ymin': tempgdict.ymin-ybuff,
                                      'xmax': tempgdict.xmax+xbuff,
                                      'ymax': tempgdict.ymax+ybuff,
                                      'dx': topodict.dx,
                                      'dy': topodict.dy,
                                      'nx': topodict.nx,
                                      'ny': topodict.ny},
                                     adjust='res')
                tempgdict2 = tempgdict1.getBoundsWithin(tempgdict)
                for k, layer in enumerate(plotorder):
                    newgrids[layer]['grid'] = newgrids[layer]['grid'].subdivide(tempgdict2)
        except:
            print('Upsampling failed, continuing')

    # Downsample all of them for plotting, if needed, and replace them in
    # grids (to save memory)
    tempgrid = newgrids[list(grids.keys())[0]]['grid']
    xsize = tempgrid.getGeoDict().nx
    ysize = tempgrid.getGeoDict().ny
    inchesx, inchesy = fig.get_size_inches()
    divx = int(np.round(xsize/(500.*inchesx)))
    divy = int(np.round(ysize/(500.*inchesy)))
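    # divx/divy are chosen so that the downsampled grid comes out to roughly
    # 500 cells per figure inch in each direction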
    xmin, xmax, ymin, ymax = tempgrid.getBounds()
    gdict = tempgrid.getGeoDict()  # Will be replaced if downsampled
    del tempgrid
    gc.collect()

    if divx <= 1:
        divx = 1
    if divy <= 1:
        divy = 1
    if (divx > 1. or divy > 1.) and ds:
        if dstype == 'max':
            func = np.nanmax
        elif dstype == 'min':
            func = np.nanmin
        elif dstype == 'med':
            func = np.nanmedian
        else:
            func = np.nanmean
        for k, layer in enumerate(plotorder):
            layergrid = newgrids[layer]['grid']
            dat = block_reduce(layergrid.getData().copy(),
                               block_size=(divy, divx),
                               cval=float('nan'),
                               func=func)
            if k == 0:
                lons = block_reduce(np.linspace(xmin, xmax, layergrid.getGeoDict().nx),
                                    block_size=(divx,),
                                    func=np.mean,
                                    cval=float('nan'))
                if math.isnan(lons[-1]):
                    lons[-1] = lons[-2] + (lons[1]-lons[0])
                lats = block_reduce(np.linspace(ymax, ymin, layergrid.getGeoDict().ny),
                                    block_size=(divy,),
                                    func=np.mean,
                                    cval=float('nan'))
                if math.isnan(lats[-1]):
                    lats[-1] = lats[-2] + (lats[1]-lats[0])
                gdict = GeoDict({'xmin': lons.min(),
                                 'xmax': lons.max(),
                                 'ymin': lats.min(),
                                 'ymax': lats.max(),
                                 'dx': np.abs(lons[1]-lons[0]),
                                 'dy': np.abs(lats[1]-lats[0]),
                                 'nx': len(lons),
                                 'ny': len(lats)},
                                adjust='res')
            newgrids[layer]['grid'] = Grid2D(dat, gdict)
        del layergrid, dat
    else:
        lons = np.linspace(xmin, xmax, xsize)
        lats = np.linspace(ymax, ymin, ysize)  # backwards so it plots right side up

    #make meshgrid
    llons1, llats1 = np.meshgrid(lons, lats)

    # See if there is an oceanfile for masking
    bbox = PolygonSH(((cutxmin, cutymin), (cutxmin, cutymax), (cutxmax, cutymax), (cutxmax, cutymin)))
    if oceanfile is not None:
        try:
            f = fiona.open(oceanfile)
            oc = next(f)
            f.close()
            shapes = shape(oc['geometry'])
            # make boundaries into a shape
            ocean = shapes.intersection(bbox)
        except:
            print('Not able to read specified ocean file, will use default ocean masking')
            oceanfile = None
    if inventory_shapefile is not None:
        try:
            f = fiona.open(inventory_shapefile)
            invshp = list(f.items(bbox=(bxmin, bymin, bxmax, bymax)))
            f.close()
            inventory = [shape(inv[1]['geometry']) for inv in invshp]
        except:
            print('unable to read inventory shapefile specified, will not plot inventory')
            inventory_shapefile = None

    # # Find cities that will be plotted
    if mapcities is True and cityfile is not None:
        try:
            mycity = BasemapCities.loadFromGeoNames(cityfile=cityfile)
            bcities = mycity.limitByBounds((bxmin, bxmax, bymin, bymax))
            #bcities = bcities.limitByPopulation(40000)
            bcities = bcities.limitByGrid(nx=4, ny=4, cities_per_grid=2)
        except:
            print('Could not read in cityfile, not plotting cities')
            mapcities = False
            cityfile = None

    # Load in topofile
    if topofile is not None:
        try:
            topomap = GDALGrid.load(topofile, resample=True, method='linear', samplegeodict=gdict)
        except:
            topomap = GMTGrid.load(topofile, resample=True, method='linear', samplegeodict=gdict)
        topodata = topomap.getData().copy()
        # mask oceans if don't have ocean shapefile
        if oceanfile is None:
            topodata = maskoceans(llons1, llats1, topodata, resolution='h', grid=1.25, inlands=True)
    else:
        print('no hillshade is possible\n')
        topomap = None
        topodata = None

    # Load in roads, if needed
    if maproads is True and roadfolder is not None:
        try:
            roadslist = []
            for folder in os.listdir(roadfolder):
                road1 = os.path.join(roadfolder, folder)
                shpfiles = glob.glob(os.path.join(road1, '*.shp'))
                if len(shpfiles):
                    shpfile = shpfiles[0]
                    f = fiona.open(shpfile)
                    shapes = list(f.items(bbox=(bxmin, bymin, bxmax, bymax)))
                    for shapeid, shapedict in shapes:
                        roadslist.append(shapedict)
                    f.close()
        except:
            print('Not able to plot roads')
            roadslist = None

    val = 1
    for k, layer in enumerate(plotorder):
        layergrid = newgrids[layer]['grid']
        if 'label' in list(grids[layer].keys()):
            label1 = grids[layer]['label']
        else:
            label1 = layer
        try:
            sref = grids[layer]['description']['name']
        except:
            sref = None
        ax = fig.add_subplot(rowpan, colpan, val)
        val += 1
        clat = bymin + (bymax-bymin)/2.0
        clon = bxmin + (bxmax-bxmin)/2.0
        # setup of basemap ('lcc' = lambert conformal conic).
        # use major and minor sphere radii from WGS84 ellipsoid.
        m = Basemap(llcrnrlon=bxmin, llcrnrlat=bymin, urcrnrlon=bxmax, urcrnrlat=bymax,
                    rsphere=(6378137.00, 6356752.3142),
                    resolution='l', area_thresh=1000., projection='lcc',
                    lat_1=clat, lon_0=clon, ax=ax)

        x1, y1 = m(llons1, llats1)  # get projection coordinates
        axsize = ax.get_window_extent().transformed(fig.dpi_scale_trans.inverted())
        if k == 0:
            wid, ht = axsize.width, axsize.height
        if colormaps is not None and \
           len(colormaps) == len(newgrids) and \
           colormaps[k] is not None:
            palette = colormaps[k]
        else:  # Find preferred default color map for each type of layer
            if 'prob' in layer.lower() or 'pga' in layer.lower() or \
               'pgv' in layer.lower() or 'cohesion' in layer.lower() or \
               'friction' in layer.lower() or 'fs' in layer.lower():
                palette = cm.jet
            elif 'slope' in layer.lower():
                palette = cm.gnuplot2
            elif 'precip' in layer.lower():
                palette = cm2.s3pcpn
            else:
                palette = defaultcolormap

        if topodata is not None:
            if k == 0:
                ptopo = m.transform_scalar(
                    np.flipud(topodata), lons+0.5*gdict.dx,
                    lats[::-1]-0.5*gdict.dy, np.round(300.*wid),
                    np.round(300.*ht), returnxy=False, checkbounds=False,
                    order=1, masked=False)
                #use lightsource class to make our shaded topography
                ls = LightSource(azdeg=135, altdeg=45)
                ls1 = LightSource(azdeg=120, altdeg=45)
                ls2 = LightSource(azdeg=225, altdeg=45)
                intensity1 = ls1.hillshade(ptopo, fraction=0.25, vert_exag=1.)
                intensity2 = ls2.hillshade(ptopo, fraction=0.25, vert_exag=1.)
                intensity = intensity1*0.5 + intensity2*0.5
                #hillshm_im = m.transform_scalar(np.flipud(hillshm), lons, lats[::-1], np.round(300.*wid), np.round(300.*ht), returnxy=False, checkbounds=False, order=0, masked=False)
            #m.imshow(hillshm_im, cmap='Greys', vmin=0., vmax=3., zorder=1, interpolation='none')  # vmax = 3 to soften colors to light gray
            #m.pcolormesh(x1, y1, hillshm, cmap='Greys', linewidth=0., rasterized=True, vmin=0., vmax=3., edgecolors='none', zorder=1);
            # plt.draw()

        # Get the data
        dat = layergrid.getData().copy()

        # mask out anything below any specified thresholds
        # Might need to move this up to before downsampling...might give illusion of no hazard in places where there is some that just got averaged out
        if maskthreshes is not None and len(maskthreshes) == len(newgrids):
            if maskthreshes[k] is not None:
                dat[dat <= maskthreshes[k]] = float('NaN')
                dat = np.ma.array(dat, mask=np.isnan(dat))

        if logscale is not False and len(logscale) == len(newgrids):
            if logscale[k] is True:
                dat = np.log10(dat)
                label1 = r'$log_{10}$(' + label1 + ')'

        if scaletype.lower() == 'binned':
            # Find order of range to know how to scale
            order = np.round(np.log10(np.nanmax(dat) - np.nanmin(dat)))
            if order < 1.:
                scal = 10**-order
            else:
                scal = 1.
            if lims is None or len(lims) != len(newgrids):
                clev = (np.linspace(np.floor(scal*np.nanmin(dat)), np.ceil(scal*np.nanmax(dat)), 10))/scal
            else:
                if lims[k] is None:
                    clev = (np.linspace(np.floor(scal*np.nanmin(dat)), np.ceil(scal*np.nanmax(dat)), 10))/scal
                else:
                    clev = lims[k]
            # Adjust to colorbar levels
            dat[dat < clev[0]] = clev[0]
            for j, level in enumerate(clev[:-1]):
                dat[(dat >= clev[j]) & (dat < clev[j+1])] = clev[j]
            # So colorbar saturates at top
            dat[dat > clev[-1]] = clev[-1]
            #panelhandle = m.contourf(x1, y1, datm, clev, cmap=palette, linewidth=0., alpha=ALPHA, rasterized=True)
            vmin = clev[0]
            vmax = clev[-1]
        else:
            if lims is not None and len(lims) == len(newgrids):
                if lims[k] is None:
                    vmin = np.nanmin(dat)
                    vmax = np.nanmax(dat)
                else:
                    vmin = lims[k][0]
                    vmax = lims[k][-1]
            else:
                vmin = np.nanmin(dat)
                vmax = np.nanmax(dat)

        # Mask out cells overlying oceans or block with a shapefile if available
        if oceanfile is None:
            dat = maskoceans(llons1, llats1, dat, resolution='h', grid=1.25, inlands=True)
        else:
            #patches = []
            if type(ocean) is PolygonSH:
                ocean = [ocean]
            for oc in ocean:
                patch = getProjectedPatch(oc, m, edgecolor="#006280", facecolor=watercolor, lw=0.5, zorder=4.)
                #x, y = m(oc.exterior.xy[0], oc.exterior.xy[1])
                #xy = zip(x, y)
                #patch = Polygon(xy, facecolor=watercolor, edgecolor="#006280", lw=0.5, zorder=4.)
                ##patches.append(Polygon(xy, facecolor=watercolor, edgecolor=watercolor, zorder=500.))
                ax.add_patch(patch)
            ##ax.add_collection(PatchCollection(patches))

        if inventory_shapefile is not None:
            for in1 in inventory:
                if 'point' in str(type(in1)):
                    x, y = in1.xy
                    x = x[0]
                    y = y[0]
                    m.scatter(x, y, c='m', s=50, latlon=True, marker='^',
                              zorder=100001)
                else:
                    x, y = m(in1.exterior.xy[0], in1.exterior.xy[1])
                    xy = list(zip(x, y))
                    patch = Polygon(xy, facecolor='none', edgecolor='k', lw=0.5, zorder=10.)
                    #patches.append(Polygon(xy, facecolor=watercolor, edgecolor=watercolor, zorder=500.))
                    ax.add_patch(patch)
        palette.set_bad(clear_color, alpha=0.0)
        # Plot it up
        dat_im = m.transform_scalar(
            np.flipud(dat), lons+0.5*gdict.dx, lats[::-1]-0.5*gdict.dy,
            np.round(300.*wid), np.round(300.*ht), returnxy=False,
            checkbounds=False, order=0, masked=True)
        if topodata is not None:  # Drape over hillshade
            #turn data into an RGBA image
            cmap = palette
            #adjust data so scaled between vmin and vmax and between 0 and 1
            dat1 = dat_im.copy()
            dat1[dat1 < vmin] = vmin
            dat1[dat1 > vmax] = vmax
            dat1 = (dat1 - vmin)/(vmax-vmin)
            rgba_img = cmap(dat1)
            maskvals = np.dstack((dat1.mask, dat1.mask, dat1.mask))
            rgb = np.squeeze(rgba_img[:, :, 0:3])
            rgb[maskvals] = 1.
            draped_hsv = ls.blend_hsv(rgb, np.expand_dims(intensity, 2))
            m.imshow(draped_hsv, zorder=3., interpolation='none')
            # This is just a dummy layer that will be deleted to make the
            # colorbar look right
            panelhandle = m.imshow(dat_im, cmap=palette, zorder=0.,
                                   vmin=vmin, vmax=vmax)
        else:
            panelhandle = m.imshow(dat_im, cmap=palette, zorder=3.,
                                   vmin=vmin, vmax=vmax, interpolation='none')
        #panelhandle = m.pcolormesh(x1, y1, dat, linewidth=0., cmap=palette, vmin=vmin, vmax=vmax, alpha=ALPHA, rasterized=True, zorder=2.);
        #panelhandle.set_edgecolors('face')
        # add colorbar
        cbfmt = '%1.1f'
        if vmax is not None and vmin is not None:
            if (vmax - vmin) < 1.:
                cbfmt = '%1.2f'
            elif vmax > 5.:  # (vmax - vmin) > len(clev):
                cbfmt = '%1.0f'

        #norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
        if scaletype.lower() == 'binned':
            cbar = fig.colorbar(panelhandle, spacing='proportional',
                                ticks=clev, boundaries=clev, fraction=0.036,
                                pad=0.04, format=cbfmt, extend='both')
            #cbar1 = ColorbarBase(cbar.ax, cmap=palette, norm=norm, spacing='proportional', ticks=clev, boundaries=clev, fraction=0.036, pad=0.04, format=cbfmt, extend='both', extendfrac='auto')

        else:
            cbar = fig.colorbar(panelhandle, fraction=0.036, pad=0.04,
                                extend='both', format=cbfmt)
            #cbar1 = ColorbarBase(cbar.ax, cmap=palette, norm=norm, fraction=0.036, pad=0.04, extend='both', extendfrac='auto', format=cbfmt)

        if topodata is not None:
            panelhandle.remove()

        cbar.set_label(label1, fontsize=10)
        cbar.ax.tick_params(labelsize=8)

        parallels = m.drawparallels(getMapLines(bymin, bymax, 3),
                                    labels=[1, 0, 0, 0], linewidth=0.5,
                                    labelstyle='+/-', fontsize=9, xoffset=-0.8,
                                    color='gray', zorder=100.)
        m.drawmeridians(getMapLines(bxmin, bxmax, 3), labels=[0, 0, 0, 1],
                        linewidth=0.5, labelstyle='+/-', fontsize=9,
                        color='gray', zorder=100.)
        for par in parallels:
            try:
                parallels[par][1][0].set_rotation(90)
            except:
                pass

        #draw roads on the map, if they were provided to us
        if maproads is True and roadslist is not None:
            try:
                for road in roadslist:
                    try:
                        xy = list(road['geometry']['coordinates'])
                        roadx, roady = list(zip(*xy))
                        mapx, mapy = m(roadx, roady)
                        m.plot(mapx, mapy, roadcolor, lw=0.5, zorder=9)
                    except:
                        continue
            except Exception as e:
                print(('Failed to plot roads, %s' % e))

        #add city names to map
        if mapcities is True and cityfile is not None:
            try:
                fontname = 'Arial'
                fontsize = 8
                if k == 0:  # Only need to choose cities first time and then apply to rest
                    fcities = bcities.limitByMapCollision(
                        m, fontname=fontname, fontsize=fontsize)
                    ctlats, ctlons, names = fcities.getCities()
                    cxis, cyis = m(ctlons, ctlats)
                for ctlat, ctlon, cxi, cyi, name in zip(ctlats, ctlons, cxis, cyis, names):
                    m.scatter(ctlon, ctlat, c='k', latlon=True, marker='.',
                              zorder=100000)
                    ax.text(cxi, cyi, name, fontname=fontname,
                            fontsize=fontsize, zorder=100000)
            except Exception as e:
                print('Failed to plot cities, %s' % e)

        #draw star at epicenter
        plt.sca(ax)
        if edict is not None:
            elat, elon = edict['lat'], edict['lon']
            ex, ey = m(elon, elat)
            plt.plot(ex, ey, '*', markeredgecolor='k', mfc='None', mew=1.0,
                     ms=15, zorder=10000.)

        m.drawmapboundary(fill_color=watercolor)

        m.fillcontinents(color=clear_color, lake_color=watercolor)
        m.drawrivers(color=watercolor)
        ##m.drawcoastlines()

        #draw country boundaries
        m.drawcountries(color=countrycolor, linewidth=1.0)

        #add map scale
        m.drawmapscale((bxmax+bxmin)/2., (bymin+(bymax-bymin)/9.), clon, clat, np.round((((bxmax-bxmin)*111)/5)/10.)*10, barstyle='fancy', zorder=10)

        # Add border
        autoAxis = ax.axis()
        rec = Rectangle((autoAxis[0]-0.7, autoAxis[2]-0.2), (autoAxis[1]-autoAxis[0])+1, (autoAxis[3]-autoAxis[2])+0.4, fill=False, lw=1, zorder=1e8)
        rec = ax.add_patch(rec)
        rec.set_clip_on(False)

        plt.draw()

        if sref is not None:
            label2 = '%s\nsource: %s' % (label1, sref)  # '%s\n' % label1 + r'{\fontsize{10pt}{3em}\selectfont{}%s}' % sref  #
        else:
            label2 = label1
        plt.title(label2, axes=ax, fontsize=fontsizesub)

        #draw scenario watermark, if scenario
        if isScenario:
            plt.sca(ax)
            cx, cy = m(clon, clat)
            plt.text(cx, cy, 'SCENARIO', rotation=45, alpha=0.10, size=72, ha='center', va='center', color='red')

        #if ds: # Could add this to print "downsampled" on map
        #    plt.text()

        if k == 1 and rowpan == 1:
            # adjust single level plot
            axsize = ax.get_window_extent().transformed(fig.dpi_scale_trans.inverted())
            ht2 = axsize.height
            fig.set_figheight(ht2*1.6)
        else:
            plt.tight_layout()

        # Make room for suptitle - tight layout doesn't account for it
        plt.subplots_adjust(top=0.92)

    if printparam is True:
        try:
            fig = plt.gcf()
            dictionary = grids['model']['description']['parameters']
            paramstring = 'Model parameters: '
            halfway = np.ceil(len(dictionary)/2.)
            for i, key in enumerate(dictionary):
                if i == halfway and colpan == 1:
                    paramstring += '\n'
                paramstring += ('%s = %s; ' % (key, dictionary[key]))
            print(paramstring)
            fig.text(0.01, 0.015, paramstring, fontsize=fontsizesmallest)
            plt.draw()
        except:
            print('Could not display model parameters')

    if edict is not None:
        eventid = edict['eventid']
    else:
        eventid = ''

    time1 = datetime.datetime.utcnow().strftime('%d%b%Y_%H%M')
    outfile = os.path.join(outfolder, '%s_%s_%s.pdf' % (eventid, suptitle, time1))
    pngfile = os.path.join(outfolder, '%s_%s_%s.png' % (eventid, suptitle, time1))

    if savepdf is True:
        print('Saving map output to %s' % outfile)
        plt.savefig(outfile, dpi=300)
    if savepng is True:
        print('Saving map output to %s' % pngfile)
        plt.savefig(pngfile)
    if showplots is True:
        plt.show()
    else:
        plt.close(fig)

    return newgrids
Code example #31
0
File: kritikos.py Project: usgs/groundfailure
def kritikos_fuzzygamma(shakefile, config, bounds=None):
    """
    Runs the Kritikos (2014) procedure with a fuzzy gamma overlay
    """

    cmodel = config['statistic_models']['kritikos_2014']
    gamma = cmodel['gamma_value']

    ## Read in layer files and get data
    layers = cmodel['layers']
    try:
        # Slope
        slope_file = layers['slope']
        # DFF
        dff_file = layers['dff']
        # DFS
        dfs_file = layers['dfs']
        # Slope Position
        slope_pos_file = layers['slope_pos']
    except:
        print('Unable to retrieve grid data.')

    try:
        div = cmodel['divisor']
        # Load in divisors
        MMI_div = div['MMI']
        slope_div = div['slope']
        dff_div = div['dff']
        dfs_div = div['dfs']
        slope_pos_div = div['slope_pos']
    except:
        print('Unable to retrieve divisors.')

    try:
        power = cmodel['power']
        # Load in powers
        MMI_power = power['MMI']
        slope_power = power['slope']
        dff_power = power['dff']
        dfs_power = power['dfs']
        slope_pos_power = power['slope_pos']
    except:
        print('Unable to retrieve powers.')

    # Cut and resample all files
    try:
        shkgdict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        slopedict = GDALGrid.getFileGeoDict(slope_file)
        if bounds is not None:  # Make sure bounds are within ShakeMap Grid
            if shkgdict.xmin > bounds['xmin'] or shkgdict.xmax < bounds['xmax'] or shkgdict.ymin > bounds['ymin'] or shkgdict.ymax < bounds['ymax']:
                print('Specified bounds are outside shakemap area, using ShakeMap bounds instead')
                bounds = None
        if bounds is not None:
            tempgdict = GeoDict({'xmin': bounds['xmin'], 'ymin': bounds['ymin'], 'xmax': bounds['xmax'], 'ymax': bounds['ymax'], 'dx': 100., 'dy': 100., 'nx': 100., 'ny': 100.}, adjust='res')
            gdict = slopedict.getBoundsWithin(tempgdict)
        else:  # Get boundaries from shakemap if not specified
            gdict = slopedict.getBoundsWithin(shkgdict)
    except:
        print('Unable to create base geodict.')

    # Load in data
    try:
        # Load in slope data
        slopegrid = GDALGrid.load(slope_file, samplegeodict=gdict, resample=False)
        slope_data = slopegrid.getData().astype(float)
        # Load in MMI
        shakemap = ShakeGrid.load(shakefile, samplegeodict=gdict, resample=True, method='linear', adjust='res')
        MMI_data = shakemap.getLayer('mmi').getData().astype(float)
        # Load in DFF
        dffgrid = GDALGrid.load(dff_file, samplegeodict=gdict, resample=False)
        dff_data = dffgrid.getData().astype(float)
        # Load in DFS
        dfsgrid = GDALGrid.load(dfs_file, samplegeodict=gdict, resample=False)
        dfs_data = dfsgrid.getData().astype(float)
        # Load in slope position
        slope_pos_grid = GDALGrid.load(slope_pos_file, samplegeodict=gdict, resample=False)
        slope_pos_data = slope_pos_grid.getData().astype(float)
    except:
        print('Data could not be retrieved.')

    # Classification section of the config, which reclassifies each layer
    # into ordinal bins before the fuzzy overlay:
    #     [[[classification]]]
    #     MMI = 5,6,7,8,9
    #     slope = 0-4, 5-9, 10-14, 15-19, 20-24, 25-29, 30-34, 35-39, 40-44, 45-49, 50+  # Reclassify as 1,2,3,etc.
    #     dff = 0-4, 5-9, 10-19, 20-29, 30-39, 40-49, 50+  # Reclassify as 1,2,3,etc.
    #     dfs = 0-0.49, 0.5-0.99, 1.0-1.49, 1.5-1.99, 2.0-2.49, 2.5+  # Reclassify as 1,2,3,etc.
    #     slope_pos = 'Flat', 'Valley', 'Mid-Slope', 'Ridge'  # Reclassify as 1,2,3,etc.
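
The example stops before the overlay itself. For reference, the fuzzy gamma
operator blends the fuzzy algebraic product and the fuzzy algebraic sum of the
membership layers. Below is a minimal numpy sketch, assuming the membership
grids (values in [0, 1]) have already been derived from the divisors and
powers loaded above; fuzzy_gamma is a hypothetical helper, not part of the
project:

import numpy as np

def fuzzy_gamma(memberships, gamma):
    """Combine a list of fuzzy membership arrays with the fuzzy gamma operator."""
    stack = np.dstack(memberships)
    # Fuzzy algebraic product and fuzzy algebraic sum of all layers
    product = np.prod(stack, axis=2)
    fsum = 1.0 - np.prod(1.0 - stack, axis=2)
    # gamma = 0 reduces to the product, gamma = 1 to the sum
    return (fsum ** gamma) * (product ** (1.0 - gamma))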
Code example #32
0
File: makeTestdata.py Project: usgs/groundfailure
def makeTestData():
    # make test layers
    X = ['friction', 'slope', 'vs30', 'cti1', 'precip']
    config = OrderedDict()
    config.setdefault('logistic_models', {}).setdefault('test_model', {})
    config['logistic_models']['test_model'].setdefault('shortref', 'Name et al. year')
    config['logistic_models']['test_model'].setdefault('longref', 'full reference')
    config['logistic_models']['test_model'].setdefault('layers', {})
    config['logistic_models']['test_model'].setdefault('interpolations', {})
    config['logistic_models']['test_model'].setdefault('terms', {})
    config['logistic_models']['test_model'].setdefault('coefficients', {})['b0'] = 3.5

    for k, items in enumerate(X):
        coef = 'b%1d' % (k+1)
        # make a GDALGrid object
        testgrid = GDALGrid(eval(items), geodict)
        # Save the file
        if items == 'precip':
            try:
                os.mkdir('test_precip')
            except:
                pass
            filename = 'test_precip/prec_Jan.bil'  # Only make January for testing
        else:
            filename = 'test_%s.bil' % (items)
        testgrid.save(filename, format='EHdr')

        # add to test config file
        config['logistic_models']['test_model']['layers'].update({items: {'file': filename.split('/')[0],
                                                                          'units': units[k], 'longref': 'longref',
                                                                          'shortref': 'shortref'}})
        config['logistic_models']['test_model']['interpolations'].update({items: 'nearest'})
        config['logistic_models']['test_model']['terms'].update({coef: terms[k]})
        config['logistic_models']['test_model']['coefficients'].update({coef: coefficients[k]})

    config['logistic_models']['test_model']['gfetype'] = 'landslide'
    config['logistic_models']['test_model']['baselayer'] = 'slope'
    config['logistic_models']['test_model']['slopemin'] = 5.
    config['logistic_models']['test_model']['slopemax'] = 90.

    # Make test_shakegrid and test_uncert
    eventDict = OrderedDict([('event_id', 'test'),
                            ('lon', 0.5),
                            ('lat', 0.5),
                            ('event_timestamp', datetime(2000, 1, 5, 0, 30, 55)),
                            ('event_network', 'na'),
                            ('magnitude', 6.0),
                            ('event_description', 'Test event'),
                            ('depth', 5.0)])
    shakeDict = OrderedDict([('process_timestamp',
                            datetime(2000, 1, 6, 20, 38, 19)),
                            ('event_id', 'test'),
                            ('shakemap_version', 2),
                            ('code_version', '1 billion'),
                            ('shakemap_event_type', 'TEST'),
                            ('map_status', 'TEST'),
                            ('shakemap_id', 'test'),
                            ('shakemap_originator', 'na')])
    uncertaintyDict = {}

    layers1 = {'pga': pga, 'pgv': pgv}
    shakegrid = ShakeGrid(layers1, geodict, eventDict, shakeDict, uncertaintyDict)
    shakegrid.save('test_shakegrid.xml')

    layers2 = {'stdpga': stdpga}
    uncertgrid = ShakeGrid(layers2, geodict, eventDict, shakeDict, uncertaintyDict)
    uncertgrid.save('test_uncert.xml')

    C = ConfigObj(config)
    C.filename = 'test.ini'
    C.write()

    return config
Code example #33
0
File: newmark.py Project: mhearne-usgs/groundfailure
def HAZUS(shakefile, config, uncertfile=None, saveinputs=False, modeltype='coverage', regressionmodel='J_PGA', probtype='jibson2000', bounds=None):
    """
    Runs the HAZUS landslide procedure (FEMA, 2003, Chapter 4) using susceptibility categories (I-X) defined by the HAZUS manual

    :param shakefile: URL or complete file path to the location of the ShakeMap to use as input
    :type shakefile: string
    :param config: Model configuration file object containing locations of input files and other input values, e.g. config = ConfigObj(configfilepath)
    :type config: ConfigObj
    :param saveinputs: Whether or not to return the model input layers; False (default) returns only the model output (one layer)
    :type saveinputs: boolean
    :param modeltype: 'coverage' if critical acceleration is exceeded by pga, this gives the estimated areal coverage of landsliding for that cell
        'dn_hazus' - Outputs Newmark displacement using HAZUS methods without relating to probability of failure
        'dn_prob' - Estimates Newmark displacement using HAZUS methods and relates to probability of failure using param probtype
        'ac_classic_dn' - Uses the critical acceleration defined by HAZUS methodology and uses regression model defined by regressionmodel param to get Newmark displacement without relating to probability of failure
        'ac_classic_prob' - Uses the critical acceleration defined by HAZUS methodology and uses regression model defined by regressionmodel param to get Newmark displacement and probability defined by probtype method
    :type modeltype: string
    :param regressionmodel:
        Newmark displacement regression model to use
        'J_PGA' (default) - PGA-based model from Jibson (2007) - equation 6
        'J_PGA_M' - PGA and M-based model from Jibson (2007) - equation 7
        'RS_PGA_M' - PGA and M-based model from Rathje and Saygili (2009)
        'RS_PGA_PGV' - PGA and PGV-based model from Saygili and Rathje (2008) - equation 6
    :type regressionmodel: string
    :param probtype: Method used to estimate probability. Entering 'jibson2000' uses equation 5 from Jibson et al. (2000) to estimate probability from Newmark displacement. 'threshold' uses a specified threshold of Newmark displacement (defined in config file) and assumes anything greater than this threshold fails
    :type probtype: string
    :param bounds: Boundaries to compute over if different from ShakeMap boundaries as dictionary with keys 'xmin', 'xmax', 'ymin', 'ymax'

    :returns maplayers:  Dictionary containing output and input layers (if saveinputs=True) along with metadata formatted like maplayers['layer name']={'grid': mapio grid2D object, 'label': 'label for colorbar and top line of subtitle', 'type': 'output or input to model', 'description': 'detailed description of layer for subtitle, potentially including source information'}
    :type maplayers: OrderedDict
    """

    # Empty refs
    suslref = 'unknown'
    sussref = 'unknown'
    modellref = 'unknown'
    modelsref = 'unknown'

    # Parse config and read in files
    sus = None
    susdat = None

    if uncertfile is not None:
        print('ground motion uncertainty option not implemented yet')

    # Read in susceptibility file
    #try:
    susfile = config['mechanistic_models']['hazus']['layers']['susceptibility']['file']
    shkgdict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
    susdict = GDALGrid.getFileGeoDict(susfile)
    if bounds is not None:  # Make sure bounds are within ShakeMap Grid
        if shkgdict.xmin > bounds['xmin'] or shkgdict.xmax < bounds['xmax'] or shkgdict.ymin > bounds['ymin'] or shkgdict.ymax < bounds['ymax']:
            print('Specified bounds are outside shakemap area, using ShakeMap bounds instead')
            bounds = None
    if bounds is not None:
        tempgdict1 = GeoDict({'xmin': bounds['xmin'], 'ymin': bounds['ymin'], 'xmax': bounds['xmax'], 'ymax': bounds['ymax'], 'dx': 100., 'dy': 100., 'nx': 100., 'ny': 100.}, adjust='res')
        tempgdict = susdict.getBoundsWithin(tempgdict1)
    else:
        tempgdict = susdict.getBoundsWithin(shkgdict)
    sus = GDALGrid.load(susfile, samplegeodict=tempgdict, resample=False)
    gdict = sus.getGeoDict()
    susdat = sus.getData()
    #except Exception as e:
    #    raise IOError('Unable to read in susceptibility category file specified in config, %s,' % e)
    #    return

    try:  # Try to fetch source information from config
        modelsref = config['mechanistic_models']['hazus']['shortref']
        modellref = config['mechanistic_models']['hazus']['longref']
        sussref = config['mechanistic_models']['hazus']['layers']['susceptibility']['shortref']
        suslref = config['mechanistic_models']['hazus']['layers']['susceptibility']['longref']
    except:
        print('Was not able to retrieve all references from config file. Continuing')

    try:
        dnthresh = float(config['mechanistic_models']['hazus']['values']['dnthresh'])
    except:
        if probtype == 'threshold':
            dnthresh = 5.
            print('Unable to find dnthresh in config, using 5cm')

    # Load in shakemap, resample to susceptibility file
    shakemap = ShakeGrid.load(shakefile, adjust='res')

    PGA = shakemap.getLayer('pga').subdivide(gdict).getData().astype(float)/100.  # in units of g
    PGV = shakemap.getLayer('pgv').subdivide(gdict).getData().astype(float)  # cm/sec
    M = shakemap.getEventDict()['magnitude']

    # Get critical accelerations in g
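    # Susceptibility categories run from 1 to 10 (HAZUS categories I-X);
    # cells outside that range get a huge Ac so they can never fail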
    Ac = np.empty(np.shape(susdat))
    Ac[(susdat < 1) | (susdat > 10)] = 9999.
    Ac[susdat == 1] = 0.6
    Ac[susdat == 2] = 0.5
    Ac[susdat == 3] = 0.4
    Ac[susdat == 4] = 0.35
    Ac[susdat == 5] = 0.3
    Ac[susdat == 6] = 0.25
    Ac[susdat == 7] = 0.2
    Ac[susdat == 8] = 0.15
    Ac[susdat == 9] = 0.1
    Ac[susdat == 10] = 0.05

    # Can delete sus and susdat now if we don't need to output them, to free up memory
    if saveinputs is False:
        del susdat, sus

    if modeltype == 'coverage':
        areal = np.zeros(np.shape(PGA))
        # This seems to be slow for large matrices
        areal[(PGA >= Ac) & (Ac == 0.6)] = 0.01
        areal[(PGA >= Ac) & (Ac == 0.5)] = 0.02
        areal[(PGA >= Ac) & (Ac == 0.4)] = 0.03
        areal[(PGA >= Ac) & (Ac == 0.35)] = 0.05
        areal[(PGA >= Ac) & (Ac == 0.3)] = 0.08
        areal[(PGA >= Ac) & (Ac == 0.25)] = 0.1
        areal[(PGA >= Ac) & (Ac == 0.2)] = 0.15
        areal[(PGA >= Ac) & (Ac == 0.15)] = 0.2
        areal[(PGA >= Ac) & (Ac == 0.1)] = 0.25
        areal[(PGA >= Ac) & (Ac == 0.05)] = 0.3
        # # But this way is even slower, takes 2x as long
        # numrows, numcols = np.shape(areal)
        # for j in np.arange(numrows):
        #     for k in np.arange(numcols):
        #         acval = Ac[j, k]
        #         if PGA[j, k] >= acval:
        #             if acval == 0.6:
        #                 areal[j, k] = 0.01
        #             elif acval == 0.5:
        #                 areal[j, k] = 0.02
        #             elif acval == 0.4:
        #                 areal[j, k] = 0.03
        #             elif acval == 0.35:
        #                 areal[j, k] = 0.05
        #             elif acval == 0.3:
        #                 areal[j, k] = 0.08
        #             elif acval == 0.25:
        #                 areal[j, k] = 0.1
        #             elif acval == 0.2:
        #                 areal[j, k] = 0.15
        #             elif acval == 0.15:
        #                 areal[j, k] = 0.2
        #             elif acval == 0.1:
        #                 areal[j, k] = 0.25
        #             elif acval == 0.05:
        #                 areal[j, k] = 0.3

    elif modeltype == 'dn_hazus' or modeltype == 'dn_prob':
        ed_low, ed_high = est_disp(Ac, PGA)
        ed_mean = np.mean((np.dstack((ed_low, ed_high))), axis=2)  # Get mean estimated displacements
        dn = ed_mean * numcycles(M) * PGA
    else:  # Calculate newmark displacement using a regression model
        if regressionmodel == 'J_PGA':
            dn = J_PGA(Ac, PGA)
        elif regressionmodel == 'J_PGA_M':
            dn = J_PGA_M(Ac, PGA, M)
        elif regressionmodel == 'RS_PGA_M':
            dn = RS_PGA_M(Ac, PGA, M)
        elif regressionmodel == 'RS_PGA_PGV':
            dn = RS_PGA_PGV(Ac, PGA, PGV)
        else:
            print('Unrecognized model, using J_PGA\n')
            dn = J_PGA(Ac, PGA)

    # Calculate probability from dn, if necessary for selected model
    if modeltype == 'ac_classic_prob' or modeltype == 'dn_prob':
        if probtype.lower() == 'jibson2000':
            PROB = 0.335*(1-np.exp(-0.048*dn**1.565))
            dnthresh = None
        elif probtype.lower() == 'threshold':
            PROB = dn.copy()
            PROB[PROB <= dnthresh] = 0
            PROB[PROB > dnthresh] = 1
        else:
            print('Invalid probtype, assuming jibson2000')
            PROB = 0.335*(1-np.exp(-0.048*dn**1.565))
            dnthresh = None

    # Turn output and inputs into grids and put in maplayers dictionary
    maplayers = collections.OrderedDict()

    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])

    if modeltype == 'coverage':
        maplayers['model'] = {'grid': GDALGrid(areal, gdict), 'label': 'Areal coverage', 'type': 'output', 'description': {'name': modelsref, 'longref': modellref, 'units': 'coverage', 'shakemap': shakedetail, 'parameters': {'modeltype': modeltype}}}
    elif modeltype == 'dn_hazus':
        maplayers['model'] = {'grid': GDALGrid(dn, gdict), 'label': 'Dn (cm)', 'type': 'output', 'description': {'name': modelsref, 'longref': modellref, 'units': 'displacement', 'shakemap': shakedetail, 'parameters': {'regressionmodel': regressionmodel, 'modeltype': modeltype}}}
    elif modeltype == 'ac_classic_dn':
        maplayers['model'] = {'grid': GDALGrid(dn, gdict), 'label': 'Dn (cm)', 'type': 'output', 'description': {'name': modelsref, 'longref': modellref, 'units': 'displacement', 'shakemap': shakedetail, 'parameters': {'regressionmodel': regressionmodel, 'modeltype': modeltype}}}
    elif modeltype == 'dn_prob':
        maplayers['model'] = {'grid': GDALGrid(PROB, gdict), 'label': 'Landslide Probability', 'type': 'output', 'description': {'name': modelsref, 'longref': modellref, 'units': 'probability', 'shakemap': shakedetail, 'parameters': {'regressionmodel': regressionmodel, 'dnthresh_cm': dnthresh, 'modeltype': modeltype, 'probtype': probtype}}}
    elif modeltype == 'ac_classic_prob':
        maplayers['model'] = {'grid': GDALGrid(PROB, gdict), 'label': 'Landslide Probability', 'type': 'output', 'description': {'name': modelsref, 'longref': modellref, 'units': 'probability', 'shakemap': shakedetail, 'parameters': {'regressionmodel': regressionmodel, 'dnthresh_cm': dnthresh, 'modeltype': modeltype, 'probtype': probtype}}}

    if saveinputs is True:
        maplayers['suscat'] = {'grid': sus, 'label': 'Susceptibility Category', 'type': 'input', 'description': {'name': sussref, 'longref': suslref, 'units': 'Category'}}
        maplayers['Ac'] = {'grid': GDALGrid(Ac, gdict), 'label': 'Ac (g)', 'type': 'output', 'description': {'units': 'g', 'shakemap': shakedetail}}
        maplayers['pga'] = {'grid': GDALGrid(PGA, gdict), 'label': 'PGA (g)', 'type': 'input', 'description': {'units': 'g', 'shakemap': shakedetail}}
        if 'pgv' in regressionmodel.lower():
            maplayers['pgv'] = {'grid': GDALGrid(PGV, gdict), 'label': 'PGV (cm/s)', 'type': 'input', 'description': {'units': 'cm/s', 'shakemap': shakedetail}}
        if 'dn' not in modeltype.lower() and modeltype != 'coverage':
            maplayers['dn'] = {'grid': GDALGrid(dn, gdict), 'label': 'Dn (cm)', 'type': 'output', 'description': {'units': 'displacement', 'shakemap': shakedetail, 'parameters': {'regressionmodel': regressionmodel, 'modeltype': modeltype}}}

    return maplayers
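
A hedged usage sketch (file names and the config path are placeholders; the
config must contain the mechanistic_models/hazus section this function reads):

from configobj import ConfigObj

config = ConfigObj('hazus_config.ini')  # hypothetical config file
maplayers = HAZUS('shakemap.xml', config, modeltype='coverage')
coverage = maplayers['model']['grid'].getData()  # estimated areal coverage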
Code example #34
0
    def __init__(self,
                 shakefile,
                 config,
                 uncertfile=None,
                 saveinputs=False,
                 slopefile=None,
                 slopediv=1.,
                 bounds=None,
                 numstd=1):
        """Set up the logistic model
        # ADD BOUNDS TO THIS MODEL
        :param config: configobj (config .ini file read in using configobj) defining the model and its inputs. Only one
          model should be described in each config file.
        :type config: dictionary
        :param shakefile: Full file path to shakemap.xml file for the event of interest
        :type shakefile: string
        :param uncertfile: Full file path to xml file of shakemap uncertainties
        :type uncertfile: string
        :param saveinputs: if True, saves all the input layers as Grid2D objects in addition to the model;
          if False, it will just output the model
        :type saveinputs: boolean
        :param slopefile: optional file path to slopefile that will be resampled to the other input files for applying
          thresholds OVERWRITES VALUE IN CONFIG
        :type slopefile: string
        :param slopediv: number to divide slope by to get to degrees (usually will be default
          of 1.)
        :type slopediv: float
        :param numstd: number of +/- standard deviations to use if uncertainty is computed (uncertfile is not None)

        """
        mnames = getLogisticModelNames(config)
        if len(mnames) == 0:
            raise Exception(
                'No config file found or problem with config file format')
        if len(mnames) > 1:
            raise Exception(
                'Config file contains more than one model which is no longer '
                'allowed, update your config file to the newer format')
        self.model = mnames[0]
        self.config = config
        cmodel = config[self.model]
        self.modeltype = cmodel['gfetype']
        self.coeffs = validateCoefficients(cmodel)
        self.layers = validateLayers(
            cmodel)  # key = layer name, value = file name
        self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)
        self.gmused = [
            value for term, value in cmodel['terms'].items()
            if 'pga' in value.lower() or 'pgv' in value.lower()
            or 'mmi' in value.lower()
        ]
        self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
        self.numstd = numstd
        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception(
                'You must specify a base layer corresponding to one of the files in the layer section.'
            )
        self.saveinputs = saveinputs
        if slopefile is None:
            try:
                self.slopefile = cmodel['slopefile']
            except:
                print(
                    'Could not find slopefile term in config, no slope thresholds will be applied\n'
                )
                self.slopefile = None
        else:
            self.slopefile = slopefile
        self.slopediv = slopediv

        #get the geodict for the shakemap
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
            shakefile)
        #YEAR = eventdict['event_timestamp'].year
        MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]
        #DAY = eventdict['event_timestamp'].day
        #HOUR = eventdict['event_timestamp'].hour

        #now find the layer that is our base layer and get the largest bounds we can guarantee not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        elif ftype == 'gmt':
            basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        else:
            raise Exception(
                'All predictor variable grids must be a valid GMT or ESRI file type'
            )

        #now load the shakemap, resampling and padding if necessary
        if ShakeGrid.getFileGeoDict(shakefile, adjust='res') == sampledict:
            self.shakemap = ShakeGrid.load(shakefile, adjust='res')
            flag = 1
        else:
            self.shakemap = ShakeGrid.load(shakefile,
                                           samplegeodict=sampledict,
                                           resample=True,
                                           doPadding=True,
                                           adjust='res')
            flag = 0

        # take uncertainties into account
        if uncertfile is not None:
            try:
                if flag == 1:
                    self.uncert = ShakeGrid.load(uncertfile, adjust='res')
                else:
                    self.uncert = ShakeGrid.load(uncertfile,
                                                 samplegeodict=sampledict,
                                                 resample=True,
                                                 doPadding=True,
                                                 adjust='res')
            except:
                print(
                    'Could not read uncertainty file, ignoring uncertainties')
                self.uncert = None
        else:
            self.uncert = None

        #load the predictor layers into a dictionary
        self.layerdict = {}  # key = layer name, value = grid object
        for layername, layerfile in self.layers.items():
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            if ftype == 'gmt':
                                if GMTGrid.getFileGeoDict(
                                        layerfile)[0] == sampledict:
                                    lyr = GMTGrid.load(layerfile)
                                else:
                                    lyr = GMTGrid.load(layerfile,
                                                       sampledict,
                                                       resample=True,
                                                       method=interp,
                                                       doPadding=True)
                            elif ftype == 'esri':
                                if GDALGrid.getFileGeoDict(
                                        layerfile)[0] == sampledict:
                                    lyr = GDALGrid.load(layerfile)
                                else:
                                    lyr = GDALGrid.load(layerfile,
                                                        sampledict,
                                                        resample=True,
                                                        method=interp,
                                                        doPadding=True)
                            else:
                                msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (
                                    layername, layerfile)
                                raise Exception(msg)
                            self.layerdict[layername] = lyr
            else:
                #first, figure out what kind of file we have (or is it a directory?)
                ftype = getFileType(layerfile)
                interp = self.interpolations[layername]
                if ftype == 'gmt':
                    if GMTGrid.getFileGeoDict(layerfile)[0] == sampledict:
                        lyr = GMTGrid.load(layerfile)
                    else:
                        lyr = GMTGrid.load(layerfile,
                                           sampledict,
                                           resample=True,
                                           method=interp,
                                           doPadding=True)
                elif ftype == 'esri':
                    if GDALGrid.getFileGeoDict(layerfile)[0] == sampledict:
                        lyr = GDALGrid.load(layerfile)
                    else:
                        lyr = GDALGrid.load(layerfile,
                                            sampledict,
                                            resample=True,
                                            method=interp,
                                            doPadding=True)
                else:
                    msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (
                        layername, layerfile)
                    raise Exception(msg)
                self.layerdict[layername] = lyr

        shapes = {}
        for layername, layer in self.layerdict.items():
            shapes[layername] = layer.getData().shape

        self.nuggets = [str(self.coeffs['b0'])]

        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)

        if self.uncert is not None:
            self.nugmin = copy.copy(self.nuggets)
            self.nugmax = copy.copy(self.nuggets)
            # Find the term with the shakemap input and replace for these nuggets
            for k, nug in enumerate(self.nuggets):
                if "self.shakemap.getLayer('pga').getData()" in nug:
                    self.nugmin[k] = self.nugmin[k].replace(
                        "self.shakemap.getLayer('pga').getData()",
                        "(np.exp(np.log(self.shakemap.getLayer('pga').getData())\
                                                             - self.numstd * self.uncert.getLayer('stdpga').getData()))"
                    )
                    self.nugmax[k] = self.nugmax[k].replace(
                        "self.shakemap.getLayer('pga').getData()",
                        "(np.exp(np.log(self.shakemap.getLayer('pga').getData())\
                                                             + self.numstd * self.uncert.getLayer('stdpga').getData()))"
                    )
                elif "self.shakemap.getLayer('pgv').getData()" in nug:
                    self.nugmin[k] = self.nugmin[k].replace(
                        "self.shakemap.getLayer('pgv').getData()",
                        "(np.exp(np.log(self.shakemap.getLayer('pgv').getData())\
                                                             - self.numstd * self.uncert.getLayer('stdpgv').getData()))"
                    )
                    self.nugmax[k] = self.nugmax[k].replace(
                        "self.shakemap.getLayer('pgv').getData()",
                        "(np.exp(np.log(self.shakemap.getLayer('pgv').getData())\
                                                             + self.numstd * self.uncert.getLayer('stdpgv').getData()))"
                    )
                elif "self.shakemap.getLayer('mmi').getData()" in nug:
                    self.nugmin[k] = self.nugmin[k].replace(
                        "self.shakemap.getLayer('mmi').getData()",
                        "(np.exp(np.log(self.shakemap.getLayer('mmi').getData())\
                                                             - self.numstd * self.uncert.getLayer('stdmmi').getData()))"
                    )
                    self.nugmax[k] = self.nugmax[k].replace(
                        "self.shakemap.getLayer('mmi').getData()",
                        "(np.exp(np.log(self.shakemap.getLayer('mmi').getData())\
                                                             + self.numstd * self.uncert.getLayer('stdmmi').getData()))"
                    )
            self.equationmin = ' + '.join(self.nugmin)
            self.equationmax = ' + '.join(self.nugmax)
        else:
            self.equationmin = None
            self.equationmax = None

        self.geodict = self.shakemap.getGeoDict()

        try:
            self.slopemin = float(config[self.model]['slopemin'])
            self.slopemax = float(config[self.model]['slopemax'])
        except:
            print(
                'could not find slopemin and/or slopemax in config, no limits will be applied'
            )
            self.slopemin = 0.
            self.slopemax = 90.
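
A hedged instantiation sketch, assuming the enclosing class is named
LogisticModel and exposes a calculate() method returning the maplayers
dictionary, as in the usgs/groundfailure project (file names are placeholders):

from configobj import ConfigObj

config = ConfigObj('test.ini')  # single-model config, hypothetical path
lm = LogisticModel('shakemap.xml', config, saveinputs=True)
print(lm.equation)          # the assembled logistic regression equation
maplayers = lm.calculate()  # assumed companion method; not shown above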
Code example #35
0
File: raster.py Project: mcetink/shakemap
    def execute(self):
        """
        Write raster.zip file containing ESRI Raster files of all the IMTs
        in shake_result.hdf.

        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """

        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('raster module can only operate on '
                                      'gridded data, not sets of points')

        # create GIS-readable .flt files of imt and uncertainty
        self.logger.debug('Creating GIS grids...')
        layers = container.getIMTs()

        # Package up all of these files into one zip file.
        zfilename = os.path.join(datadir, 'rasters.zip')
        zfile = zipfile.ZipFile(zfilename,
                                mode='w',
                                compression=zipfile.ZIP_DEFLATED)

        files_written = []
        for layer in layers:
            fileimt = oq_to_file(layer)
            # This is a bit hacky -- we only produce the raster for the
            # first IMC returned. It should work as long as we only have
            # one IMC produced per ShakeMap run.
            imclist = container.getComponents(layer)
            imtdict = container.getIMTGrids(layer, imclist[0])
            mean_grid = imtdict['mean']
            std_grid = imtdict['std']
            mean_gdal = GDALGrid.copyFromGrid(mean_grid)
            std_gdal = GDALGrid.copyFromGrid(std_grid)
            mean_fname = os.path.join(datadir, '%s_mean.flt' % fileimt)
            mean_hdr = os.path.join(datadir, '%s_mean.hdr' % fileimt)
            std_fname = os.path.join(datadir, '%s_std.flt' % fileimt)
            std_hdr = os.path.join(datadir, '%s_std.hdr' % fileimt)
            self.logger.debug('Saving %s...' % mean_fname)
            mean_gdal.save(mean_fname)
            files_written.append(mean_fname)
            files_written.append(mean_hdr)
            self.logger.debug('Saving %s...' % std_fname)
            std_gdal.save(std_fname)
            files_written.append(std_fname)
            files_written.append(std_hdr)
            zfile.write(mean_fname, '%s_mean.flt' % fileimt)
            zfile.write(mean_hdr, '%s_mean.hdr' % fileimt)
            zfile.write(std_fname, '%s_std.flt' % fileimt)
            zfile.write(std_hdr, '%s_std.hdr' % fileimt)

        zfile.close()
        container.close()

        # nuke all of the copies of the files we just put in the zipfile
        for file_written in files_written:
            os.remove(file_written)
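
A hedged sketch of inspecting the product this module writes (paths and the
IMT file name are placeholders; the actual layer names depend on the ShakeMap
run):

import zipfile

with zipfile.ZipFile('rasters.zip') as zf:
    print(zf.namelist())  # e.g. ['PGA_mean.flt', 'PGA_mean.hdr', ...]
    zf.extractall('rasters')

grid = GDALGrid.load('rasters/PGA_mean.flt')  # hypothetical IMT file name
print(grid.getGeoDict())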
Code example #36
0
def godt2008(shakefile,
             config,
             uncertfile=None,
             saveinputs=False,
             displmodel=None,
             bounds=None,
             slopediv=100.,
             codiv=10.,
             numstd=None,
             trimfile=None):
    """
    This function runs the Godt and others (2008) global method for a given
    ShakeMap. The Factor of Safety is calculated using infinite slope analysis
    assuming dry conditions. The method uses a threshold Newmark displacement
    and estimates areal coverage by doing the calculations for each slope
    quantile.

    Args:
        shakefile (str): Path to shakemap xml file.
        config (ConfigObj): ConfigObj of config file containing inputs required
            for running the model
        uncertfile (str): Path to shakemap uncertainty xml file (optional).
        saveinputs (bool): Whether or not to return the model input layers,
            False (default) returns only the model output (one layer).
        displmodel (str): Newmark displacement regression model to use

            * ``'J_PGA'`` (default) -- PGA-based model, equation 6 from
              Jibson (2007).
            * ``'J_PGA_M'`` -- PGA and M-based model, equation 7 from
              Jibson (2007).
            * ``'RS_PGA_M'`` -- PGA and M-based model from Rathje and
              Saygili (2009).
            * ``'RS_PGA_PGV'`` -- PGA and PGV-based model, equation 6
              from Saygili and Rathje (2008).

        bounds (dict): Optional dictionary with keys 'xmin', 'xmax', 'ymin',
            'ymax' that defines a subset of the shakemap area to compute.
        slopediv (float): Divide slope by this number to get slope in degrees
            (Verdin datasets need to be divided by 100).
        codiv (float): Divide cohesion input layer by this number
            (For Godt method, need to divide by 10 because that is how it was
            calibrated).
        numstd (float): Number of (+/-) standard deviations to use if
            uncertainty is computed (uncertfile must be supplied).
        trimfile (str): Path to shapefile of Earth's land masses used to trim
            offshore areas from the model.

    Returns:
        dict: Dictionary containing output and input layers (if
        saveinputs=True):

        .. code-block:: python

            {
                'grid': mapio grid2D object,
                'label': 'label for colorbar and top line of subtitle',
                'type': 'output or input to model',
                'description': {'name': 'short reference of model',
                                'longref': 'full model reference',
                                'units': 'units of output',
                                'shakemap': 'information about shakemap used',
                                'event_id': 'shakemap event id',
                                'parameters': 'dictionary of model parameters
                                               used'

                }
            }

    Raises:
         NameError: when unable to parse the config correctly (probably a
             formatting issue in the configfile) or when unable to find the
             shakefile (Shakemap filepath) -- these cause program to end.

    """
    # TODO:
    #    - Add 'all' -- averages Dn from all four equations, add term to
    #      convert PGA and PGV to Ia and use other equations, add Ambraseys and
    #      Menu (1988) option.

    # Empty refs
    slopesref = 'unknown'
    slopelref = 'unknown'
    cohesionlref = 'unknown'
    cohesionsref = 'unknown'
    frictionsref = 'unknown'
    frictionlref = 'unknown'
    modellref = 'unknown'
    modelsref = 'unknown'

    # See if trimfile exists
    if trimfile is not None:
        if not os.path.exists(trimfile):
            print('Specified trimfile does not exist: %s\n'
                  'Ocean will not be trimmed' % trimfile)
            trimfile = None
        elif os.path.splitext(trimfile)[1] != '.shp':
            print('trimfile must be a shapefile, ocean will not be trimmed')
            trimfile = None

    # Parse config
    try:  # May want to add error handling so if refs aren't given, just
        # includes unknown
        slopefilepath = config['godt_2008']['layers']['slope']['filepath']
        slopeunits = config['godt_2008']['layers']['slope']['units']
        cohesionfile = config['godt_2008']['layers']['cohesion']['file']
        cohesionunits = config['godt_2008']['layers']['cohesion']['units']
        frictionfile = config['godt_2008']['layers']['friction']['file']
        frictionunits = config['godt_2008']['layers']['friction']['units']

        thick = float(config['godt_2008']['parameters']['thick'])
        uwt = float(config['godt_2008']['parameters']['uwt'])
        nodata_cohesion = \
            float(config['godt_2008']['parameters']['nodata_cohesion'])
        nodata_friction = \
            float(config['godt_2008']['parameters']['nodata_friction'])
        dnthresh = float(config['godt_2008']['parameters']['dnthresh'])
        fsthresh = float(config['godt_2008']['parameters']['fsthresh'])
        acthresh = float(config['godt_2008']['parameters']['acthresh'])
        try:
            slopemin = float(config['godt_2008']['parameters']['slopemin'])
        except:
            slopemin = 0.01
            print('No slopemin found in config file, using 0.01 deg '
                  'for slope minimum')
    except Exception as e:
        raise NameError('Could not parse configfile, %s' % e)

    if displmodel is None:
        try:
            displmodel = config['godt_2008']['parameters']['displmodel']
        except:
            print('No regression model specified, using default of J_PGA_M')
            displmodel = 'J_PGA_M'

    # TO DO: ADD ERROR CATCHING ON UNITS, MAKE SURE THEY ARE WHAT THEY SHOULD
    #        BE FOR THIS MODEL

    try:  # Try to fetch source information from config
        modelsref = config['godt_2008']['shortref']
        modellref = config['godt_2008']['longref']
        slopesref = config['godt_2008']['layers']['slope']['shortref']
        slopelref = config['godt_2008']['layers']['slope']['longref']
        cohesionsref = config['godt_2008']['layers']['cohesion']['shortref']
        cohesionlref = config['godt_2008']['layers']['cohesion']['longref']
        frictionsref = config['godt_2008']['layers']['friction']['shortref']
        frictionlref = config['godt_2008']['layers']['friction']['longref']
    except:
        print('Was not able to retrieve all references from config file. '
              'Continuing')

    # Figure out how/if need to cut anything
    geodict = ShakeGrid.getFileGeoDict(shakefile)  # , adjust='res')
    if bounds is not None:  # Make sure bounds are within ShakeMap Grid
        if geodict.xmin < geodict.xmax:  # only if signs are not opposite
            if (geodict.xmin > bounds['xmin'] or geodict.xmax < bounds['xmax']
                    or geodict.ymin > bounds['ymin']
                    or geodict.ymax < bounds['ymax']):
                print('Specified bounds are outside shakemap area, using '
                      'ShakeMap bounds instead.')
                bounds = None

    if bounds is not None:
        tempgdict = GeoDict.createDictFromBox(bounds['xmin'],
                                              bounds['xmax'],
                                              bounds['ymin'],
                                              bounds['ymax'],
                                              geodict.dx,
                                              geodict.dy,
                                              inside=False)
        # If Shakemap geodict crosses 180/-180 line, fix geodict so things don't break
        if geodict.xmin > geodict.xmax:
            if tempgdict.xmin < 0:
                geodict._xmin -= 360.
            else:
                geodict._xmax += 360.
        geodict = geodict.getBoundsWithin(tempgdict)

    basegeodict, firstcol = GDALGrid.getFileGeoDict(
        os.path.join(slopefilepath, 'slope_min.bil'))
    if basegeodict == geodict:
        sampledict = geodict
    else:
        sampledict = basegeodict.getBoundsWithin(geodict)

    # Do we need to subdivide baselayer?
    if 'divfactor' in config['godt_2008'].keys():
        divfactor = float(config['godt_2008']['divfactor'])
        if divfactor != 1.:
            # adjust sampledict so everything will be resampled (cut one cell
            # of each edge so will be inside bounds)
            newxmin = sampledict.xmin - sampledict.dx/2. + \
                sampledict.dx/(2.*divfactor) + sampledict.dx
            newymin = sampledict.ymin - sampledict.dy/2. + \
                sampledict.dy/(2.*divfactor) + sampledict.dy
            newxmax = sampledict.xmax + sampledict.dx/2. - \
                sampledict.dx/(2.*divfactor) - sampledict.dx
            newymax = sampledict.ymax + sampledict.dy/2. - \
                sampledict.dy/(2.*divfactor) - sampledict.dy
            newdx = sampledict.dx / divfactor
            newdy = sampledict.dy / divfactor

            sampledict = GeoDict.createDictFromBox(newxmin,
                                                   newxmax,
                                                   newymin,
                                                   newymax,
                                                   newdx,
                                                   newdy,
                                                   inside=True)

    tmpdir = tempfile.mkdtemp()

    # Load in ShakeMap and get new geodictionary
    temp = ShakeGrid.load(shakefile)  # , adjust='res')
    junkfile = os.path.join(tmpdir, 'temp.bil')
    GDALGrid.copyFromGrid(temp.getLayer('pga')).save(junkfile)
    pga = quickcut(junkfile, sampledict, precise=True, method='bilinear')
    os.remove(junkfile)
    GDALGrid.copyFromGrid(temp.getLayer('pgv')).save(junkfile)
    pgv = quickcut(junkfile, sampledict, precise=True, method='bilinear')
    os.remove(junkfile)
    # Update geodictionary
    sampledict = pga.getGeoDict()

    t2 = temp.getEventDict()
    M = t2['magnitude']
    event_id = t2['event_id']
    shakedict = temp.getShakeDict()
    del (temp)

    # read in uncertainty if present
    if uncertfile is not None:
        try:
            temp = ShakeGrid.load(uncertfile)  # , adjust='res')
            GDALGrid.copyFromGrid(temp.getLayer('stdpga')).save(junkfile)
            uncertpga = quickcut(junkfile,
                                 sampledict,
                                 precise=True,
                                 method='bilinear',
                                 override=True)
            os.remove(junkfile)
            GDALGrid.copyFromGrid(temp.getLayer('stdpgv')).save(junkfile)
            uncertpgv = quickcut(junkfile,
                                 sampledict,
                                 precise=True,
                                 method='bilinear',
                                 override=True)
            os.remove(junkfile)
        except:
            print('Could not read uncertainty file, ignoring uncertainties')
            uncertfile = None
        if numstd is None:
            numstd = 1.

    # Read in all the slope files, divide all by slopediv to get slope in
    # degrees (the Verdin input files are multiplied by 100)
    slopes = []
    quantiles = [
        'slope_min.bil', 'slope10.bil', 'slope30.bil', 'slope50.bil',
        'slope70.bil', 'slope90.bil', 'slope_max.bil'
    ]
    for quant in quantiles:
        tmpslp = quickcut(os.path.join(slopefilepath, quant), sampledict)
        tgd = tmpslp.getGeoDict()
        if tgd != sampledict:
            raise Exception('Input layers are not aligned to same geodict')
        else:
            slopes.append(tmpslp.getData() / slopediv)

    slopestack = np.dstack(slopes)

    # Change any zero slopes to a very small number to avoid dividing by
    # zero later
    slopestack[slopestack == 0] = 1e-8

    # Read in the cohesion and friction files and duplicate layers so they
    # are same shape as slope structure

    tempco = quickcut(cohesionfile, sampledict, method='near')
    tempco = tempco.getData()[:, :, np.newaxis] / codiv
    cohesion = np.repeat(tempco, 7, axis=2)
    cohesion[cohesion == -999.9] = nodata_cohesion
    cohesion = np.nan_to_num(cohesion)
    cohesion[cohesion == 0] = nodata_cohesion

    tempfric = quickcut(frictionfile, sampledict, method='near')
    tempfric = tempfric.getData().astype(float)[:, :, np.newaxis]
    friction = np.repeat(tempfric, 7, axis=2)
    friction[friction == -9999] = nodata_friction
    friction = np.nan_to_num(friction)
    friction[friction == 0] = nodata_friction

    # Do the calculations using Jibson (2007) PGA only model for Dn
    FS = (cohesion / (uwt * thick * np.sin(slopestack * (np.pi / 180.))) +
          np.tan(friction * (np.pi / 180.)) / np.tan(slopestack *
                                                     (np.pi / 180.)))
    FS[FS < fsthresh] = fsthresh

    # Compute critical acceleration, in g
    # This gives ac in g, equations that multiply by g give ac in m/s2
    Ac = (FS - 1) * np.sin(slopestack * (np.pi / 180.)).astype(float)
    Ac[Ac < acthresh] = acthresh

    # Get PGA in g (PGA is %g in ShakeMap, convert to g)
    PGA = np.repeat(pga.getData()[:, :, np.newaxis] / 100., 7,
                    axis=2).astype(float)
    if 'PGV' in displmodel:  # Load in PGV also, in cm/sec
        PGV = np.repeat(pgv.getData()[:, :, np.newaxis], 7,
                        axis=2).astype(float)
    else:
        PGV = None

    if uncertfile is not None:
        stdpga = np.repeat(uncertpga.getData()[:, :, np.newaxis], 7,
                           axis=2).astype(float)
        stdpgv = np.repeat(uncertpgv.getData()[:, :, np.newaxis], 7,
                           axis=2).astype(float)
        # estimate PGA +- 1std
        PGAmin = np.exp(np.log(PGA * 100) - numstd * stdpga) / 100
        PGAmax = np.exp(np.log(PGA * 100) + numstd * stdpga) / 100
        if 'PGV' in displmodel:
            PGVmin = np.exp(np.log(PGV) - numstd * stdpgv)
            PGVmax = np.exp(np.log(PGV) + numstd * stdpgv)
        else:
            PGVmin = None
            PGVmax = None

    # Ignore errors so still runs when Ac > PGA, just leaves nan instead
    # of crashing.
    np.seterr(invalid='ignore')

    Dn, logDnstd, logtype = NMdisp(Ac, PGA, model=displmodel, M=M, PGV=PGV)
    if uncertfile is not None:
        Dnmin, logDnstdmin, logtype = NMdisp(Ac,
                                             PGAmin,
                                             model=displmodel,
                                             M=M,
                                             PGV=PGVmin)
        Dnmax, logDnstdmax, logtype = NMdisp(Ac,
                                             PGAmax,
                                             model=displmodel,
                                             M=M,
                                             PGV=PGVmax)

    PROB = Dn.copy()
    PROB[PROB < dnthresh] = 0.
    PROB[PROB >= dnthresh] = 1.
    PROB = np.sum(PROB, axis=2)
    if uncertfile is not None:
        PROBmin = Dnmin.copy()
        PROBmin[PROBmin <= dnthresh] = 0.
        PROBmin[PROBmin > dnthresh] = 1.
        PROBmin = np.sum(PROBmin, axis=2)
        PROBmax = Dnmax.copy()
        PROBmax[PROBmax <= dnthresh] = 0.
        PROBmax[PROBmax > dnthresh] = 1.
        PROBmax = np.sum(PROBmax, axis=2)

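    # Map the count of slope quantiles that exceeded the displacement
    # threshold (1-7) to the corresponding proportion of area affected,
    # following the per-quantile areal coverage approach described above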
    PROB[PROB == 1.] = 0.01
    PROB[PROB == 2.] = 0.10
    PROB[PROB == 3.] = 0.30
    PROB[PROB == 4.] = 0.50
    PROB[PROB == 5.] = 0.70
    PROB[PROB == 6.] = 0.90
    PROB[PROB == 7.] = 0.99

    if uncertfile is not None:
        PROBmin[PROBmin == 1.] = 0.01
        PROBmin[PROBmin == 2.] = 0.10
        PROBmin[PROBmin == 3.] = 0.30
        PROBmin[PROBmin == 4.] = 0.50
        PROBmin[PROBmin == 5.] = 0.70
        PROBmin[PROBmin == 6.] = 0.90
        PROBmin[PROBmin == 7.] = 0.99
        PROBmax[PROBmax == 1.] = 0.01
        PROBmax[PROBmax == 2.] = 0.10
        PROBmax[PROBmax == 3.] = 0.30
        PROBmax[PROBmax == 4.] = 0.50
        PROBmax[PROBmax == 5.] = 0.70
        PROBmax[PROBmax == 6.] = 0.90
        PROBmax[PROBmax == 7.] = 0.99

    if slopemin is not None:
        PROB[slopestack[:, :, 6] <= slopemin] = 0.
        # uncert too
        if uncertfile is not None:
            PROBmin[slopestack[:, :, 6] <= slopemin] = 0.
            PROBmax[slopestack[:, :, 6] <= slopemin] = 0.

    # Turn output and inputs into grids and put in maplayers dictionary
    maplayers = collections.OrderedDict()

    shakedetail = '%s_ver%s' % (shakedict['shakemap_id'],
                                shakedict['shakemap_version'])

    description = {
        'name': modelsref,
        'longref': modellref,
        'units': 'Proportion of Area Affected',
        'shakemap': shakedetail,
        'event_id': event_id,
        'parameters': {
            'displmodel': displmodel,
            'thickness_m': thick,
            'unitwt_kNm3': uwt,
            'dnthresh_cm': dnthresh,
            'acthresh_g': acthresh,
            'fsthresh': fsthresh,
            'modeltype': 'Landslide'
        }
    }
    PROBgrid = GDALGrid(PROB, sampledict)
    if trimfile is not None:
        PROBgrid = trim_ocean(PROBgrid, trimfile)

    maplayers['model'] = {
        'grid': PROBgrid,
        'label': 'Landslide - Proportion of Area Affected',
        'type': 'output',
        'description': description
    }

    if uncertfile is not None:
        PROBmingrid = GDALGrid(PROBmin, sampledict)
        PROBmaxgrid = GDALGrid(PROBmax, sampledict)
        if trimfile is not None:
            PROBmingrid = trim_ocean(PROBmingrid, trimfile)
            PROBmaxgrid = trim_ocean(PROBmaxgrid, trimfile)
        maplayers['modelmin'] = {
            'grid': PROBmingrid,
            'label': 'Landslide Probability-%1.2fstd' % numstd,
            'type': 'output',
            'description': description
        }
        maplayers['modelmax'] = {
            'grid': PROBmaxgrid,
            'label': 'Landslide Probability+%1.2fstd' % numstd,
            'type': 'output',
            'description': description
        }

    if saveinputs is True:
        maplayers['pga'] = {
            'grid': GDALGrid(PGA[:, :, 0], sampledict),
            'label': 'PGA (g)',
            'type': 'input',
            'description': {
                'units': 'g',
                'shakemap': shakedetail
            }
        }
        if 'PGV' in displmodel:
            maplayers['pgv'] = {
                'grid': GDALGrid(PGV[:, :, 0], sampledict),
                'label': 'PGV (cm/s)',
                'type': 'input',
                'description': {
                    'units': 'cm/s',
                    'shakemap': shakedetail
                }
            }
        maplayers['minFS'] = {
            'grid': GDALGrid(np.min(FS, axis=2), sampledict),
            'label': 'Min Factor of Safety',
            'type': 'input',
            'description': {
                'units': 'unitless'
            }
        }
        maplayers['max slope'] = {
            'grid': GDALGrid(slopestack[:, :, -1], sampledict),
            'label': r'Maximum slope ($^\circ$)',
            'type': 'input',
            'description': {
                'units': 'degrees',
                'name': slopesref,
                'longref': slopelref
            }
        }
        maplayers['cohesion'] = {
            'grid': GDALGrid(cohesion[:, :, 0], sampledict),
            'label': 'Cohesion (kPa)',
            'type': 'input',
            'description': {
                'units': 'kPa (adjusted)',
                'name': cohesionsref,
                'longref': cohesionlref
            }
        }
        maplayers['friction angle'] = {
            'grid': GDALGrid(friction[:, :, 0], sampledict),
            'label': r'Friction angle ($^\circ$)',
            'type': 'input',
            'description': {
                'units': 'degrees',
                'name': frictionsref,
                'longref': frictionlref
            }
        }
        if uncertfile is not None:
            maplayers['pgamin'] = {
                'grid': GDALGrid(PGAmin[:, :, 0], sampledict),
                'label': 'PGA - %1.2fstd (g)' % numstd,
                'type': 'input',
                'description': {
                    'units': 'g',
                    'shakemap': shakedetail
                }
            }
            maplayers['pgamax'] = {
                'grid': GDALGrid(PGAmax[:, :, 0], sampledict),
                'label': 'PGA + %1.2fstd (g)' % numstd,
                'type': 'input',
                'description': {
                    'units': 'g',
                    'shakemap': shakedetail
                }
            }
        if 'PGV' in displmodel:
            if uncertfile is not None:
                maplayers['pgvmin'] = {
                    'grid': GDALGrid(PGVmin[:, :, 0], sampledict),
                    'label': 'PGV - %1.2fstd (cm/s)' % numstd,
                    'type': 'input',
                    'description': {
                        'units': 'cm/s',
                        'shakemap': shakedetail
                    }
                }
                maplayers['pgvmax'] = {
                    'grid': GDALGrid(PGVmax[:, :, 0], sampledict),
                    'label': 'PGV + %1.2fstd (cm/s)' % numstd,
                    'type': 'input',
                    'description': {
                        'units': 'cm/s',
                        'shakemap': shakedetail
                    }
                }

    shutil.rmtree(tmpdir)

    return maplayers
Code example #37
0
File: logisticmodel.py Project: usgs/groundfailure
    def __init__(self, config, shakefile, model, uncertfile=None):
        """Set up the logistic model

        :param config: configobj (config .ini file read in using configobj) defining the model and its inputs
        :type config: dictionary
        :param shakefile: Full file path to shakemap.xml file for the event of interest
        :type shakefile: string
        :param model: Name of model defined in config that should be run for the event of interest
        :type model: string
        :param uncertfile: Full file path to the uncertainty.xml file for the event of interest; if provided, the model is also run for +/- one standard deviation of the ground motions
        :type uncertfile: string

        """
        if model not in getLogisticModelNames(config):
            raise Exception('Could not find a model called "%s" in config %s.' % (model, config))
        #do everything here short of calculations - parse config, assemble eqn strings, load data.

        self.model = model
        cmodel = config['logistic_models'][model]
        self.modeltype = cmodel['gfetype']
        self.coeffs = validateCoefficients(cmodel)
        self.layers = validateLayers(cmodel)  # key = layer name, value = file name
        self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)
        self.gmused = [value for term, value in cmodel['terms'].items() if 'pga' in value.lower() or 'pgv' in
                       value.lower() or 'mmi' in value.lower()]
        self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
        if 'baselayer' not in cmodel:
            raise Exception('You must specify a base layer file in config.')
        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception('You must specify a base layer corresponding to one of the files in the layer section.')

        #get the geodict for the shakemap
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        griddict, eventdict, specdict, fields, uncertainties = getHeaderData(shakefile)
        #YEAR = eventdict['event_timestamp'].year
        MONTH = MONTHS[(eventdict['event_timestamp'].month)-1]
        #DAY = eventdict['event_timestamp'].day
        #HOUR = eventdict['event_timestamp'].hour

        #now find the layer that is our base layer and get the largest bounds we can guarantee not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        elif ftype == 'gmt':
            basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        else:
            raise Exception('All predictor variable grids must be valid GMT or ESRI file types')

        #now load the shakemap, resampling and padding if necessary
        self.shakemap = ShakeGrid.load(shakefile, samplegeodict=sampledict, resample=True, doPadding=True, adjust='res')

        # take uncertainties into account
        if uncertfile is not None:
            try:
                self.uncert = ShakeGrid.load(uncertfile, samplegeodict=sampledict, resample=True, doPadding=True,
                                             adjust='res')
            except Exception:
                print('Could not read uncertainty file, ignoring uncertainties')
                self.uncert = None
        else:
            self.uncert = None

        #load the predictor layers into a dictionary
        self.layerdict = {}  # key = layer name, value = grid object
        for layername, layerfile in self.layers.items():
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            if ftype == 'gmt':
                                lyr = GMTGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                            elif ftype == 'esri':
                                lyr = GDALGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                            else:
                                msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (layername, layerfile)
                                raise Exception(msg)
                            self.layerdict[layername] = lyr
            else:
                #first, figure out what kind of file we have (or is it a directory?)
                ftype = getFileType(layerfile)
                interp = self.interpolations[layername]
                if ftype == 'gmt':
                    lyr = GMTGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                elif ftype == 'esri':
                    lyr = GDALGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                else:
                    msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (layername, layerfile)
                    raise Exception(msg)
                self.layerdict[layername] = lyr

        shapes = {}
        for layername, layer in self.layerdict.items():
            shapes[layername] = layer.getData().shape

        self.nuggets = [str(self.coeffs['b0'])]

        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)
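
        # For illustration, with hypothetical coefficients b0 = -3.6 and
        # b1 = 0.07, self.equation might read:
        #   "-3.6 + (0.07 * self.layerdict['slope'].getData())"
        # Keeping it as a string lets the full expression be evaluated later
        # (e.g., with eval) once all layers are loaded.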

        if self.uncert is not None:
            self.nugmin = copy.copy(self.nuggets)
            self.nugmax = copy.copy(self.nuggets)
            # Find the term with the shakemap input and replace for these nuggets
            for k, nug in enumerate(self.nuggets):
                if "self.shakemap.getLayer('pga').getData()" in nug:
                    self.nugmin[k] = self.nugmin[k].replace("self.shakemap.getLayer('pga').getData()", "(np.exp(np.log(self.shakemap.getLayer('pga').getData()) - self.uncert.getLayer('stdpga').getData()))")
                    self.nugmax[k] = self.nugmax[k].replace("self.shakemap.getLayer('pga').getData()", "(np.exp(np.log(self.shakemap.getLayer('pga').getData()) + self.uncert.getLayer('stdpga').getData()))")
                elif "self.layerdict['pgv'].getData()" in nug:
                    self.nugmin[k] = self.nugmin[k].replace("self.shakemap.getLayer('pgv').getData()", "(np.exp(np.log(self.shakemap.getLayer('pgv').getData()) - self.uncert.getLayer('stdpgv').getData()))")
                    self.nugmax[k] = self.nugmax[k].replace("self.shakemap.getLayer('pgv').getData()", "(np.exp(np.log(self.shakemap.getLayer('pgv').getData()) + self.uncert.getLayer('stdpgv').getData()))")
                elif "self.layerdict['mmi'].getData()" in nug:
                    self.nugmin[k] = self.nugmin[k].replace("self.shakemap.getLayer('mmi').getData()", "(np.exp(np.log(self.shakemap.getLayer('mmi').getData()) - self.uncert.getLayer('stdmmi').getData()))")
                    self.nugmax[k] = self.nugmax[k].replace("self.shakemap.getLayer('mmi').getData()", "(np.exp(np.log(self.shakemap.getLayer('mmi').getData()) + self.uncert.getLayer('stdmmi').getData()))")
            self.equationmin = ' + '.join(self.nugmin)
            self.equationmax = ' + '.join(self.nugmax)
        else:
            self.equationmin = None
            self.equationmax = None

        self.geodict = self.shakemap.getGeoDict()

        try:
            self.slopemin = float(config['logistic_models'][model]['slopemin'])
            self.slopemax = float(config['logistic_models'][model]['slopemax'])
        except Exception:
            print('Could not find slopemin and/or slopemax in config, no limits will be applied')
            self.slopemin = 0.
            self.slopemax = 90.
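
A minimal usage sketch for this constructor (hypothetical paths and model
name; it assumes the enclosing class is named LogisticModel and, as in the
usgs/groundfailure project, that a calculate() method runs the model):

from configobj import ConfigObj

config = ConfigObj('/path/to/models/mymodel.ini')  # hypothetical config
lm = LogisticModel(config, '/path/to/grid.xml', 'mymodel')
print(lm.equation)  # inspect the assembled regression equation string
# maplayers = lm.calculate()  # method assumed from the project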
Code example #38
0
def check_input_extents(config, shakefile=None, bounds=None):
    """Make sure all input files exist and cover the extent desired

    Args:
        config: configObj of a single model
        shakefile: path to ShakeMap grid.xml file (used for bounds). If not
            provided, bounds must be provided
        bounds: dictionary of bounds with keys: 'xmin', 'xmax', 'ymin', 'ymax'

    Returns:
        tuple containing:
            notcovered: list of files that do not cover the entire area
                defined by bounds or shakefile
            newbounds: new dictionary of bounds of subarea of original
                bounds or shakefile extent that is covered by all input files
    """
    if shakefile is None and bounds is None:
        raise Exception('Must define either a shakemap file or bounds')
    modelname = config.keys()[0]
    # Make dummy geodict to use
    if bounds is None:
        evdict = ShakeGrid.getFileGeoDict(shakefile)
    else:
        evdict = GeoDict.createDictFromBox(
            bounds['xmin'], bounds['xmax'],
            bounds['ymin'], bounds['ymax'],
            0.00001, 0.00001, inside=False)

    # Check extents of all input layers
    notcovered = []
    notcovgdicts = []
    newbounds = None
    for item, value in config[modelname]['layers'].items():
        if 'file' in value.keys():
            filelook = value['file']
            if getFileType(filelook) == 'gmt':
                tmpgd, _ = GMTGrid.getFileGeoDict(filelook)
            else:
                tmpgd, _ = GDALGrid.getFileGeoDict(filelook)
            # See if tempgd contains evdict
            contains = tmpgd.contains(evdict)
            if not contains:
                notcovered.append(filelook)
                notcovgdicts.append(tmpgd)
                # print(filelook)
    if len(notcovered) > 0:
        # Figure out what bounds COULD be run
        xmins = [gd.xmin for gd in notcovgdicts]
        xmaxs = [gd.xmax for gd in notcovgdicts]
        ymins = [gd.ymin for gd in notcovgdicts]
        ymaxs = [gd.ymax for gd in notcovgdicts]

        # Inset by a buffer of 0.05 degrees because mapio's getBoundsWithin
        # fails when the bounds are exactly equal
        newbounds = dict(xmin=evdict.xmin + 0.05,
                         xmax=evdict.xmax - 0.05,
                         ymin=evdict.ymin + 0.05,
                         ymax=evdict.ymax - 0.05)
        # Which one is the problem?
        if evdict.xmin < np.max(xmins):
            newbounds['xmin'] = np.max(xmins) + 0.05
        if evdict.xmax > np.min(xmaxs):
            newbounds['xmax'] = np.min(xmaxs) - 0.05
        if evdict.ymin < np.max(ymins):
            newbounds['ymin'] = np.max(ymins) + 0.05
        if evdict.ymax > np.min(ymaxs):
            newbounds['ymax'] = np.min(ymaxs) - 0.05

        # See if this is a possible extent
        try:
            test = GeoDict.createDictFromBox(
                newbounds['xmin'], newbounds['xmax'],
                newbounds['ymin'], newbounds['ymax'],
                0.00001, 0.00001, inside=False)
        except BaseException:
            print('Cannot make new bounds that will work')
            newbounds = None

    return notcovered, newbounds
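
A short usage sketch (hypothetical paths) showing how the returned
notcovered list and newbounds dictionary might be consumed:

from configobj import ConfigObj

config = ConfigObj('/path/to/model.ini')  # hypothetical single-model config
notcovered, newbounds = check_input_extents(
    config, shakefile='/path/to/grid.xml')
if notcovered:
    print('Input layers with incomplete coverage:')
    for lyr in notcovered:
        print('\t%s' % lyr)
    if newbounds is not None:
        print('A subarea could still be run: %s' % str(newbounds))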
Code example #39
0
File: newmark.py Project: mhearne-usgs/groundfailure
def classic(shakefile, config, uncertfile=None, saveinputs=False, regressionmodel='J_PGA', probtype='jibson2000', slopediv=1., codiv=1., bounds=None):
    """This function uses the Newmark method to estimate probability of failure at each grid cell.
    Factor of Safety and critcal accelerations are calculated following Jibson et al. (2000) and the
    Newmark displacement is estimated using PGA, PGV, and/or Magnitude (depending on equation used)
    from Shakemap with regression equations from Jibson (2007), Rathje and Saygili (2008) and
    Saygili and Rathje (2009)

    :param shakefile: URL or complete file path to the location of the Shakemap to use as input
    :type shakefile: string
    :param config: Model configuration file object containing locations of input files and other input values, e.g. config = ConfigObj(configfilepath)
    :type config: ConfigObj
    :param uncertfile: complete file path to the location of the uncertainty.xml for the shakefile, if this is not None, it will compute the model for +-std in addition to the best estimate
    :param saveinputs: Whether or not to return the model input layers, False (default) returns only the model output (one layer)
    :type saveinputs: boolean
    :param regressionmodel:
        Newmark displacement regression model to use
        'J_PGA' (default) - PGA-based model from Jibson (2007) - equation 6
        'J_PGA_M' - PGA and M-based model from Jibson (2007) - equation 7
        'RS_PGA_M' - PGA and M-based model from Rathje and Saygili (2009)
        'RS_PGA_PGV' - PGA and PGV-based model from Saygili and Rathje (2008) - equation 6
    :type regressionmodel: string
    :param probtype: Method used to estimate probability. Entering 'jibson2000' uses equation 5 from Jibson et al. (2000) to estimate probability from Newmark displacement. 'threshold' uses a specified threshold of Newmark displacement (defined in config file) and assumes anything greater than this threshold fails. An unrecognized probtype prints a warning and defaults to 'jibson2000'.
    :type probtype: string
    :param slopediv: Divide slope by this number to get slope in degrees (Verdin datasets need to be divided by 100)
    :type slopediv: float
    :param codiv: Divide cohesion by this number to get reasonable numbers (For Godt method, need to divide by 10 because that is how it was calibrated, but values are reasonable without multiplying for regular analysis)
    :type codiv: float

    :returns maplayers:  Dictionary containing output and input layers (if saveinputs=True) along with metadata formatted like maplayers['layer name']={'grid': mapio grid2D object, 'label': 'label for colorbar and top line of subtitle', 'type': 'output or input to model', 'description': 'detailed description of layer for subtitle, potentially including source information'}
    :type maplayers: OrderedDict

    :raises NameError: when unable to parse the config correctly (probably a formatting issue in the configfile) or when unable to find the shakefile (ShakeMap URL or filepath) - these cause the program to end

    """
    # Empty refs
    slopesref = 'unknown'
    slopelref = 'unknown'
    cohesionlref = 'unknown'
    cohesionsref = 'unknown'
    frictionsref = 'unknown'
    frictionlref = 'unknown'
    modellref = 'unknown'
    modelsref = 'unknown'

    # Parse config - should make it so it uses defaults if any are missing...
    try:
        slopefile = config['mechanistic_models']['classic_newmark']['layers']['slope']['file']
        slopeunits = config['mechanistic_models']['classic_newmark']['layers']['slope']['units']
        cohesionfile = config['mechanistic_models']['classic_newmark']['layers']['cohesion']['file']
        cohesionunits = config['mechanistic_models']['classic_newmark']['layers']['cohesion']['units']
        frictionfile = config['mechanistic_models']['classic_newmark']['layers']['friction']['file']
        frictionunits = config['mechanistic_models']['classic_newmark']['layers']['friction']['units']

        thick = float(config['mechanistic_models']['classic_newmark']['parameters']['thick'])
        uwt = float(config['mechanistic_models']['classic_newmark']['parameters']['uwt'])
        nodata_cohesion = float(config['mechanistic_models']['classic_newmark']['parameters']['nodata_cohesion'])
        nodata_friction = float(config['mechanistic_models']['classic_newmark']['parameters']['nodata_friction'])
        try:
            dnthresh = float(config['mechanistic_models']['classic_newmark']['parameters']['dnthresh'])
        except Exception:
            if probtype == 'threshold':
                dnthresh = 5.
                print('Unable to find dnthresh in config, using 5cm')
            else:
                dnthresh = None
        fsthresh = float(config['mechanistic_models']['classic_newmark']['parameters']['fsthresh'])
        acthresh = float(config['mechanistic_models']['classic_newmark']['parameters']['acthresh'])
        slopethresh = float(config['mechanistic_models']['classic_newmark']['parameters']['slopethresh'])
        try:
            m = float(config['mechanistic_models']['classic_newmark']['parameters']['m'])
        except Exception:
            print('no constant saturated thickness specified, m=0 if no watertable file is found')
            m = 0.
    except Exception as e:
        raise NameError('Could not parse configfile, %s' % e)

    try:  # Try to fetch source information from config
        modelsref = config['mechanistic_models']['classic_newmark']['shortref']
        modellref = config['mechanistic_models']['classic_newmark']['longref']
        slopesref = config['mechanistic_models']['classic_newmark']['layers']['slope']['shortref']
        slopelref = config['mechanistic_models']['classic_newmark']['layers']['slope']['longref']
        cohesionsref = config['mechanistic_models']['classic_newmark']['layers']['cohesion']['shortref']
        cohesionlref = config['mechanistic_models']['classic_newmark']['layers']['cohesion']['longref']
        frictionsref = config['mechanistic_models']['classic_newmark']['layers']['friction']['shortref']
        frictionlref = config['mechanistic_models']['classic_newmark']['layers']['friction']['longref']
    except Exception:
        print('Was not able to retrieve all references from config file. Continuing')

    # Cut and resample all files
    shkgdict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
    slpdict = GDALGrid.getFileGeoDict(slopefile)
    if bounds is not None:  # Make sure bounds are within ShakeMap Grid
        if shkgdict.xmin > bounds['xmin'] or shkgdict.xmax < bounds['xmax'] or shkgdict.ymin > bounds['ymin'] or shkgdict.ymax < bounds['ymax']:
            print('Specified bounds are outside shakemap area, using ShakeMap bounds instead')
            bounds = None
    if bounds is not None:
        tempgdict = GeoDict({'xmin': bounds['xmin'], 'ymin': bounds['ymin'], 'xmax': bounds['xmax'], 'ymax': bounds['ymax'], 'dx': 100., 'dy': 100., 'nx': 100., 'ny': 100.}, adjust='res')
        gdict = slpdict.getBoundsWithin(tempgdict)
    else:  # Get boundaries from shakemap if not specified
        shkgdict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        slpdict = GDALGrid.getFileGeoDict(slopefile)
        gdict = slpdict.getBoundsWithin(shkgdict)

    # Load in slope file
    slopegrid = GDALGrid.load(slopefile, samplegeodict=gdict, resample=False)
    gdict = slopegrid.getGeoDict()  # Get this again just in case it changed
    slope = slopegrid.getData().astype(float)/slopediv  # Adjust slope to degrees, if needed
    # Change any zero slopes to a very small number to avoid dividing by zero later
    slope[slope == 0] = 1e-8

    # Load in shakefile
    if not os.path.isfile(shakefile):
        if isURL(shakefile):
            shakefile = getGridURL(shakefile)  # returns a file object
        else:
            raise NameError('Could not find "%s" as a file or a valid url' % (shakefile))

    # Load in shakemap, resample to slope file (this will be important when go to higher res)
    shakemap = ShakeGrid.load(shakefile, samplegeodict=gdict, resample=True, method='linear', adjust='res')
    M = shakemap.getEventDict()['magnitude']
    # Read in uncertainty layer, if present
    if uncertfile is not None:
        try:
            uncert = ShakeGrid.load(uncertfile, samplegeodict=gdict, resample=True, method='linear', adjust='res')
        except Exception:
            print('Could not read uncertainty file, ignoring uncertainties')
            uncertfile = None

    # Read in the cohesion and friction files, resampled to slope grid
    cohesion = GDALGrid.load(cohesionfile, samplegeodict=gdict, resample=True, method='nearest').getData().astype(float)/codiv
    cohesion[np.isnan(cohesion)] = nodata_cohesion
    friction = GDALGrid.load(frictionfile, samplegeodict=gdict, resample=True, method='nearest').getData().astype(float)
    friction[np.isnan(friction)] = nodata_friction

    # See if there is a water table depth file and read it in if there is
    try:
        waterfile = config['mechanistic_models']['classic_newmark']['layers']['watertable']['file']
        watertable = GDALGrid.load(waterfile, samplegeodict=gdict, resample=True, method='linear').getData()  # Needs to be in meters!
        uwtw = float(config['mechanistic_models']['classic_newmark']['parameters']['uwtw'])
        try:
            watersref = config['mechanistic_models']['classic_newmark']['layers']['watertable']['shortref']
            waterlref = config['mechanistic_models']['classic_newmark']['layers']['watertable']['longref']
        except Exception:
            print('Was not able to retrieve water table references from config file. Continuing')

    except Exception:
        print(('Water table file not specified or readable, assuming constant saturated thickness proportion of %0.1f' % m))
        watertable = None
        try:
            uwtw = float(config['mechanistic_models']['classic_newmark']['parameters']['uwtw'])
        except Exception:
            print('Could not read soil wet unit weight, using 18.8 kN/m3')
            uwtw = 18.8

    # Factor of safety
    if watertable is not None:
        watertable[watertable > thick] = thick
        m = (thick - watertable)/thick
    FS = cohesion/(uwt*thick*np.sin(slope*(np.pi/180.))) + np.tan(friction*(np.pi/180.))/np.tan(slope*(np.pi/180.)) - (m*uwtw*np.tan(friction*(np.pi/180.)))/(uwt*np.tan(slope*(np.pi/180.)))
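    # The three terms above are, respectively, the cohesive component
    # c/(gamma*t*sin(slope)), the frictional component tan(phi)/tan(slope),
    # and the pore-pressure reduction m*gamma_w*tan(phi)/(gamma*tan(slope))
    # of the infinite-slope factor of safety.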
    FS[FS < fsthresh] = fsthresh

    # Compute critical acceleration, in g
    Ac = (FS-1.)*np.sin(slope*(np.pi/180.))  # This gives ac in g, equations that multiply by g give ac in m/s2
    Ac[Ac < acthresh] = acthresh
    Ac[slope < slopethresh] = float('nan')

    # Get PGA in g (PGA is %g in ShakeMap, convert to g)
    PGA = shakemap.getLayer('pga').getData().astype(float)/100.
    PGV = shakemap.getLayer('pgv').getData().astype(float)
    if uncertfile is not None:
        stdpga = uncert.getLayer('stdpga')
        stdpgv = uncert.getLayer('stdpgv')
        # Estimate PGA +- 1std
        PGAmin = np.exp(np.log(PGA*100.) - stdpga.getData())/100.
        PGAmax = np.exp(np.log(PGA*100.) + stdpga.getData())/100.
        PGVmin = np.exp(np.log(PGV) - stdpgv.getData())
        PGVmax = np.exp(np.log(PGV) + stdpgv.getData())

    np.seterr(invalid='ignore')  # Ignore errors so still runs when Ac > PGA, just leaves nan instead of crashing

    if regressionmodel == 'J_PGA':
        Dn = J_PGA(Ac, PGA)
        if uncertfile is not None:
            Dnmin = J_PGA(Ac, PGAmin)
            Dnmax = J_PGA(Ac, PGAmax)
    elif regressionmodel == 'J_PGA_M':
        Dn = J_PGA_M(Ac, PGA, M)
        if uncertfile is not None:
            Dnmin = J_PGA_M(Ac, PGAmin, M)
            Dnmax = J_PGA_M(Ac, PGAmax, M)

    elif regressionmodel == 'RS_PGA_M':
        Dn = RS_PGA_M(Ac, PGA, M)
        if uncertfile is not None:
            Dnmin = RS_PGA_M(Ac, PGAmin, M)
            Dnmax = RS_PGA_M(Ac, PGAmax, M)

    elif regressionmodel == 'RS_PGA_PGV':
        Dn = RS_PGA_PGV(Ac, PGA, PGV)
        if uncertfile is not None:
            Dnmin = RS_PGA_PGV(Ac, PGAmin, PGVmin)
            Dnmax = RS_PGA_PGV(Ac, PGAmax, PGVmax)
    else:
        print('Unrecognized regression model, aborting')
        return

    units = 'probability'
    label = 'Landslide Probability'
    if probtype.lower() == 'jibson2000':
        PROB = 0.335*(1-np.exp(-0.048*Dn**1.565))
        dnthresh = None
        if uncertfile is not None:
            PROBmin = 0.335*(1-np.exp(-0.048*Dnmin**1.565))
            PROBmax = 0.335*(1-np.exp(-0.048*Dnmax**1.565))
    elif probtype.lower() == 'threshold':
        PROB = Dn.copy()
        PROB[PROB <= dnthresh] = 0
        PROB[PROB > dnthresh] = 1
        units = 'prediction'
        label = 'Predicted Landslides'
        if uncertfile is not None:
            PROBmin = Dnmin.copy()
            PROBmin[PROBmin <= dnthresh] = 0
            PROBmin[PROBmin > dnthresh] = 1
            PROBmax = Dnmax.copy()
            PROBmax[PROBmax <= dnthresh] = 0
            PROBmax[PROBmax > dnthresh] = 1
    else:
        print('Invalid probtype, assuming jibson2000')
        PROB = 0.335*(1-np.exp(-0.048*Dn**1.565))
        dnthresh = None
        if uncertfile is not None:
            PROBmin = 0.335*(1-np.exp(-0.048*Dnmin**1.565))
            PROBmax = 0.335*(1-np.exp(-0.048*Dnmax**1.565))

    # Turn output and inputs into into grids and put in mapLayers dictionary
    maplayers = collections.OrderedDict()

    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])

    if watertable is not None:
        des = 'variable'
    else:
        des = m
    description = {
        'name': modelsref,
        'longref': modellref,
        'units': units,
        'shakemap': shakedetail,
        'parameters': {
            'regressionmodel': regressionmodel,
            'thickness_m': thick,
            'unitwt_kNm3': uwt,
            'dnthresh_cm': dnthresh,
            'acthresh_g': acthresh,
            'fsthresh': fsthresh,
            'slopethresh': slopethresh,
            'sat_proportion': des
        }
    }

    maplayers['model'] = {'grid': GDALGrid(PROB, gdict), 'label': label, 'type': 'output', 'description': description}
    if uncertfile is not None:
        maplayers['modelmin'] = {'grid': GDALGrid(PROBmin, gdict), 'label': label+' -1std', 'type': 'output', 'description': description}
        maplayers['modelmax'] = {'grid': GDALGrid(PROBmax, gdict), 'label': label+' +1std', 'type': 'output', 'description': description}

    if saveinputs:
        maplayers['pga'] = {'grid': GDALGrid(PGA, gdict), 'label': 'PGA (g)', 'type': 'input', 'description': {'units': 'g', 'shakemap': shakedetail}}
        maplayers['FS'] = {'grid': GDALGrid(FS, gdict), 'label': 'Factor of Safety', 'type': 'input', 'description': {'units': 'unitless'}}
        maplayers['Ac'] = {'grid': GDALGrid(Ac, gdict), 'label': 'Critical acceleration (g)', 'type': 'input'}
        maplayers['Dn'] = {'grid': GDALGrid(Dn, gdict), 'label': 'Newmark Displacement (cm)', 'type': 'input'}
        maplayers['slope'] = {'grid': GDALGrid(slope, gdict), 'label': r'Max slope ($^\circ$)', 'type': 'input', 'description': {'units': 'degrees', 'name': slopesref, 'longref': slopelref}}
        maplayers['cohesion'] = {'grid': GDALGrid(cohesion, gdict), 'label': 'Cohesion (kPa)', 'type': 'input', 'description': {'units': 'kPa (adjusted)', 'name': cohesionsref, 'longref': cohesionlref}}
        maplayers['friction angle'] = {'grid': GDALGrid(friction, gdict), 'label': r'Friction angle ($^\circ$)', 'type': 'input', 'description': {'units': 'degrees', 'name': frictionsref, 'longref': frictionlref}}
        if uncertfile is not None:
            maplayers['pgamin'] = {'grid': GDALGrid(PGAmin, gdict), 'label': 'PGA - 1std (g)', 'type': 'input', 'description': {'units': 'g', 'shakemap': shakedetail}}
            maplayers['pgamax'] = {'grid': GDALGrid(PGAmax, gdict), 'label': 'PGA + 1std (g)', 'type': 'input', 'description': {'units': 'g', 'shakemap': shakedetail}}
        if 'PGV' in regressionmodel:
            maplayers['pgv'] = {'grid': GDALGrid(PGV, gdict), 'label': 'PGV (cm/s)', 'type': 'input', 'description': {'units': 'cm/s', 'shakemap': shakedetail}}
            if uncertfile is not None:
                maplayers['pgvmin'] = {'grid': GDALGrid(PGVmin, gdict), 'label': 'PGV - 1std (cm/s)', 'type': 'input', 'description': {'units': 'cm/s', 'shakemap': shakedetail}}
                maplayers['pgvmax'] = {'grid': GDALGrid(PGVmax, gdict), 'label': 'PGV + 1std (cm/s)', 'type': 'input', 'description': {'units': 'cm/s', 'shakemap': shakedetail}}
        if watertable is not None:
            maplayers['sat thick prop'] = {'grid': GDALGrid(m, gdict), 'label': 'Saturated thickness proportion [0,1]', 'type': 'input', 'description': {'units': 'proportion', 'name': watersref, 'longref': waterlref}}

    return maplayers
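
A minimal invocation sketch for classic() (hypothetical paths; the config
must contain the 'mechanistic_models' -> 'classic_newmark' sections parsed
above):

from configobj import ConfigObj

config = ConfigObj('/path/to/classic_newmark.ini')  # hypothetical path
maplayers = classic('/path/to/grid.xml', config,
                    regressionmodel='J_PGA_M', probtype='jibson2000')
prob = maplayers['model']['grid'].getData()  # probability array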
Code example #40
0
def computePexp(grid, pop_file, shakefile=None, shakethreshtype='pga',
                shakethresh=0., probthresh=0., stdgrid2D=None,
                stdtype='full', maxP=1., sill1=None, range1=None):
    """
    Get exposure-based statistics.

    Args:
        grid: Model grid.
        pop_file (str):  Path to the landscan population grid.
        shakefile (str): Optional, path to shakemap file to use for ground
            motion threshold.
        shakethreshtype(str): Optional, Type of ground motion to use for
            shakethresh, 'pga', 'pgv', or 'mmi'.
        shakethresh (float): Shaking threshold in %g for
            pga, cm/s for pgv, float for mmi.
        probthresh (float): Exclude any cells with
            probabilities less than or equal to this value.
        stdgrid2D: grid2D object of model standard deviations (optional)
        stdtype (str): assumption of spatial correlation used to compute
            the stdev of the statistics, 'max', 'min', 'mean' of max and min,
            or 'full' (default) which estimates the range of correlation and
            accounts for covariance. Will return 'mean' if
            range and sill cannot be estimated.
        maxP (float): the maximum possible probability of the model
        sill1 (float): If known, the sill of the variogram of grid2D, will be
            estimated if None and stdtype='full'
        range1 (float): If known, the range of the variogram of grid2D, will
            be estimated if None and stdtype='full'

    Returns:
        dict: Dictionary with keys:
            exp_pop_# - exposure value, where # is the shakethresh
            exp_std_# - stdev of exp_pop_# (if stdgrid2D is supplied)
            elim_# - the maximum exposure value possible with the
                applied thresholds and given maxP value
            p_exp_# - beta distribution shape factor p (sometimes called alpha)
            q_exp_# - beta distribution shape factor q (sometimes called beta)
    """

    model = grid.getData().copy()
    mdict = grid.getGeoDict()

    # Figure out difference in resolution of popfile to shakefile
    ptemp, J = GDALGrid.getFileGeoDict(pop_file)
    factor = ptemp.dx/mdict.dx

    # Cut out area from population file
    popcut1 = quickcut(pop_file, mdict, precise=False, extrasamp=2., method='nearest')
    #tot1 = np.sum(popcut1.getData())
    # Adjust for factor to prepare for upsampling to avoid creating new people
    popcut1.setData(popcut1.getData()/factor**2)
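    # (upsampling splits each coarse cell into about factor**2 finer cells,
    # so dividing by factor**2 keeps the total population roughly constant)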

    # Upsample to mdict
    popcut = popcut1.interpolate2(mdict, method='nearest')
    popdat = popcut.getData()
    exp_pop = {}

    if shakefile is not None:
        if shakethresh < 0.:
            raise Exception('shaking threshold must be equal to or greater '
                            'than zero')
        # resample shakemap to grid2D
        temp = ShakeGrid.load(shakefile)
        shk = temp.getLayer(shakethreshtype)
        shk = shk.interpolate2(mdict)
        if shk.getGeoDict() != mdict:
            raise Exception('shakemap was not resampled to exactly the same '
                            'geodict as the model')
        shkdat = shk.getData()
        model[shkdat < shakethresh] = float('nan')
    else:
        shakethresh = 0.
        shkdat = None

    mu = np.nansum(model[model >= probthresh] * popdat[model >= probthresh])
    exp_pop['exp_pop_%1.2fg' % (shakethresh/100.,)] = mu
    #N = np.nansum([model >= probthresh])
    #exp_pop['N_%1.2fg' % (shakethresh/100.,)] = N
    elim = np.nansum(popdat[model >= probthresh])*maxP
    exp_pop['elim_%1.2fg' % (shakethresh/100.,)] = elim

    if stdgrid2D is not None:
        std = stdgrid2D.getData().copy()
        if np.nanmax(std) > 0. and np.nanmax(model) >= probthresh:
            totalmin = np.sqrt(np.nansum((popdat[model >= probthresh]*std[model >= probthresh])**2.))
            totalmax = np.nansum(std[model >= probthresh] * popdat[model >= probthresh])
            if stdtype == 'full':
                if sill1 is None or range1 is None:
                    modelfresh = grid.getData().copy()
                    range1, sill1 = semivario(modelfresh, probthresh,
                                              shakethresh=shakethresh,
                                              shakegrid=shkdat)
                if range1 is None:
                    # Use mean
                    exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)] = (totalmax+totalmin)/2.
                else:
                    # Zero out std at cells where the model probability was
                    # below the threshold because those cells are not
                    # included in the exposure estimate
                    stdz = std.copy()
                    stdz[model < probthresh] = 0.
                    svar1 = svar(stdz, range1, sill1, scale=popdat)
                    exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)] = np.sqrt(svar1)
                    #exp_pop['exp_range_%1.2fg' % (shakethresh/100.,)] = range1
                    #exp_pop['exp_sill_%1.2fg' % (shakethresh/100.,)] = sill1

            elif stdtype == 'max':
                exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)] = totalmax
            elif stdtype == 'min':
                exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)] = totalmin
            else:
                exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)] = (totalmax+totalmin)/2.
            # Beta distribution shape factors
            var = exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)]**2.
            exp_pop['p_exp_%1.2fg' % (shakethresh/100.,)] = (mu/elim)*((elim*mu-mu**2)/var-1)
            exp_pop['q_exp_%1.2fg' % (shakethresh/100.,)] = (1-mu/elim)*((elim*mu-mu**2)/var-1)
        else:
            print('no std values above zero, filling with zeros')
            exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)] = 0.
            exp_pop['p_exp_%1.2fg' % (shakethresh/100.,)] = 0.
            exp_pop['q_exp_%1.2fg' % (shakethresh/100.,)] = 0.
    else:
        exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)] = 0.
        exp_pop['p_exp_%1.2fg' % (shakethresh/100.,)] = 0.
        exp_pop['q_exp_%1.2fg' % (shakethresh/100.,)] = 0.

    return exp_pop
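
The p/q values above are a method-of-moments fit of a beta distribution
rescaled to [0, elim]: with scaled mean m = mu/elim and scaled variance
v = var/elim**2, the textbook formulas p = m*(m*(1-m)/v - 1) and
q = (1-m)*(m*(1-m)/v - 1) reduce algebraically to the expressions used in
the code. A small self-check sketch with arbitrary example numbers:

# Arbitrary example values, not drawn from any real event
mu, elim, var = 1200., 5000., 4.0e5
m, v = mu / elim, var / elim ** 2
p1 = m * (m * (1 - m) / v - 1)
q1 = (1 - m) * (m * (1 - m) / v - 1)
p2 = (mu / elim) * ((elim * mu - mu ** 2) / var - 1)  # as in computePexp
q2 = (1 - mu / elim) * ((elim * mu - mu ** 2) / var - 1)
assert abs(p1 - p2) < 1e-9 and abs(q1 - q2) < 1e-9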
Code example #41
0
def draw_contour(shakefile,
                 popfile,
                 oceanfile,
                 cityfile,
                 outfilename,
                 make_png=False):
    """Create a contour map showing population (greyscale) underneath contoured MMI.

    :param shakefile:
      String path to ShakeMap grid.xml file.
    :param popfile:
      String path to GDALGrid-compliant file containing population data.
    :param oceanfile:
      String path to file containing ocean vector data in a format compatible with fiona.
    :param cityfile:
      String path to file containing GeoNames cities data.
    :param outfilename:
      String path containing desired output PDF filename.
    :param make_png:
      Boolean indicating whether a PNG version of the file should also be created in the
      same output folder as the PDF.
    :returns:
      Tuple containing: 
        - Name of PNG file created, or None if PNG output not specified.
        - CartopyCities object containing the cities that were rendered on the contour map.
    """
    #load the shakemap - for the time being, we're interpolating the
    #population data to the shakemap, which would be important
    #if we were doing math with the pop values.  We're not, so I think it's ok.
    shakegrid = ShakeGrid.load(shakefile, adjust='res')
    gd = shakegrid.getGeoDict()

    #retrieve the epicenter - this will get used on the map
    clat = shakegrid.getEventDict()['lat']
    clon = shakegrid.getEventDict()['lon']

    #load the population data, sample to shakemap
    popgrid = GDALGrid.load(popfile, samplegeodict=gd, resample=True)
    popdata = popgrid.getData()

    #smooth the MMI data for contouring
    mmi = shakegrid.getLayer('mmi').getData()
    smoothed_mmi = gaussian_filter(mmi, FILTER_SMOOTH)

    #clip the ocean data to the shakemap
    bbox = (gd.xmin, gd.ymin, gd.xmax, gd.ymax)
    oceanshapes = _clip_bounds(bbox, oceanfile)

    #load the cities data, limit to cities within shakemap bounds
    allcities = CartopyCities.fromDefault()
    cities = allcities.limitByBounds((gd.xmin, gd.xmax, gd.ymin, gd.ymax))

    # Define ocean/land masks to do the contours, since we want different contour line styles over land and water.
    oceangrid = Grid2D.rasterizeFromGeometry(oceanshapes,
                                             gd,
                                             burnValue=1.0,
                                             fillValue=0.0,
                                             mustContainCenter=False,
                                             attribute=None)
    oceandata = oceangrid.getData()
    oceanmask = np.ma.masked_where(oceandata == 1.0, smoothed_mmi)
    landmask = np.ma.masked_where(oceandata == 0.0, smoothed_mmi)

    # Use our GMT-inspired palette class to create population and MMI colormaps
    popmap = ColorPalette.fromPreset('pop')
    mmimap = ColorPalette.fromPreset('mmi')

    #use the ShakeMap to determine the aspect ratio of the map
    aspect = (gd.xmax - gd.xmin) / (gd.ymax - gd.ymin)
    figheight = FIGWIDTH / aspect
    fig = plt.figure(figsize=(FIGWIDTH, figheight))

    # set up axes object with PlateCarree (non) projection.
    ax = plt.axes([0.02, 0.02, 0.95, 0.95], projection=ccrs.PlateCarree())

    #set the image extent to that of the data
    img_extent = (gd.xmin, gd.xmax, gd.ymin, gd.ymax)
    plt.imshow(popdata,
               origin='upper',
               extent=img_extent,
               cmap=popmap.cmap,
               vmin=popmap.vmin,
               vmax=popmap.vmax,
               zorder=9,
               interpolation='none')

    #define arrays of latitude and longitude we will use to plot MMI contours
    lat = np.linspace(gd.ymin, gd.ymax, gd.ny)
    lon = np.linspace(gd.xmin, gd.xmax, gd.nx)

    #contour the masked land/ocean MMI data at half-integer levels
    plt.contour(lon,
                lat,
                landmask,
                linewidths=3.0,
                linestyles='solid',
                zorder=10,
                cmap=mmimap.cmap,
                vmin=mmimap.vmin,
                vmax=mmimap.vmax,
                levels=np.arange(0.5, 10.5, 1.0))

    plt.contour(lon,
                lat,
                oceanmask,
                linewidths=2.0,
                linestyles='dashed',
                zorder=13,
                cmap=mmimap.cmap,
                vmin=mmimap.vmin,
                vmax=mmimap.vmax,
                levels=np.arange(0.5, 10.5, 1.0))

    #the idea here is to plot invisible MMI contours at integer levels and then label them.
    #labeling part does not currently work.
    cs = plt.contour(lon,
                     lat,
                     landmask,
                     linewidths=0.0,
                     levels=np.arange(0, 11),
                     zorder=10)
    #clabel is not actually drawing anything, but it is blotting out a portion of the contour line.  ??
    ax.clabel(cs, np.arange(0, 11), colors='k', zorder=25)

    #set the extent of the map to our data
    ax.set_extent([lon.min(), lon.max(), lat.min(), lat.max()])

    #draw the ocean data
    if isinstance(oceanshapes[0], mPolygon):
        for shape in oceanshapes[0]:
            ocean_patch = PolygonPatch(shape,
                                       zorder=10,
                                       facecolor=WATERCOLOR,
                                       edgecolor=WATERCOLOR)
            ax.add_patch(ocean_patch)
    else:
        ocean_patch = PolygonPatch(oceanshapes[0],
                                   zorder=10,
                                   facecolor=WATERCOLOR,
                                   edgecolor=WATERCOLOR)
        ax.add_patch(ocean_patch)

    # add coastlines with desired scale of resolution
    ax.coastlines('10m', zorder=11)

    #draw meridians and parallels using Cartopy's functions for that
    gl = ax.gridlines(crs=ccrs.PlateCarree(),
                      draw_labels=True,
                      linewidth=2,
                      color=(0.9, 0.9, 0.9),
                      alpha=0.5,
                      linestyle='-',
                      zorder=20)
    gl.xlabels_top = False
    gl.xlabels_bottom = False
    gl.ylabels_left = False
    gl.ylabels_right = False
    gl.xlines = True
    xlocs = np.arange(np.floor(gd.xmin - 1), np.ceil(gd.xmax + 1))
    ylocs = np.arange(np.floor(gd.ymin - 1), np.ceil(gd.ymax + 1))
    gl.xlocator = mticker.FixedLocator(xlocs)
    gl.ylocator = mticker.FixedLocator(ylocs)
    gl.xformatter = LONGITUDE_FORMATTER
    gl.yformatter = LATITUDE_FORMATTER
    gl.xlabel_style = {'size': 15, 'color': 'black'}
    gl.ylabel_style = {'size': 15, 'color': 'black'}

    #drawing our own tick labels INSIDE the plot, as Cartopy doesn't seem to support this.
    yrange = gd.ymax - gd.ymin
    xrange = gd.xmax - gd.xmin
    for xloc in gl.xlocator.locs:
        outside = xloc < gd.xmin or xloc > gd.xmax
        #don't draw labels when we're too close to either edge
        near_edge = (xloc - gd.xmin) < (xrange * 0.1) or (gd.xmax - xloc) < (
            xrange * 0.1)
        if outside or near_edge:
            continue
        if xloc < 0:
            xtext = r'$%s^\circ$W' % str(abs(int(xloc)))
        else:
            xtext = r'$%s^\circ$E' % str(int(xloc))
        ax.text(xloc,
                gd.ymax - (yrange / 35),
                xtext,
                fontsize=14,
                zorder=20,
                ha='center',
                fontname='Bitstream Vera Sans')

    for yloc in gl.ylocator.locs:
        outside = yloc < gd.ymin or yloc > gd.ymax
        #don't draw labels when we're too close to either edge
        near_edge = (yloc - gd.ymin) < (yrange * 0.1) or (gd.ymax - yloc) < (
            yrange * 0.1)
        if outside or near_edge:
            continue
        if yloc < 0:
            ytext = r'$%s^\circ$S' % str(abs(int(yloc)))
        else:
            ytext = r'$%s^\circ$N' % str(int(yloc))
        thing = ax.text(gd.xmin + (xrange / 100),
                        yloc,
                        ytext,
                        fontsize=14,
                        zorder=20,
                        va='center',
                        fontname='Bitstream Vera Sans')

    #Limit the number of cities we show - we may not want to use the population size
    #filter in the global case, but the map collision filter is a little sketchy right now.
    mapcities = cities.limitByPopulation(25000)
    mapcities = mapcities.limitByGrid()
    mapcities = mapcities.limitByMapCollision(ax, shadow=True)
    mapcities.renderToMap(ax, shadow=True, fontsize=12, zorder=11)

    #Get the corner of the map with the lowest population
    corner_rect, filled_corner = _get_open_corner(popgrid, ax)
    clat = round_to_nearest(clat, 1.0)
    clon = round_to_nearest(clon, 1.0)

    #draw a little globe in the corner showing in small-scale where the earthquake is located.
    proj = ccrs.Orthographic(central_latitude=clat, central_longitude=clon)
    ax2 = fig.add_axes(corner_rect, projection=proj)
    ax2.add_feature(cartopy.feature.OCEAN,
                    zorder=0,
                    facecolor=WATERCOLOR,
                    edgecolor=WATERCOLOR)
    ax2.add_feature(cartopy.feature.LAND, zorder=0, edgecolor='black')
    ax2.plot([clon], [clat],
             'w*',
             linewidth=1,
             markersize=16,
             markeredgecolor='k',
             markerfacecolor='r')
    gh = ax2.gridlines()
    ax2.set_global()
    ax2.outline_patch.set_edgecolor('black')
    ax2.outline_patch.set_linewidth(2)

    #Draw the map scale in the unoccupied lower corner.
    corner = 'lr'
    if filled_corner == 'lr':
        corner = 'll'
    draw_scale(ax, corner, pady=0.05, padx=0.05)

    plt.savefig(outfilename)

    pngfile = None
    if make_png:
        fpath, fname = os.path.split(outfilename)
        fbase, t = os.path.splitext(fname)
        pngfile = os.path.join(fpath, fbase + '.png')
        plt.savefig(pngfile)

    return (pngfile, mapcities)
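
An invocation sketch with hypothetical paths (all input files must already
exist on disk):

pngfile, mapcities = draw_contour(
    '/data/event/grid.xml',                 # ShakeMap grid
    '/data/lspop2016.flt',                  # population grid
    '/data/ne_10m_ocean/ne_10m_ocean.shp',  # ocean polygons
    '/data/cities1000.txt',                 # GeoNames cities
    '/output/contour_map.pdf',
    make_png=True)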
Code example #42
0
File: gfail_test.py Project: lmateus/groundfailure
def test_zhu2015(tmpdir):
    shakegrid = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    pathcmd = """
        gfail --set-default-paths \
        -d %s/loma_prieta/model_inputs \
        -o [TMPOUT] \
        -c %s/defaultconfigfiles/models \
        -m %s/defaultconfigfiles/mapconfig.ini \
        -md %s/loma_prieta/mapping_inputs
    """ % (datadir, upone, upone, datadir)

    trimfile = '%s/loma_prieta/mapping_inputs/ne_10m_ocean/ne_10m_ocean.shp' \
               % datadir

    # Make a copy of current defaults
    default_file = os.path.join(os.path.expanduser("~"), ".gfail_defaults")
    if os.path.exists(default_file):
        shutil.copy(default_file, default_file + '_bak')

    try:
        try:
            p = os.path.join(str(tmpdir.name), "sub")
        except AttributeError:
            p = os.path.join(str(tmpdir), "sub")
        if not os.path.exists(p):
            os.makedirs(p)

        # Clear paths
        rc, so, se = get_command_output('gfail -reset')
        # Modify paths
        pathcmd = pathcmd.replace('[TMPOUT]', p)
        rc1, so1, se1 = get_command_output(pathcmd)

        with open(default_file, "a") as f:
            f.write("popfile = %s" %
                    os.path.join(datadir, 'loma_prieta/lspop2016_lp.flt'))

        # List paths
        rc3, so3, se3 = get_command_output('gfail --list-default-paths')

        # Run model with bounds
        runcmd = "gfail %s/test_conf %s -b 'zoom, pga, 2' --hdf5 -tr  %s -ext"\
                 % (datadir, shakegrid, trimfile)
        rc4, so4, se4 = get_command_output(runcmd)

        # Run model
        runcmd = "gfail %s/test_conf %s --gis -pn -pi -pd --hdf5 -ext" \
                 % (datadir, shakegrid)
        rc2, so2, se2 = get_command_output(runcmd)

        # Read in the testing data
        test_file = os.path.join(p, '19891018000415_zhu_2015_model.tif')
        test_grid = GDALGrid.load(test_file)
        test_data = test_grid.getData()

        # Read in target file
        target_file = os.path.join(datadir, 'loma_prieta', 'targets',
                                   '19891018000415_zhu_2015_model.tif')

        if changetarget:
            # To change target data:
            test_grid.save(test_file)
            cmd = 'gdal_translate -a_srs EPSG:4326 -of GTiff %s %s' % (
                test_file, target_file)
            rc, so, se = get_command_output(cmd)

        target_grid = GDALGrid.load(target_file)
        target_data = target_grid.getData()

    except Exception as e:
        print(e)

    # Put defaults back
    if os.path.exists(default_file + '_bak'):
        shutil.copy(default_file + '_bak', default_file)

    # Remove backup and tempfile
    if os.path.exists(default_file + '_bak'):
        os.remove(default_file + '_bak')
    shutil.rmtree(p)

    # Test that everything ran
    np.testing.assert_equal(True, rc, 'gfail reset failed')
    np.testing.assert_equal(True, rc1, 'gfail path modification failed')
    np.testing.assert_equal(True, rc2, se2.decode())
    np.testing.assert_equal(True, rc3, 'gfail list-default-paths failed')
    np.testing.assert_equal(True, rc4, se4.decode())

    # Then do test of values
    np.testing.assert_allclose(target_data, test_data, rtol=1e-3)
Code example #43
0
def run_gfail(args):
    """Runs ground failure.

    Args:
        args: dictionary or argument parser Namespace output by bin/gfail
            program.

    Returns:
        list: Names of created files.

    """
    # TODO: ADD CONFIG VALIDATION STEP THAT MAKES SURE ALL THE FILES EXIST
    filenames = []
    # If args is a dictionary, convert to a Namespace
    if isinstance(args, dict):
        args = Namespace(**args)

    if args.set_default_paths:
        set_default_paths(args)
        print('default paths set, continuing...\n')

    if args.list_default_paths:
        list_default_paths()
        return

    if args.reset_default_paths:
        reset_default_paths()
        return

    if args.make_webpage:
        # Turn on GIS and HDF5 flags
        gis = True
        hdf5 = True
        kmz = True
    else:
        gis = args.gis
        hdf5 = args.hdf5
        kmz = args.kmz

    # Figure out what models will be run
    if args.shakefile is not None:  # user intends to actually run some models
        shakefile = args.shakefile

        # make output location for things
        if args.output_filepath is None:
            outdir = os.getcwd()
        else:
            outdir = args.output_filepath

        if hdf5 or gis or kmz:
            if not os.path.exists(outdir):
                os.makedirs(outdir)

        # download if is url
        # cleanup = False
        if not os.path.isfile(shakefile):
            if isURL(shakefile):
                # getGridURL returns a named temporary file object
                shakefile = getGridURL(shakefile)
                # cleanup = True  # Be sure to delete it after
            else:
                raise NameError('Could not find "%s" as a file or a valid url'
                                % shakefile)
        eventid = getHeaderData(shakefile)[0]['event_id']

        # Get entire path so won't break if running gfail with relative path
        shakefile = os.path.abspath(shakefile)

        if args.extract_contents:
            outfolder = outdir
        else:  # Nest in a folder named by eventid
            outfolder = os.path.join(outdir, eventid)
            if not os.path.exists(outfolder):
                os.makedirs(outfolder)

        # Copy shake grid into output directory
        # --- this is based on advice from Mike that when running in production
        #     the shake grids are not archived, so if we need/want to have
        #     the exact grid used for the calculation later, if there's ever a
        #     question about how the calculation was done, the safest thing is
        #     to store a copy of it here.
        shake_copy = os.path.join(outfolder, "grid.xml")
        shutil.copyfile(shakefile, shake_copy)

        if args.uncertfile is not None:
            uncertfile = os.path.abspath(args.uncertfile)
            unc_copy = os.path.join(outfolder, "uncertainty.xml")
            shutil.copyfile(uncertfile, unc_copy)
        else:
            uncertfile = None

        # Write shakefile to a file for use later
        shakename = os.path.join(outfolder, "shakefile.txt")
        with open(shakename, "wt") as shake_file:
            shake_file.write(shake_copy)
        filenames.append(shakename)

        # Check that shakemap bounds do not cross 180/-180 line

        if args.set_bounds is None:
            sd = ShakeGrid.getFileGeoDict(shakefile)
            if sd.xmin > sd.xmax:
                print('\nShakeMap crosses 180/-180 line, setting bounds so '
                      'only side with more land area is run')
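                # sd.xmax + 180 is the longitudinal width of the portion
                # between -180 and xmax; 180 - sd.xmin is the width of the
                # portion between xmin and 180; the wider one is kept.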
                if sd.xmax + 180. > 180 - sd.xmin:
                    set_bounds = '%s, %s, %s, %s' % (
                        sd.ymin, sd.ymax, -180., sd.xmax)
                else:
                    set_bounds = '%s, %s, %s, %s' % (sd.ymin, sd.ymax, sd.xmin,
                                                     180.)
                print('Bounds applied: %s' % set_bounds)
            else:
                set_bounds = args.set_bounds
        else:
            set_bounds = args.set_bounds

        config = args.config

        if args.config_filepath is not None:
            # only add config_filepath if full filepath not given and file
            # ext is .ini
            if (not os.path.isabs(config) and
                    os.path.splitext(config)[-1] == '.ini'):
                config = os.path.join(args.config_filepath, config)

        if os.path.splitext(config)[-1] == '.ini':
            temp = ConfigObj(config)
            if len(temp) == 0:
                raise Exception(
                    'Could not find specified .ini file: %s' % config)
            if args.data_path is not None:
                temp = correct_config_filepaths(args.data_path, temp)
            configs = [temp]
            conffail = []
        else:
            # input is a list of config files
            with open(config, 'r') as f:
                configlist = f.readlines()
            configs = []
            conffail = []
            for conf in configlist:
                conf = conf.strip()
                if not os.path.isabs(conf):
                    # only add config_filepath if full filepath not given
                    conf = os.path.join(args.config_filepath, conf)
                try:
                    temp = ConfigObj(conf)
                    if temp:
                        if args.data_path is not None:
                            temp = correct_config_filepaths(
                                args.data_path, temp)
                        configs.append(temp)
                    else:
                        conffail.append(conf)
                except BaseException:
                    conffail.append(conf)

        print('\nRunning the following models:')

        for conf in configs:
            print('\t%s' % conf.keys()[0])
        if len(conffail) > 0:
            print('Could not find or read in the following config files:\n')
            for conf in conffail:
                print('\t%s' % conf)
            print('\nContinuing...\n')

        if set_bounds is not None:
            if 'zoom' in set_bounds:
                temp = set_bounds.split(',')
                print('Using %s threshold of %1.1f to cut model bounds'
                      % (temp[1].strip(), float(temp[2].strip())))
                bounds = get_bounds(shakefile, temp[1].strip(),
                                    float(temp[2].strip()))
            else:
                # parse 'latmin, latmax, lonmin, lonmax' without eval
                temp = [float(val) for val in set_bounds.split(',')]
                latmin = temp[0]
                latmax = temp[1]
                lonmin = temp[2]
                lonmax = temp[3]
                bounds = {'xmin': lonmin, 'xmax': lonmax,
                          'ymin': latmin, 'ymax': latmax}
            print('Applying bounds of lonmin %1.2f, lonmax %1.2f, '
                  'latmin %1.2f, latmax %1.2f'
                  % (bounds['xmin'], bounds['xmax'],
                     bounds['ymin'], bounds['ymax']))
        else:
            bounds = None

        if args.make_webpage:
            results = []

        # pre-read in ocean trimming file polygons so only do this step once
        if args.trimfile is not None:
            if not os.path.exists(args.trimfile):
                print('Specified trimfile does not exist: %s\n'
                      'Ocean will not be trimmed.' % args.trimfile)
                trimfile = None
            elif os.path.splitext(args.trimfile)[1] != '.shp':
                print('trimfile must be a shapefile, '
                      'ocean will not be trimmed')
                trimfile = None
            else:
                trimfile = args.trimfile
        else:
            trimfile = None

        # Get finite fault ready, if exists

        ffault = None
        point = True
        if args.finite_fault is not None:
            point = False
            try:
                if os.path.splitext(args.finite_fault)[-1] == '.txt':
                    ffault = text_to_json(args.finite_fault)
                elif os.path.splitext(args.finite_fault)[-1] == '.json':
                    ffault = args.finite_fault
                else:
                    print('Could not read in finite fault, will '
                          'try to download from comcat')
                    ffault = None
            except BaseException:
                print('Could not read in finite fault, will try to '
                      'download from comcat')
                ffault = None

        if ffault is None:
            # Try to get finite fault file, if it exists
            try:
                returned_ev = get_event_comcat(shakefile)
                if returned_ev is not None:
                    testjd, detail, temp = returned_ev
                    evinfo = testjd['input']['event_information']
                    if 'faultfiles' in evinfo:
                        ffilename = evinfo['faultfiles']
                        if len(ffilename) > 0:
                            # Download the file
                            with tempfile.NamedTemporaryFile(
                                    delete=False, mode='w') as f:
                                temp.getContent(ffilename, filename=f.name)
                                ffault = text_to_json(f.name)
                                os.remove(f.name)
                            point = False
                        else:
                            point = True
                else:
                    print('Unable to determine source type, unknown if finite'
                          ' fault or point source')
                    ffault = None
                    point = False

            except Exception as e:
                print(e)
                print('Unable to determine source type, unknown if finite'
                      ' fault or point source')
                ffault = None
                point = False

        # Loop over config files
        for conf in configs:
            modelname = conf.keys()[0]
            print('\nNow running %s:' % modelname)
            notcov, newbnds = check_input_extents(
                conf, shakefile=shakefile,
                bounds=bounds
            )
            if len(notcov) > 0:
                print('\nThe following input layers do not cover'
                      ' the area of interest:\n\t%s' % '\n\t'.join(notcov))
                if newbnds is None:
                    print('\nCannot make bounds that work. '
                          'Skipping to next model\n')
                    continue
                else:
                    pnt = '%s, %s, %s, %s' % (
                        newbnds['xmin'], newbnds['xmax'],
                        newbnds['ymin'], newbnds['ymax'])
                    print('Running model for new bounds that are fully covered'
                          ' by input layer: %s' % pnt)
                    bounds2 = newbnds
            else:
                bounds2 = bounds

            modelfunc = conf[modelname]['funcname']
            if modelfunc == 'LogisticModel':
                lm = LM.LogisticModel(shakefile, conf,
                                      uncertfile=uncertfile,
                                      saveinputs=args.save_inputs,
                                      bounds=bounds2,
                                      trimfile=trimfile)

                maplayers = lm.calculate()
            elif modelfunc == 'godt2008':
                maplayers = godt2008(shakefile, conf,
                                     uncertfile=uncertfile,
                                     saveinputs=args.save_inputs,
                                     bounds=bounds2,
                                     trimfile=trimfile)
            else:
                print('Unknown model function %s specified in config, '
                      'skipping to next config' % modelfunc)
                continue

            # time1 = datetime.datetime.utcnow().strftime('%d%b%Y_%H%M')
            # filename = ('%s_%s_%s' % (eventid, modelname, time1))

            if args.appendname is not None:
                filename = ('%s_%s_%s' % (eventid, modelname, args.appendname))
            else:
                filename = ('%s_%s' % (eventid, modelname))
            if hdf5:
                filenameh = os.path.join(outfolder, filename + '.hdf5')
                # check/remove the file where it will actually be written
                if os.path.exists(filenameh):
                    os.remove(filenameh)
                savelayers(maplayers, filenameh)
                filenames.append(filenameh)

            if gis or kmz:
                for key in maplayers:
                    # Rename 'std' key to 'beta_sigma'
                    if key == 'std':
                        key_label = 'beta_sigma'
                    else:
                        key_label = key
                    if gis:
                        filen = os.path.join(outfolder, '%s_%s.bil'
                                             % (filename, key_label))
                        fileh = os.path.join(outfolder, '%s_%s.hdr'
                                             % (filename, key_label))
                        fileg = os.path.join(outfolder, '%s_%s.tif'
                                             % (filename, key_label))

                        GDALGrid.copyFromGrid(
                            maplayers[key]['grid']).save(filen)
                        cflags = '-co COMPRESS=DEFLATE -co predictor=2'
                        srs = '-a_srs EPSG:4326'
                        cmd = 'gdal_translate %s %s -of GTiff %s %s' % (
                            srs, cflags, filen, fileg)
                        rc, so, se = get_command_output(cmd)
                        # Delete bil file and its header
                        os.remove(filen)
                        os.remove(fileh)
                        filenames.append(fileg)
                    if kmz and (not key.startswith('quantile')
                                and not key.startswith('std')):
                        plotorder, logscale, lims, colormaps, maskthresh = \
                            parseConfigLayers(maplayers, conf, keys=['model'])
                        maxprob = np.nanmax(maplayers[key]['grid'].getData())
                        if key == 'model':
                            qdict = {
                                k: maplayers[k] for k in maplayers.keys()
                                if k.startswith('quantile')
                            }
                        else:
                            qdict = None
                        if maskthresh is None:
                            maskthresh = [0.]
                        if maxprob >= maskthresh[0]:
                            filen = os.path.join(outfolder, '%s_%s.kmz'
                                                 % (filename, key_label))
                            filek = create_kmz(maplayers[key], filen,
                                               mask=maskthresh[0],
                                               levels=lims[0],
                                               qdict=qdict)
                            filenames.append(filek)
                        else:
                            print('No unmasked pixels present, skipping kmz '
                                  'file creation')

            if args.make_webpage:
                # Compile into list of results for later
                results.append(maplayers)

                #  # Make binary output for ShakeCast
                #  filef = os.path.join(outfolder, '%s_model.flt'
                #                       % filename)
                #  # And get name of header
                #  filefh = os.path.join(outfolder, '%s_model.hdr'
                #                        % filename)
                #  # Make file
                #  write_floats(filef, maplayers['model']['grid'])
                #  filenames.append(filef)
                #  filenames.append(filefh)

        eventid = getHeaderData(shakefile)[0]['event_id']
        if not hasattr(args, 'eventsource'):
            args.eventsource = 'us'
        if not hasattr(args, 'eventsourcecode'):
            args.eventsourcecode = eventid

        if args.make_webpage:
            if len(results) == 0:
                raise Exception('No models were run. Cannot make webpages.')
            outputs = hazdev(
                results, configs,
                shakefile, outfolder=outfolder,
                pop_file=args.popfile,
                pager_alert=args.property_alertlevel,
                eventsource=args.eventsource,
                eventsourcecode=args.eventsourcecode,
                point=point, gf_version=args.gf_version,
                pdlcall=args.pdlcall)
            filenames = filenames + outputs

#        # create transparent png file
#        outputs = create_png(outdir)
#        filenames = filenames + outputs
#
#        # create info file
#        infofile = create_info(outdir)
#        filenames = filenames + infofile

        print('\nFiles created:\n')
        for filen in filenames:
            print('%s' % filen)

        return filenames
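
The dateline branch near the top of this driver is easy to get wrong, so here is a minimal, self-contained sketch of the same selection logic; the helper name and sample values are hypothetical:

def pick_dateline_side(xmin, xmax, ymin, ymax):
    """Return 'latmin, latmax, lonmin, lonmax' for the wider side."""
    east_width = xmax + 180.  # width of the piece east of the dateline
    west_width = 180. - xmin  # width of the piece west of the dateline
    if east_width > west_width:
        return '%s, %s, %s, %s' % (ymin, ymax, -180., xmax)
    return '%s, %s, %s, %s' % (ymin, ymax, xmin, 180.)

# grid from 175E to 160W: the east-of-dateline piece (20 deg) is wider
print(pick_dateline_side(175., -160., -5., 5.))  # -5.0, 5.0, -180.0, -160.0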
Code example #44
0
File: stats.py Project: sudwebd/groundfailure
def computeHagg(grid2D,
                proj='moll',
                probthresh=0.0,
                shakefile=None,
                shakethreshtype='pga',
                shakethresh=0.0):
    """
    Computes the Aggregate Hazard (Hagg) which is equal to the
    probability * area of grid cell For models that compute areal coverage,
    this is equivalant to the total predicted area affected in km2.

    Args:
        grid2D: grid2D object of model output.
        proj: projection to use to obtain equal area, 'moll'  mollweide, or
            'laea' lambert equal area.
        probthresh: Probability threshold, any values less than this will not
            be included in aggregate hazard estimation.
        shakefile: Optional, path to shakemap file to use for ground motion
            threshold.
        shakethreshtype: Optional, Type of ground motion to use for
            shakethresh, 'pga', 'pgv', or 'mmi'.
        shakethresh: Optional, Float or list of shaking thresholds in %g for
            pga, cm/s for pgv, float for mmi.

    Returns: Aggregate hazard (float) if no shakethresh or only one shakethresh was defined,
        otherwise, a list of floats of aggregate hazard for all shakethresh values.
    """
    Hagg = []
    bounds = grid2D.getBounds()
    lat0 = np.mean((bounds[2], bounds[3]))
    lon0 = np.mean((bounds[0], bounds[1]))
    projs = ('+proj=%s +lat_0=%f +lon_0=%f +x_0=0 +y_0=0 +ellps=WGS84 '
             '+units=km +no_defs' % (proj, lat0, lon0))
    geodict = grid2D.getGeoDict()

    if shakefile is not None:
        if not isinstance(shakethresh, (list, np.ndarray)):
            shakethresh = [shakethresh]
        for shaket in shakethresh:
            if shaket < 0.:
                raise Exception('shaking threshold must be greater than or '
                                'equal to zero')
        tmpdir = tempfile.mkdtemp()
        # resample shakemap to grid2D
        temp = ShakeGrid.load(shakefile)
        junkfile = os.path.join(tmpdir, 'temp.bil')
        GDALGrid.copyFromGrid(temp.getLayer(shakethreshtype)).save(junkfile)
        shk = quickcut(junkfile, geodict, precise=True, method='bilinear')
        shutil.rmtree(tmpdir)
        if shk.getGeoDict() != geodict:
            raise Exception('shakemap was not resampled to exactly the same '
                            'geodict as the model')

    if probthresh < 0.:
        raise Exception('probability threshold must be greater than or '
                        'equal to zero')

    grid = grid2D.project(projection=projs, method='bilinear')
    geodictRS = grid.getGeoDict()
    cell_area_km2 = geodictRS.dx * geodictRS.dy
    model = grid.getData()
    model[np.isnan(model)] = -1.
    if shakefile is not None:
        for shaket in shakethresh:
            modcop = model.copy()
            shkgrid = shk.project(projection=projs)
            shkdat = shkgrid.getData()
            # use -1 to avoid nan errors and warnings, will always be thrown
            # out because default is 0.
            shkdat[np.isnan(shkdat)] = -1.
            modcop[shkdat < shaket] = -1.
            Hagg.append(np.sum(modcop[modcop >= probthresh] * cell_area_km2))
    else:
        Hagg.append(np.sum(model[model >= probthresh] * cell_area_km2))
    if len(Hagg) == 1:
        Hagg = Hagg[0]
    return Hagg
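
A quick numeric sketch of the aggregation step at the core of computeHagg, on synthetic values; it assumes an equal-area grid with 1 km cells, which the real function gets to by projecting first:

import numpy as np

# Synthetic 2x2 probability grid; cell area and values are hypothetical.
model = np.array([[0.0, 0.2], [0.5, np.nan]])
cell_area_km2 = 1.0
probthresh = 0.1
m = model.copy()
m[np.isnan(m)] = -1.  # same NaN handling as computeHagg
hagg = np.sum(m[m >= probthresh] * cell_area_km2)
print(hagg)  # ~0.7: probability-weighted area in km2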
Code example #45
0
File: stats.py Project: sudwebd/groundfailure
def computeParea(grid2D,
                 proj='moll',
                 probthresh=0.0,
                 shakefile=None,
                 shakethreshtype='pga',
                 shakethresh=0.0):
    """
    Alternative to Aggregate Hazard (Hagg), which is equal to the
    the sum of the area of grid cells that exceeds a given probability.

    Args:
        grid2D: grid2D object of model output.
        proj: projection to use to obtain equal area, 'moll'  mollweide, or
            'laea' lambert equal area.
        probthresh: Optional, Float or list of probability thresholds.
        shakefile: Optional, path to shakemap file to use for ground motion
            threshold.
        shakethreshtype: Optional, Type of ground motion to use for
            shakethresh, 'pga', 'pgv', or 'mmi'.
        shakethresh: Optional, Float of shaking thresholds in %g for
            pga, cm/s for pgv, float for mmi.

    Returns:
        Parea (float) if no or only one probthresh defined,
        otherwise, a list of floats of Parea corresponding to all
        specified probthresh values.
    """
    if not isinstance(probthresh, (list, np.ndarray)):
        probthresh = [probthresh]

    Parea = []
    bounds = grid2D.getBounds()
    lat0 = np.mean((bounds[2], bounds[3]))
    lon0 = np.mean((bounds[0], bounds[1]))
    projs = ('+proj=%s +lat_0=%f +lon_0=%f +x_0=0 +y_0=0 +ellps=WGS84 '
             '+units=km +no_defs' % (proj, lat0, lon0))
    geodict = grid2D.getGeoDict()

    if shakefile is not None:
        if shakethresh < 0.:
            raise Exception('shaking threshold must be greater than or '
                            'equal to zero')
        tmpdir = tempfile.mkdtemp()
        # resample shakemap to grid2D
        temp = ShakeGrid.load(shakefile)
        junkfile = os.path.join(tmpdir, 'temp.bil')
        GDALGrid.copyFromGrid(temp.getLayer(shakethreshtype)).save(junkfile)
        shk = quickcut(junkfile, geodict, precise=True, method='bilinear')
        shutil.rmtree(tmpdir)
        if shk.getGeoDict() != geodict:
            raise Exception('shakemap was not resampled to exactly the same '
                            'geodict as the model')

    grid = grid2D.project(projection=projs)
    geodictRS = grid.getGeoDict()
    cell_area_km2 = geodictRS.dx * geodictRS.dy
    model = grid.getData()
    model[np.isnan(model)] = -1.
    for probt in probthresh:
        if probt < 0.:
            raise Exception('probability threshold must be greater than or '
                            'equal to zero')
        modcop = model.copy()
        if shakefile is not None:
            shkgrid = shk.project(projection=projs)
            shkdat = shkgrid.getData()
            # use -1 to avoid nan errors and warnings, will always be thrown
            # out because default probthresh is 0 and must be positive.
            shkdat[np.isnan(shkdat)] = -1.
            modcop[shkdat < shakethresh] = -1.
        one_mat = np.ones_like(modcop)
        Parea.append(np.sum(one_mat[modcop >= probt] * cell_area_km2))

    if len(Parea) == 1:
        Parea = Parea[0]
    return Parea
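
For contrast with computeHagg, the same synthetic grid run through the Parea rule, where every qualifying cell contributes its full area:

import numpy as np

# Hypothetical values, matching the computeHagg sketch above.
model = np.array([[0.0, 0.2], [0.5, np.nan]])
cell_area_km2 = 1.0
probthresh = 0.1
m = model.copy()
m[np.isnan(m)] = -1.
parea = np.sum(np.ones_like(m)[m >= probthresh] * cell_area_km2)
print(parea)  # 2.0: two cells exceed the threshold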
Code example #46
0
File: grid2d_test.py Project: mtoqeerpk/MapIO
def test_project():
    # test projecting a grid that wraps the 180 meridian
    gd = GeoDict.createDictFromBox(175, -175, -5, 5, 1.0, 1.0)
    ncells = gd.ny * gd.nx
    data = np.arange(0.0, ncells).reshape(gd.ny, gd.nx)
    grid = GDALGrid(data, gd)
    projstr = "+proj=merc +lat_ts=55 +lon_0=180 +ellps=WGS84"
    newgrid = grid.project(projstr, method='nearest')
    proj = pyproj.Proj(projstr)
    # what would the ul/lr corners be?
    ulx, uly = proj(grid._geodict.xmin, grid._geodict.ymax)
    lrx, lry = proj(grid._geodict.xmax, grid._geodict.ymin)
    # what if we back-project?
    newxmin, newymax = proj(newgrid._geodict.xmin,
                            newgrid._geodict.ymax, inverse=True)
    newxmax, newymin = proj(newgrid._geodict.xmax,
                            newgrid._geodict.ymin, inverse=True)
    x = 1  # no-op placeholder

    # test simple projection
    data = np.array([[0, 0, 1, 0, 0],
                     [0, 0, 1, 0, 0],
                     [1, 1, 1, 1, 1],
                     [0, 0, 1, 0, 0],
                     [0, 0, 1, 0, 0]], dtype=np.int32)
    geodict = {'xmin': 50, 'xmax': 50.4, 'ymin': 50,
               'ymax': 50.4, 'dx': 0.1, 'dy': 0.1, 'nx': 5, 'ny': 5}
    gd = GeoDict(geodict)
    grid = GDALGrid(data, gd)
    projstr = "+proj=utm +zone=40 +north +ellps=WGS84 +datum=WGS84 +units=m +no_defs "
    newgrid = grid.project(projstr, method='nearest')

    try:
        tdir = tempfile.mkdtemp()
        outfile = os.path.join(tdir, 'output.bil')
        grid.save(outfile)
        with rasterio.open(outfile) as src:
            aff = get_affine(src)
            data = src.read(1)
            src_crs = CRS().from_string(GeoDict.DEFAULT_PROJ4).to_dict()
            dst_crs = CRS().from_string(projstr).to_dict()
            nrows, ncols = data.shape
            left = aff.xoff
            top = aff.yoff
            right, bottom = aff * (ncols-1, nrows-1)
            dst_transform, width, height = calculate_default_transform(src_crs, dst_crs,
                                                                       ncols, nrows,
                                                                       left, bottom,
                                                                       right, top)
            destination = np.zeros((height, width))
            reproject(data,
                      destination,
                      src_transform=aff,
                      src_crs=src_crs,
                      dst_transform=dst_transform,
                      dst_crs=dst_crs,
                      src_nodata=src.nodata,
                      dst_nodata=np.nan,
                      resampling=Resampling.nearest)
            x = 1  # no-op placeholder
    except:
        pass
    finally:
        shutil.rmtree(tdir)
Code example #47
0
File: spatial.py Project: mhearne-usgs/groundfailure
def trim_ocean(grid2D, mask, all_touched=True, crop=False):
    """Use the mask (a shapefile) to trim offshore areas

    Args:
        grid2D: MapIO grid2D object of results that need trimming
        mask: list of shapely polygon features already loaded in or string of
            file extension of shapefile to use for clipping
        all_touched (bool): if True, won't mask cells that touch any part of
            polygon edge
        crop (bool): crop boundaries of raster to new masked area

    Returns:
        grid2D file with ocean masked
    """
    gdict = grid2D.getGeoDict()

    tempdir = tempfile.mkdtemp()

    # Get shapes ready
    if isinstance(mask, str):
        with fiona.open(mask, 'r') as shapefile:
            bbox = (gdict.xmin, gdict.ymin, gdict.xmax, gdict.ymax)
            hits = list(shapefile.items(bbox=bbox))
            features = [feature[1]["geometry"] for feature in hits]
            # hits = list(shapefile)
            # features = [feature["geometry"] for feature in hits]
    elif isinstance(mask, list):
        features = mask
    else:
        raise Exception('mask is neither a path to a shapefile nor a list '
                        'of shapely shapes, cannot proceed')

    if len(features) == 0:
        print('No coastlines in ShakeMap area')
        return grid2D

    tempfilen = os.path.join(tempdir, 'temp.bil')
    tempfile1 = os.path.join(tempdir, 'temp.tif')
    tempfile2 = os.path.join(tempdir, 'temp2.tif')
    GDALGrid.copyFromGrid(grid2D).save(tempfilen)
    cmd = 'gdal_translate -a_srs EPSG:4326 -of GTiff %s %s' % \
        (tempfilen, tempfile1)
    rc, so, se = get_command_output(cmd)

    if rc:
        with rasterio.open(tempfile1, 'r') as src_raster:
            out_image, out_transform = rasterio.mask.mask(
                src_raster, features, all_touched=all_touched, crop=crop)
            out_meta = src_raster.meta.copy()
            out_meta.update({
                "driver": "GTiff",
                "height": out_image.shape[1],
                "width": out_image.shape[2],
                "transform": out_transform
            })
            with rasterio.open(tempfile2, "w", **out_meta) as dest:
                dest.write(out_image)

        newgrid = GDALGrid.load(tempfile2)

    else:
        print(se)
        raise Exception('ocean trimming failed')

    shutil.rmtree(tempdir)
    return newgrid
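
A minimal sketch of the bbox pre-filter trim_ocean relies on, rebuilt here with shapely only; the polygons and bounds are hypothetical, and in the function itself fiona's bbox= argument does this while reading the shapefile:

from shapely.geometry import box, mapping

grid_bbox = box(139.0, 34.0, 141.0, 36.0)  # xmin, ymin, xmax, ymax
land = [box(138.5, 33.0, 140.0, 35.0),     # overlaps the grid
        box(150.0, 10.0, 151.0, 11.0)]     # far away, gets dropped
features = [mapping(p) for p in land if p.intersects(grid_bbox)]
print(len(features))  # 1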
Code example #48
0
    def __init__(self,config,shakefile,model):
        if model not in getLogisticModelNames(config):
            raise Exception('Could not find a model called "%s" in config %s.' % (model,config))
        #do everything here short of calculations - parse config, assemble eqn strings, load data.
        self.model = model
        cmodel = config['logistic_models'][model]
        self.coeffs = validateCoefficients(cmodel)
        self.layers = validateLayers(cmodel)  # key = layer name, value = file name
        self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)

        if 'baselayer' not in cmodel:
            raise Exception('You must specify a base layer file in config.')
        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception('You must specify a base layer corresponding to one of the files in the layer section.')

        #get the geodict for the shakemap
        geodict = ShakeGrid.getFileGeoDict(shakefile,adjust='res')
        griddict,eventdict,specdict,fields,uncertainties = getHeaderData(shakefile)
        YEAR = eventdict['event_timestamp'].year
        MONTH = MONTHS[(eventdict['event_timestamp'].month)-1]
        DAY = eventdict['event_timestamp'].day
        HOUR = eventdict['event_timestamp'].hour

        # now find the layer that is our base layer and get the largest bounds we can guarantee will not exceed the shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict = GDALGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        elif ftype == 'gmt':
            basegeodict = GMTGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        else:
            raise Exception('All predictor variable grids must be a valid GMT or ESRI file type')

        #now load the shakemap, resampling and padding if necessary
        self.shakemap = ShakeGrid.load(shakefile,samplegeodict=sampledict,resample=True,doPadding=True,adjust='res')

        #load the predictor layers into a dictionary
        self.layerdict = {} #key = layer name, value = grid object
        for layername,layerfile in self.layers.items():
            if isinstance(layerfile,list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            if ftype == 'gmt':
                                lyr = GMTGrid.load(layerfile,sampledict,resample=True,method=interp,doPadding=True)
                            elif ftype == 'esri':
                                lyr = GDALGrid.load(layerfile,sampledict,resample=True,method=interp,doPadding=True)
                            else:
                                msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (layername,layerfile)
                                raise Exception(msg)
                            self.layerdict[layername] = lyr
            else:
                #first, figure out what kind of file we have (or is it a directory?)
                ftype = getFileType(layerfile)
                interp = self.interpolations[layername]
                if ftype == 'gmt':
                    lyr = GMTGrid.load(layerfile,sampledict,resample=True,method=interp,doPadding=True)
                elif ftype == 'esri':
                    lyr = GDALGrid.load(layerfile,sampledict,resample=True,method=interp,doPadding=True)
                else:
                    msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (layername,layerfile)
                    raise Exception(msg)
                self.layerdict[layername] = lyr

        shapes = {}
        for layername,layer in self.layerdict.items():
            shapes[layername] = layer.getData().shape

        x = 1  # no-op placeholder
        self.nuggets = [str(self.coeffs['b0'])]
        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)
        self.geodict = self.shakemap.getGeoDict()
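
The nugget/equation assembly at the end of this constructor can be exercised on its own; a minimal sketch with made-up coefficients and terms (the real values come from the validated config):

coeffs = {'b0': -3.649, 'b1': 0.0133, 'b2': 0.0364}
terms = {'b1': 'PGA', 'b2': 'slope'}
nuggets = [str(coeffs['b0'])]
for key in sorted(terms.keys()):
    nuggets.append('(%g * %s)' % (coeffs[key], terms[key]))
equation = ' + '.join(nuggets)
print(equation)  # -3.649 + (0.0133 * PGA) + (0.0364 * slope)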
Code example #49
0
def statsCoverage(modelgrid,
                  inventorygrid,
                  bins=None,
                  showplots=True,
                  saveplots=False,
                  filepath=None):
    """TO DO - FIND MORE TESTS THAT ARE EASY TO COMPARE WITH EACH OTHER
    Compute stats and make comparison plots specific to models that output areal coverage like Godt et al 2008

    :param modelgrid: Grid2D object of model results
    :param inventorygrid: Grid2D object of areal coverage of inventory computed on same grid as modelgrid using, for example, computeCoverage
    :param bins: bin edges to use for various binning and threshold statistical calculations. If None, defaults to np.linspace(0, np.max((inv.max(), model.max())), 11)
    :param showplots: if True, will display the plots
    :param saveplots: if True, will save the plots
    :param filepath: Filepath for saved plots, if None, will save in current directory. Files are named with test name and time stamp
    :returns:
        * results: dictionary of results of tests.
                      {'Compare coverage': dictionary,
                        'RMS': float,
                        'RMS_nonzero': float,
                        'Percent in bin': dictionary,
                        'Direct Comparison': dictionary}
        * invminusmod: Grid2D object of difference between inventory and model (inventory - model)

    """

    inv = inventorygrid.getData()
    model = modelgrid.getData()
    invminusmod = GDALGrid(inv - model, inventorygrid.getGeoDict())

    plt.ioff()

    # Make statistical comparisons
    results = {}
    if bins is None:
        bins = np.linspace(0, np.max((inv.max(), model.max())), 11)
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.hist((model.ravel(), inv.ravel()), bins=bins)  # flatten 2D grids for hist
    ax.set_yscale('log')
    ax.legend(('Model', 'Inventory'))
    ax.set_ylabel('Total # of cells')
    ax.set_xlabel('Predicted coverage bin')
    results['Direct Comparison'] = {
        'bins': bins,
        'model': model,
        'inventory': inv
    }

    # Statistical comparison
    perc = []
    areatot = []
    for i, bin in enumerate(bins[:-1]):
        idx = (model > bin) & (model <= bins[i + 1])
        areain = inv[idx]
        totalin = sum((areain > bin) & (areain <= bins[i + 1]))
        # divide by the number of cells in the bin (len(idx) on a 2D
        # boolean array would count rows, not cells)
        nbin = np.sum(idx)
        perc.append(float(totalin) / nbin * 100 if nbin > 0 else 0.)
        areatot.append(np.mean(areain))
    areatot = np.nan_to_num(areatot)

    binvec = []
    for i in range(len(bins[:-1])):
        binvec.append(bins[i] + (bins[i + 1] - bins[i]) / 2)
    fig1 = plt.figure()
    ax1 = fig1.add_subplot(111)
    ax1.plot(binvec, perc, 'o-')
    ax1.set_ylabel('% in right bin')
    ax1.set_xlabel('Predicted coverage bin')
    ax1.set_title('Actual coverage in predicted bin')
    results['Percent in bin'] = {'bincenters': binvec, 'perc': perc}

    fig2 = plt.figure()
    ax2 = fig2.add_subplot(111)
    ax2.plot(binvec, areatot, 'o-')
    ax2.plot([0., 1.], [0., 1.], '--', color='gray')
    ax2.set_ylabel('Actual coverage')
    ax2.set_xlabel('Predicted coverage')
    ax2.set_title('Actual vs. Predicted coverage')
    results['Compare coverage'] = {'bincenters': binvec, 'areatot': areatot}

    if showplots is True:
        plt.show()
    if saveplots is True:
        if filepath is None:
            filepath = os.getcwd()
        import datetime
        time1 = datetime.datetime.utcnow().strftime('%d%b%Y_%H%M')
        fig.savefig(os.path.join(filepath,
                                 'Direct_compare_%s.pdf' % (time1, )))
        fig1.savefig(
            os.path.join(filepath, 'Percent_in_bin_%s.pdf' % (time1, )))
        fig2.savefig(
            os.path.join(filepath, 'Compare_coverage_%s.pdf' % (time1, )))

    # RMS: average the squared residuals, then take the root
    mflat = model.ravel()
    iflat = inv.ravel()
    idx = np.union1d(mflat.nonzero()[0], iflat.nonzero()[0])
    results['RMS'] = np.sqrt(((model - inv)**2).mean())
    print(('RMS: %0.3f' % results['RMS']))
    results['RMS_nonzero'] = np.sqrt(((mflat[idx] - iflat[idx])**2).mean())
    print(('RMS_nonzero: %0.3f' % results['RMS_nonzero']))

    return invminusmod, results
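
A quick synthetic check of the RMS ordering used above: the square root has to be taken after averaging the squared residuals, or the result collapses to the mean absolute error.

import numpy as np

model = np.array([0.0, 0.5, 1.0])
inv = np.zeros(3)
mae = np.sqrt((model - inv) ** 2).mean()    # elementwise root: mean |error| = 0.5
rms = np.sqrt(((model - inv) ** 2).mean())  # true RMS, ~0.645
print(mae, rms)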
Code example #50
0
    def __init__(self,
                 shakefile,
                 config,
                 uncertfile=None,
                 saveinputs=False,
                 slopefile=None,
                 bounds=None,
                 slopemod=None,
                 trimfile=None):
        """
        Sets up the logistic model

        Args:
            shakefile (str): Path to shakemap grid.xml file for the event.
            config: configobj object defining the model and its inputs. Only
                one model should be described in each config file.
            uncertfile (str): Path to uncertainty.xml file.
            saveinputs (bool): Save input layers as Grid2D objects in addition
                to the model? If false (the default), it will just output the
                model.
            slopefile (str): Optional path to slopefile that will be resampled
                to the other input files for applying thresholds. OVERWRITES
                VALUE IN CONFIG.
            bounds (dict): Default of None uses ShakeMap boundaries, otherwise
                a dictionary of boundaries to cut to like

                .. code-block:: python

                    bounds = {
                        'xmin': lonmin, 'xmax': lonmax,
                        'ymin': latmin, 'ymax': latmax
                    }
            slopemod (str): How slope input should be modified to be in
                degrees: e.g., ``np.arctan(slope) * 180. / np.pi`` or
                ``slope/100.`` (note that this may be in the config file
                already).
            trimfile (str): shapefile of earth's landmasses to use to cut
                offshore areas.
        """
        mnames = getLogisticModelNames(config)
        if len(mnames) == 0:
            raise Exception('No config file found or problem with config '
                            'file format')
        if len(mnames) > 1:
            raise Exception('Config file contains more than one model which '
                            'is no longer allowed, update your config file '
                            'to the newer format')

        self.model = mnames[0]
        self.config = config
        cmodel = config[self.model]
        self.modeltype = cmodel['gfetype']
        self.coeffs = validateCoefficients(cmodel)
        # key = layer name, value = file name
        self.layers = validateLayers(cmodel)
        self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)
        self.gmused = [
            value for term, value in cmodel['terms'].items()
            if 'pga' in value.lower() or 'pgv' in value.lower()
            or 'mmi' in value.lower()
        ]
        self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
        #self.numstd = numstd
        self.clips = validateClips(cmodel, self.layers, self.gmused)
        self.notes = ''

        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception('You must specify a base layer corresponding to '
                            'one of the files in the layer section.')
        self.saveinputs = saveinputs
        if slopefile is None:
            try:
                self.slopefile = cmodel['slopefile']
            except:
                # print('Slopefile not specified in config, no slope '
                #      'thresholds will be applied\n')
                self.slopefile = None
        else:
            self.slopefile = slopefile
        if slopemod is None:
            try:
                self.slopemod = cmodel['slopemod']
            except:
                self.slopemod = None
        else:
            self.slopemod = slopemod

        # See if trimfile exists
        if trimfile is not None:
            if not os.path.exists(trimfile):
                print('Specified trimfile does not exist: %s\nOcean will not '
                      'be trimmed' % trimfile)
                self.trimfile = None
            elif os.path.splitext(trimfile)[1] != '.shp':
                print('trimfile must be a shapefile, ocean will not be '
                      'trimmed')
                self.trimfile = None
            else:
                self.trimfile = trimfile
        else:
            self.trimfile = None

        # Get month of event
        griddict, eventdict, specdict, fields, uncertainties = \
            getHeaderData(shakefile)
        MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]

        # Figure out how/if need to cut anything
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        if bounds is not None:  # Make sure bounds are within ShakeMap Grid
            if geodict.xmin < geodict.xmax:  # only if signs are not opposite
                if (geodict.xmin > bounds['xmin']
                        or geodict.xmax < bounds['xmax']
                        or geodict.ymin > bounds['ymin']
                        or geodict.ymax < bounds['ymax']):
                    print('Specified bounds are outside shakemap area, using '
                          'ShakeMap bounds instead.')
                    bounds = None

        if bounds is not None:
            tempgdict = GeoDict.createDictFromBox(bounds['xmin'],
                                                  bounds['xmax'],
                                                  bounds['ymin'],
                                                  bounds['ymax'],
                                                  geodict.dx,
                                                  geodict.dy,
                                                  inside=False)
            # If Shakemap geodict crosses 180/-180 line, fix geodict so things don't break
            if geodict.xmin > geodict.xmax:
                if tempgdict.xmin < 0:
                    geodict._xmin -= 360.
                else:
                    geodict._xmax += 360.
            gdict = geodict.getBoundsWithin(tempgdict)
        else:
            gdict = geodict

        # Now find the layer that is our base layer and get the largest bounds
        # we can guarantee not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
            if basegeodict == gdict:
                sampledict = gdict
            else:
                sampledict = basegeodict.getBoundsWithin(gdict)
        elif ftype == 'gmt':
            basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
            if basegeodict == gdict:
                sampledict = gdict
            else:
                sampledict = basegeodict.getBoundsWithin(gdict)
        else:
            raise Exception('All predictor variable grids must be a valid '
                            'GMT or ESRI file type.')

        # Do we need to subdivide baselayer?
        if 'divfactor' in self.config[self.model].keys():
            divfactor = float(self.config[self.model]['divfactor'])
            if divfactor != 1.:
                # adjust sampledict so everything will be resampled
                newxmin = (sampledict.xmin - sampledict.dx / 2. +
                           sampledict.dx / (2. * divfactor))
                newymin = (sampledict.ymin - sampledict.dy / 2. +
                           sampledict.dy / (2. * divfactor))
                newxmax = (sampledict.xmax + sampledict.dx / 2. -
                           sampledict.dx / (2. * divfactor))
                newymax = (sampledict.ymax + sampledict.dy / 2. -
                           sampledict.dy / (2. * divfactor))
                newdx = sampledict.dx / divfactor
                newdy = sampledict.dy / divfactor

                sampledict = GeoDict.createDictFromBox(newxmin,
                                                       newxmax,
                                                       newymin,
                                                       newymax,
                                                       newdx,
                                                       newdy,
                                                       inside=True)

        # Find slope thresholds, if applicable
        self.slopemin = 'none'
        self.slopemax = 'none'
        if self.slopefile is not None:
            try:
                self.slopemin = float(config[self.model]['slopemin'])
                self.slopemax = float(config[self.model]['slopemax'])
            except:
                print('Could not find slopemin and/or slopemax in config; '
                      'no slope thresholds will be applied.')
                self.slopemin = 'none'
                self.slopemax = 'none'

        # Make temporary directory for hdf5 pytables file storage
        self.tempdir = tempfile.mkdtemp()

        # now load the shakemap, resampling and padding if necessary
        temp = ShakeGrid.load(shakefile)  # , adjust='res')
        self.shakedict = temp.getShakeDict()
        self.eventdict = temp.getEventDict()
        self.shakemap = {}

        # Read both PGA and PGV in, may need them for thresholds
        for gm in ['pga', 'pgv']:
            junkfile = os.path.join(self.tempdir, 'temp.bil')
            GDALGrid.copyFromGrid(temp.getLayer(gm)).save(junkfile)
            if gm in self.interpolations.keys():
                intermeth = self.interpolations[gm]
            else:
                intermeth = 'bilinear'
            junkgrid = quickcut(junkfile,
                                sampledict,
                                precise=True,
                                method=intermeth)
            if gm in self.clips:
                junkgrid.setData(
                    np.clip(junkgrid.getData(), self.clips[gm][0],
                            self.clips[gm][1]))
            self.shakemap[gm] = TempHdf(
                junkgrid, os.path.join(self.tempdir, '%s.hdf5' % gm))
            os.remove(junkfile)
        del (temp)

        # get updated geodict
        sampledict = junkgrid.getGeoDict()

        # take uncertainties into account, if available
        if uncertfile is not None:
            self.uncert = {}
            try:
                # Only read in the ones that will be needed
                temp = ShakeGrid.load(uncertfile)
                already = []
                for gm in self.gmused:
                    if 'pgv' in gm:
                        gmsimp = 'pgv'
                    elif 'pga' in gm:
                        gmsimp = 'pga'
                    elif 'mmi' in gm:
                        gmsimp = 'mmi'
                    if gmsimp in already:
                        continue
                    junkfile = os.path.join(self.tempdir, 'temp.bil')
                    GDALGrid.copyFromGrid(temp.getLayer('std%s' %
                                                        gmsimp)).save(junkfile)
                    if gmsimp in self.interpolations.keys():
                        intermeth = self.interpolations[gmsimp]
                    else:
                        intermeth = 'bilinear'
                    junkgrid = quickcut(junkfile,
                                        sampledict,
                                        precise=True,
                                        method=intermeth)
                    if gmsimp in self.clips:
                        junkgrid.setData(
                            np.clip(junkgrid.getData(), self.clips[gmsimp][0],
                                    self.clips[gmsimp][1]))
                    self.uncert['std' + gmsimp] = TempHdf(
                        junkgrid,
                        os.path.join(self.tempdir, 'std%s.hdf5' % gmsimp))
                    already.append(gmsimp)
                    os.remove(junkfile)
                del (temp)
            except:
                print('Could not read uncertainty file, ignoring '
                      'uncertainties')
                self.uncert = None
        else:
            self.uncert = None

        # Load the predictor layers, save as hdf5 temporary files, put file
        # locations into a dictionary.

        # Will be replaced in the next section if a slopefile was defined
        self.nonzero = None

        # key = layer name, value = grid object
        self.layerdict = {}

        didslope = False
        for layername, layerfile in self.layers.items():
            start = timer()
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            temp = quickcut(layerfile,
                                            sampledict,
                                            precise=True,
                                            method=interp)
                            if layername in self.clips:
                                temp.setData(
                                    np.clip(temp.getData(),
                                            self.clips[layername][0],
                                            self.clips[layername][1]))
                            self.layerdict[layername] = TempHdf(
                                temp,
                                os.path.join(self.tempdir,
                                             '%s.hdf5' % layername))
                            del (temp)
            else:
                interp = self.interpolations[layername]
                temp = quickcut(layerfile,
                                sampledict,
                                precise=True,
                                method=interp)
                if layername in self.clips:
                    temp.setData(
                        np.clip(temp.getData(), self.clips[layername][0],
                                self.clips[layername][1]))
                if layername == 'rock':  # Convert unconsolidated sediments to a more reasonable coefficient
                    sub1 = temp.getData()
                    # Change to mixed sed rock coeff
                    sub1[sub1 <= -3.21] = -1.36
                    temp.setData(sub1)
                    self.notes += ('unconsolidated sediment coefficient '
                                   'changed to -1.36 (weaker) from -3.22 to '
                                   'better reflect that this unit is not '
                                   'actually strong\n')

                self.layerdict[layername] = TempHdf(
                    temp, os.path.join(self.tempdir, '%s.hdf5' % layername))
                td = temp.getGeoDict()
                if td != sampledict:
                    raise Exception(
                        'Geodictionaries of resampled files do not match')

                if layerfile == self.slopefile:
                    flag = 0
                    if self.slopemin == 'none' and self.slopemax == 'none':
                        flag = 1
                    if self.slopemod is None:
                        slope1 = temp.getData().astype(float)
                        slope = 0
                    else:
                        try:
                            slope = temp.getData().astype(float)
                            slope1 = eval(self.slopemod)
                        except:
                            print('slopemod provided not valid, continuing '
                                  'without slope thresholds.')
                            # fall back to the raw slope so the offshore
                            # (slope == 0) masking below still works
                            slope1 = slope
                            flag = 1
                    if flag == 0:
                        nonzero = np.array([(slope1 > self.slopemin) &
                                            (slope1 <= self.slopemax)])
                        self.nonzero = nonzero[0, :, :]
                        del (slope1)
                        del (slope)
                    else:
                        # Still remove areas where the slope equals exactly
                        # 0.0 to remove offshore liq areas.
                        nonzero = np.array([slope1 != 0.0])
                        self.nonzero = nonzero[0, :, :]
                        del (slope1)
                    didslope = True
                del (temp)

            print('Loading %s layer: %1.1f sec' % (layername, timer() - start))

        if didslope is False and self.slopefile is not None:
            # Slope didn't get read in yet
            temp = quickcut(self.slopefile,
                            sampledict,
                            precise=True,
                            method='bilinear')
            flag = 0
            if self.slopemin == 'none' and self.slopemax == 'none':
                flag = 1
            if self.slopemod is None:
                slope1 = temp.getData().astype(float)
                slope = 0
            else:
                try:
                    slope = temp.getData().astype(float)
                    slope1 = eval(self.slopemod)
                except:
                    print('slopemod provided not valid, continuing without '
                          'slope thresholds')
                    # fall back to the raw slope so the offshore
                    # (slope == 0) masking below still works
                    slope1 = slope
                    flag = 1
            if flag == 0:
                nonzero = np.array([
                    (slope1 > self.slopemin) & (slope1 <= self.slopemax)
                ])
                self.nonzero = nonzero[0, :, :]
                del (slope1)
                del (slope)
            else:
                # Still remove areas where the slope equals exactly
                # 0.0 to remove offshore liq areas.
                nonzero = np.array([slope1 != 0.0])
                self.nonzero = nonzero[0, :, :]
                del (slope1)

        self.nuggets = [str(self.coeffs['b0'])]

        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)
        self.geodict = sampledict
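
A worked example of the divfactor arithmetic above, assuming a 1-degree grid subdivided by a factor of 2; the cell-center extent grows by dx/2 - dx/(2*divfactor) on each side so the finer cell edges line up with the original cell edges:

dx = 1.0
divfactor = 2.0
xmin = 10.0  # original minimum cell center (hypothetical)
newxmin = xmin - dx / 2. + dx / (2. * divfactor)
newdx = dx / divfactor
print(newxmin, newdx)  # 9.75 0.5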
Code example #51
0
File: hazus.py Project: kbiegel-usgs/groundfailure
def hazus_liq(shakefile,
              config,
              uncertfile=None,
              saveinputs=False,
              modeltype=None,
              displmodel=None,
              probtype=None,
              bounds=None):
    """
    Method for computing the probability of liquefaction using the Hazus method
    using the Wills et al. (2015) Vs30 map of California to define the
    susceptibility classes and the Fan et al. global water table model. 
    """
    layers = config['hazus_liq_cal']['layers']
    vs30_file = layers['vs30']['file']
    wtd_file = layers['watertable']['file']
    shkgdict = ShakeGrid.getFileGeoDict(shakefile)
    fgeodict = GMTGrid.getFileGeoDict(vs30_file)[0]

    #---------------------------------------------------------------------------
    # Loading
    #---------------------------------------------------------------------------
    shakemap = ShakeGrid.load(shakefile,
                              fgeodict,
                              resample=True,
                              method='linear',
                              doPadding=True)
    PGA = shakemap.getLayer('pga').getData() / 100  # convert to g
    griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
        shakefile)
    mag = eventdict['magnitude']

    # Correction factor for moment magnitudes other than M=7.5
    k_m = 0.0027 * mag**3 - 0.0267 * mag**2 - 0.2055 * mag + 2.9188

    #---------------------------------------------------------------------------
    # Susceptibility from Vs30
    #---------------------------------------------------------------------------
    vs30_grid = GMTGrid.load(vs30_file)

    vs30 = vs30_grid.getData()
    p_ml = np.zeros_like(vs30)
    a = np.zeros_like(vs30)
    b = np.zeros_like(vs30)
    for k, v in config['hazus_liq_cal']['parameters'].items():
        ind = np.where(vs30 == float(v[0]))
        if v[1] == "VH":
            p_ml[ind] = 0.25
            a[ind] = 9.09
            b[ind] = -0.82
        if v[1] == "H":
            p_ml[ind] = 0.2
            a[ind] = 7.67
            b[ind] = -0.92
        if v[1] == "M":
            p_ml[ind] = 0.1
            a[ind] = 6.67
            b[ind] = -1.0
        if v[1] == "L":
            p_ml[ind] = 0.05
            a[ind] = 5.57
            b[ind] = -1.18
        if v[1] == "VL":
            p_ml[ind] = 0.02
            a[ind] = 4.16
            b[ind] = -1.08

    # Conditional liquefaction probability for a given susceptibility category
    # at a specified PGA
    p_liq_pga = a * PGA + b
    p_liq_pga = p_liq_pga.clip(min=0, max=1)

    #---------------------------------------------------------------------------
    # Water table
    #---------------------------------------------------------------------------
    wtd_grid = GMTGrid.load(wtd_file,
                            fgeodict,
                            resample=True,
                            method=layers['watertable']['interpolation'],
                            doPadding=True)
    tmp = wtd_grid._data
    tmp = np.nan_to_num(tmp)

    # Convert to ft
    wt_ft = tmp * 3.28084

    # Correction factor for groundwater depths other than five feet
    k_w = 0.022 * wt_ft + 0.93

    #---------------------------------------------------------------------------
    # Combine to get conditional liquefaction probability
    #---------------------------------------------------------------------------
    p_liq_sc = p_liq_pga * p_ml / k_m / k_w

    #---------------------------------------------------------------------------
    # Turn output and inputs into into grids and put in maplayers dictionary
    #---------------------------------------------------------------------------
    maplayers = collections.OrderedDict()

    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])
    modelsref = config['hazus_liq_cal']['shortref']
    modellref = config['hazus_liq_cal']['longref']
    modeltype = 'Hazus/Wills'
    maplayers['model'] = {
        'grid': GDALGrid(p_liq_sc, fgeodict),
        'label': 'Probability',
        'type': 'output',
        'description': {
            'name': modelsref,
            'longref': modellref,
            'units': 'coverage',
            'shakemap': shakedetail,
            'parameters': {
                'modeltype': modeltype
            }
        }
    }

    if saveinputs is True:
        maplayers['pga'] = {
            'grid': GDALGrid(PGA, fgeodict),
            'label': 'PGA (g)',
            'type': 'input',
            'description': {
                'units': 'g',
                'shakemap': shakedetail
            }
        }
        maplayers['vs30'] = {
            'grid': GDALGrid(vs30, fgeodict),
            'label': 'Vs30 (m/s)',
            'type': 'input',
            'description': {
                'units': 'm/s'
            }
        }
        maplayers['wtd'] = {
            'grid': GDALGrid(wtd_grid._data, fgeodict),
            'label': 'wtd (m)',
            'type': 'input',
            'description': {
                'units': 'm'
            }
        }
    return maplayers
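
A numeric check of the two Hazus correction factors used above, for a hypothetical M6.5 event and a 10 m water-table depth:

mag = 6.5
k_m = 0.0027 * mag ** 3 - 0.0267 * mag ** 2 - 0.2055 * mag + 2.9188
wt_ft = 10.0 * 3.28084            # convert m to ft, as in hazus_liq
k_w = 0.022 * wt_ft + 0.93
print(k_m, k_w)                   # ~1.20 and ~1.65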
Code example #52
0
File: sample.py Project: mhearne-usgs/lsprocess
def getFileGeoDict(filename, gridtype):
    if gridtype == 'gmt':
        fgeodict, tmp = GMTGrid.getFileGeoDict(filename)
    else:
        fgeodict, tmp = GDALGrid.getFileGeoDict(filename)
    return fgeodict
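
A minimal usage sketch for this wrapper; 'topo.grd' is a hypothetical file path:

import os

if os.path.exists('topo.grd'):
    fdict = getFileGeoDict('topo.grd', 'gmt')
    print(fdict.xmin, fdict.xmax, fdict.dx, fdict.dy)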