Example no. 1
def godt2008(shakefile,
             config,
             uncertfile=None,
             saveinputs=False,
             displmodel=None,
             bounds=None,
             slopediv=100.,
             codiv=10.,
             numstd=None,
             trimfile=None):
    """
    This function runs the Godt and others (2008) global method for a given
    ShakeMap. The Factor of Safety is calculated using infinite slope analysis
    assuming dry conditions. The method uses a threshold Newmark displacement
    and estimates areal coverage by doing the calculations for each slope
    quantile.

    Args:
        shakefile (str): Path to shakemap xml file.
        config (ConfigObj): ConfigObj of config file containing inputs
            required for running the model.
        uncertfile (str): Path to shakemap uncertainty xml file (optional).
        saveinputs (bool): Whether or not to return the model input layers,
            False (default) returns only the model output (one layer).
        displmodel (str): Newmark displacement regression model to use

            * ``'J_PGA'`` -- PGA-based model, equation 6 from
              Jibson (2007).
            * ``'J_PGA_M'`` (default) -- PGA and M-based model, equation 7
              from Jibson (2007).
            * ``'RS_PGA_M'`` -- PGA and M-based model from Rathje and
              Saygili (2009).
            * ``'RS_PGA_PGV'`` -- PGA and PGV-based model, equation 6
              from Saygili and Rathje (2008).

        bounds (dict): Optional dictionary with keys 'xmin', 'xmax', 'ymin',
            'ymax' that defines a subset of the shakemap area to compute.
        slopediv (float): Divide slope by this number to get slope in degrees
            (Verdin datasets need to be divided by 100).
        codiv (float): Divide cohesion input layer by this number
            (For Godt method, need to divide by 10 because that is how it was
            calibrated).
        numstd (float): Number of (+/-) standard deviations to use if
            uncertainty is computed (uncertfile must be supplied).
        trimfile (str): Path to shapefile of Earth's landmasses used to trim
            offshore areas from the model output.

    Returns:
        dict: Dictionary containing the output layer and, if
        saveinputs=True, the input layers as well:

        .. code-block:: python

            {
                'grid': mapio grid2D object,
                'label': 'label for colorbar and top line of subtitle',
                'type': 'output or input to model',
                'description': {
                    'name': 'short reference of model',
                    'longref': 'full model reference',
                    'units': 'units of output',
                    'shakemap': 'information about shakemap used',
                    'event_id': 'shakemap event id',
                    'parameters': 'dictionary of model parameters used'
                }
            }

    Raises:
         NameError: when unable to parse the config correctly (probably a
             formatting issue in the config file) or when unable to find the
             shakefile (ShakeMap filepath) -- these cause the program to end.

    """
    # TODO:
    #    - Add 'all' -- averages Dn from all four equations, add term to
    #      convert PGA and PGV to Ia and use other equations, add Ambraseys and
    #      Menu (1988) option.

    # Empty refs
    slopesref = 'unknown'
    slopelref = 'unknown'
    cohesionlref = 'unknown'
    cohesionsref = 'unknown'
    frictionsref = 'unknown'
    frictionlref = 'unknown'
    modellref = 'unknown'
    modelsref = 'unknown'

    # See if trimfile exists
    if trimfile is not None:
        if not os.path.exists(trimfile):
            print('trimfile defined does not exist: %s\n'
                  'Ocean will not be trimmed' % trimfile)
            trimfile = None
        elif os.path.splitext(trimfile)[1] != '.shp':
            print('trimfile must be a shapefile, ocean will not be trimmed')
            trimfile = None

    # Parse config
    try:  # May want to add error handling so if refs aren't given, just
        # includes unknown
        slopefilepath = config['godt_2008']['layers']['slope']['filepath']
        slopeunits = config['godt_2008']['layers']['slope']['units']
        cohesionfile = config['godt_2008']['layers']['cohesion']['file']
        cohesionunits = config['godt_2008']['layers']['cohesion']['units']
        frictionfile = config['godt_2008']['layers']['friction']['file']
        frictionunits = config['godt_2008']['layers']['friction']['units']

        thick = float(config['godt_2008']['parameters']['thick'])
        uwt = float(config['godt_2008']['parameters']['uwt'])
        nodata_cohesion = \
            float(config['godt_2008']['parameters']['nodata_cohesion'])
        nodata_friction = \
            float(config['godt_2008']['parameters']['nodata_friction'])
        dnthresh = float(config['godt_2008']['parameters']['dnthresh'])
        fsthresh = float(config['godt_2008']['parameters']['fsthresh'])
        acthresh = float(config['godt_2008']['parameters']['acthresh'])
        try:
            slopemin = float(config['godt_2008']['parameters']['slopemin'])
        except Exception:
            slopemin = 0.01
            print('No slopemin found in config file, using 0.01 deg '
                  'for slope minimum')
    except Exception as e:
        raise NameError('Could not parse configfile, %s' % e)

    if displmodel is None:
        try:
            displmodel = config['godt_2008']['parameters']['displmodel']
        except Exception:
            print('No regression model specified, using default of J_PGA_M')
            displmodel = 'J_PGA_M'

    # TO DO: ADD ERROR CATCHING ON UNITS, MAKE SURE THEY ARE WHAT THEY SHOULD
    #        BE FOR THIS MODEL

    try:  # Try to fetch source information from config
        modelsref = config['godt_2008']['shortref']
        modellref = config['godt_2008']['longref']
        slopesref = config['godt_2008']['layers']['slope']['shortref']
        slopelref = config['godt_2008']['layers']['slope']['longref']
        cohesionsref = config['godt_2008']['layers']['cohesion']['shortref']
        cohesionlref = config['godt_2008']['layers']['cohesion']['longref']
        frictionsref = config['godt_2008']['layers']['friction']['shortref']
        frictionlref = config['godt_2008']['layers']['friction']['longref']
    except Exception:
        print('Was not able to retrieve all references from config file. '
              'Continuing.')

    # Figure out how/if need to cut anything
    geodict = ShakeGrid.getFileGeoDict(shakefile)  # , adjust='res')
    if bounds is not None:  # Make sure bounds are within ShakeMap Grid
        if geodict.xmin < geodict.xmax:  # only if signs are not opposite
            if (geodict.xmin > bounds['xmin'] or geodict.xmax < bounds['xmax']
                    or geodict.ymin > bounds['ymin']
                    or geodict.ymax < bounds['ymax']):
                print('Specified bounds are outside shakemap area, using '
                      'ShakeMap bounds instead.')
                bounds = None

    if bounds is not None:
        tempgdict = GeoDict.createDictFromBox(bounds['xmin'],
                                              bounds['xmax'],
                                              bounds['ymin'],
                                              bounds['ymax'],
                                              geodict.dx,
                                              geodict.dy,
                                              inside=False)
        # If Shakemap geodict crosses 180/-180 line, fix geodict so things don't break
        if geodict.xmin > geodict.xmax:
            if tempgdict.xmin < 0:
                geodict._xmin -= 360.
            else:
                geodict._xmax += 360.
        geodict = geodict.getBoundsWithin(tempgdict)

    basegeodict, firstcol = GDALGrid.getFileGeoDict(
        os.path.join(slopefilepath, 'slope_min.bil'))
    if basegeodict == geodict:
        sampledict = geodict
    else:
        sampledict = basegeodict.getBoundsWithin(geodict)

    # Do we need to subdivide baselayer?
    if 'divfactor' in config['godt_2008'].keys():
        divfactor = float(config['godt_2008']['divfactor'])
        if divfactor != 1.:
            # adjust sampledict so everything will be resampled (cut one cell
            # off each edge so the new grid will be inside the bounds)
            newxmin = sampledict.xmin - sampledict.dx/2. + \
                sampledict.dx/(2.*divfactor) + sampledict.dx
            newymin = sampledict.ymin - sampledict.dy/2. + \
                sampledict.dy/(2.*divfactor) + sampledict.dy
            newxmax = sampledict.xmax + sampledict.dx/2. - \
                sampledict.dx/(2.*divfactor) - sampledict.dx
            newymax = sampledict.ymax + sampledict.dy/2. - \
                sampledict.dy/(2.*divfactor) - sampledict.dy
            newdx = sampledict.dx / divfactor
            newdy = sampledict.dy / divfactor

            sampledict = GeoDict.createDictFromBox(newxmin,
                                                   newxmax,
                                                   newymin,
                                                   newymax,
                                                   newdx,
                                                   newdy,
                                                   inside=True)

    tmpdir = tempfile.mkdtemp()

    # Load in ShakeMap and get new geodictionary
    temp = ShakeGrid.load(shakefile)  # , adjust='res')
    junkfile = os.path.join(tmpdir, 'temp.bil')
    GDALGrid.copyFromGrid(temp.getLayer('pga')).save(junkfile)
    pga = quickcut(junkfile, sampledict, precise=True, method='bilinear')
    os.remove(junkfile)
    GDALGrid.copyFromGrid(temp.getLayer('pgv')).save(junkfile)
    pgv = quickcut(junkfile, sampledict, precise=True, method='bilinear')
    os.remove(junkfile)
    # Update geodictionary
    sampledict = pga.getGeoDict()

    t2 = temp.getEventDict()
    M = t2['magnitude']
    event_id = t2['event_id']
    shakedict = temp.getShakeDict()
    del (temp)

    # read in uncertainty if present
    if uncertfile is not None:
        try:
            temp = ShakeGrid.load(uncertfile)  # , adjust='res')
            GDALGrid.copyFromGrid(temp.getLayer('stdpga')).save(junkfile)
            uncertpga = quickcut(junkfile,
                                 sampledict,
                                 precise=True,
                                 method='bilinear',
                                 override=True)
            os.remove(junkfile)
            GDALGrid.copyFromGrid(temp.getLayer('stdpgv')).save(junkfile)
            uncertpgv = quickcut(junkfile,
                                 sampledict,
                                 precise=True,
                                 method='bilinear',
                                 override=True)
            os.remove(junkfile)
        except Exception:
            print('Could not read uncertainty file, ignoring uncertainties')
            uncertfile = None
        if numstd is None:
            numstd = 1.

    # Read in all the slope files and divide by slopediv to convert to slope
    # in degrees (the Verdin input files store slope multiplied by 100)
    slopes = []
    quantiles = [
        'slope_min.bil', 'slope10.bil', 'slope30.bil', 'slope50.bil',
        'slope70.bil', 'slope90.bil', 'slope_max.bil'
    ]
    for quant in quantiles:
        tmpslp = quickcut(os.path.join(slopefilepath, quant), sampledict)
        tgd = tmpslp.getGeoDict()
        if tgd != sampledict:
            raise Exception('Input layers are not aligned to same geodict')
        else:
            slopes.append(tmpslp.getData() / slopediv)

    slopestack = np.dstack(slopes)

    # Change any zero slopes to a very small number to avoid dividing by
    # zero later
    slopestack[slopestack == 0] = 1e-8

    # Read in the cohesion and friction files and duplicate layers so they
    # are same shape as slope structure

    tempco = quickcut(cohesionfile, sampledict, method='near')
    tempco = tempco.getData()[:, :, np.newaxis] / codiv
    cohesion = np.repeat(tempco, 7, axis=2)
    cohesion[cohesion == -999.9] = nodata_cohesion
    cohesion = np.nan_to_num(cohesion)
    cohesion[cohesion == 0] = nodata_cohesion

    tempfric = quickcut(frictionfile, sampledict, method='near')
    tempfric = tempfric.getData().astype(float)[:, :, np.newaxis]
    friction = np.repeat(tempfric, 7, axis=2)
    friction[friction == -9999] = nodata_friction
    friction = np.nan_to_num(friction)
    friction[friction == 0] = nodata_friction

    # Compute the static Factor of Safety using dry infinite-slope analysis
    FS = (cohesion / (uwt * thick * np.sin(slopestack * (np.pi / 180.))) +
          np.tan(friction * (np.pi / 180.)) / np.tan(slopestack *
                                                     (np.pi / 180.)))
    FS[FS < fsthresh] = fsthresh

    # Compute critical acceleration, in g
    # This gives Ac in g; equations that multiply by g give Ac in m/s2
    Ac = (FS - 1) * np.sin(slopestack * (np.pi / 180.)).astype(float)
    Ac[Ac < acthresh] = acthresh

    # Get PGA in g (PGA is %g in ShakeMap, convert to g)
    PGA = np.repeat(pga.getData()[:, :, np.newaxis] / 100., 7,
                    axis=2).astype(float)
    if 'PGV' in displmodel:  # Load in PGV also, in cm/sec
        PGV = np.repeat(pgv.getData()[:, :, np.newaxis], 7,
                        axis=2).astype(float)
    else:
        PGV = None

    if uncertfile is not None:
        stdpga = np.repeat(uncertpga.getData()[:, :, np.newaxis], 7,
                           axis=2).astype(float)
        stdpgv = np.repeat(uncertpgv.getData()[:, :, np.newaxis], 7,
                           axis=2).astype(float)
        # estimate PGA +/- numstd standard deviations
        PGAmin = np.exp(np.log(PGA * 100) - numstd * stdpga) / 100
        PGAmax = np.exp(np.log(PGA * 100) + numstd * stdpga) / 100
        if 'PGV' in displmodel:
            PGVmin = np.exp(np.log(PGV) - numstd * stdpgv)
            PGVmax = np.exp(np.log(PGV) + numstd * stdpgv)
        else:
            PGVmin = None
            PGVmax = None

    # Ignore invalid-value warnings so this still runs when Ac > PGA; those
    # cells are simply left as nan instead of crashing.
    np.seterr(invalid='ignore')

    Dn, logDnstd, logtype = NMdisp(Ac, PGA, model=displmodel, M=M, PGV=PGV)
    if uncertfile is not None:
        Dnmin, logDnstdmin, logtype = NMdisp(Ac,
                                             PGAmin,
                                             model=displmodel,
                                             M=M,
                                             PGV=PGVmin)
        Dnmax, logDnstdmax, logtype = NMdisp(Ac,
                                             PGAmax,
                                             model=displmodel,
                                             M=M,
                                             PGV=PGVmax)

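    # Count how many of the seven slope quantiles produce Newmark
    # displacements at or above the threshold (an integer 0-7 per cell)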
    PROB = Dn.copy()
    PROB[PROB < dnthresh] = 0.
    PROB[PROB >= dnthresh] = 1.
    PROB = np.sum(PROB, axis=2)
    if uncertfile is not None:
        PROBmin = Dnmin.copy()
        PROBmin[PROBmin <= dnthresh] = 0.
        PROBmin[PROBmin > dnthresh] = 1.
        PROBmin = np.sum(PROBmin, axis=2)
        PROBmax = Dnmax.copy()
        PROBmax[PROBmax <= dnthresh] = 0.
        PROBmax[PROBmax > dnthresh] = 1.
        PROBmax = np.sum(PROBmax, axis=2)

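    # Map the quantile-exceedance count to the calibrated proportion of area
    # affected from Godt and others (2008)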
    PROB[PROB == 1.] = 0.01
    PROB[PROB == 2.] = 0.10
    PROB[PROB == 3.] = 0.30
    PROB[PROB == 4.] = 0.50
    PROB[PROB == 5.] = 0.70
    PROB[PROB == 6.] = 0.90
    PROB[PROB == 7.] = 0.99

    if uncertfile is not None:
        PROBmin[PROBmin == 1.] = 0.01
        PROBmin[PROBmin == 2.] = 0.10
        PROBmin[PROBmin == 3.] = 0.30
        PROBmin[PROBmin == 4.] = 0.50
        PROBmin[PROBmin == 5.] = 0.70
        PROBmin[PROBmin == 6.] = 0.90
        PROBmin[PROBmin == 7.] = 0.99
        PROBmax[PROBmax == 1.] = 0.01
        PROBmax[PROBmax == 2.] = 0.10
        PROBmax[PROBmax == 3.] = 0.30
        PROBmax[PROBmax == 4.] = 0.50
        PROBmax[PROBmax == 5.] = 0.70
        PROBmax[PROBmax == 6.] = 0.90
        PROBmax[PROBmax == 7.] = 0.99

    if slopemin is not None:
        PROB[slopestack[:, :, 6] <= slopemin] = 0.
        # uncert too
        if uncertfile is not None:
            PROBmin[slopestack[:, :, 6] <= slopemin] = 0.
            PROBmax[slopestack[:, :, 6] <= slopemin] = 0.

    # Turn output and inputs into grids and put them in the maplayers dictionary
    maplayers = collections.OrderedDict()

    shakedetail = '%s_ver%s' % (shakedict['shakemap_id'],
                                shakedict['shakemap_version'])

    description = {
        'name': modelsref,
        'longref': modellref,
        'units': 'Proportion of Area Affected',
        'shakemap': shakedetail,
        'event_id': event_id,
        'parameters': {
            'displmodel': displmodel,
            'thickness_m': thick,
            'unitwt_kNm3': uwt,
            'dnthresh_cm': dnthresh,
            'acthresh_g': acthresh,
            'fsthresh': fsthresh,
            'modeltype': 'Landslide'
        }
    }
    PROBgrid = GDALGrid(PROB, sampledict)
    if trimfile is not None:
        PROBgrid = trim_ocean(PROBgrid, trimfile)

    maplayers['model'] = {
        'grid': PROBgrid,
        'label': 'Landslide - Proportion of Area Affected',
        'type': 'output',
        'description': description
    }

    if uncertfile is not None:
        PROBmingrid = GDALGrid(PROBmin, sampledict)
        PROBmaxgrid = GDALGrid(PROBmax, sampledict)
        if trimfile is not None:
            PROBmingrid = trim_ocean(PROBmingrid, trimfile)
            PROBmaxgrid = trim_ocean(PROBmaxgrid, trimfile)
        maplayers['modelmin'] = {
            'grid': PROBmingrid,
            'label': 'Landslide Probability-%1.2fstd' % numstd,
            'type': 'output',
            'description': description
        }
        maplayers['modelmax'] = {
            'grid': PROBmaxgrid,
            'label': 'Landslide Probability+%1.2fstd' % numstd,
            'type': 'output',
            'description': description
        }

    if saveinputs:
        maplayers['pga'] = {
            'grid': GDALGrid(PGA[:, :, 0], sampledict),
            'label': 'PGA (g)',
            'type': 'input',
            'description': {
                'units': 'g',
                'shakemap': shakedetail
            }
        }
        if 'PGV' in displmodel:
            maplayers['pgv'] = {
                'grid': GDALGrid(PGV[:, :, 0], sampledict),
                'label': 'PGV (cm/s)',
                'type': 'input',
                'description': {
                    'units': 'cm/s',
                    'shakemap': shakedetail
                }
            }
        maplayers['minFS'] = {
            'grid': GDALGrid(np.min(FS, axis=2), sampledict),
            'label': 'Min Factor of Safety',
            'type': 'input',
            'description': {
                'units': 'unitless'
            }
        }
        maplayers['max slope'] = {
            'grid': GDALGrid(slopestack[:, :, -1], sampledict),
            'label': r'Maximum slope ($^\circ$)',
            'type': 'input',
            'description': {
                'units': 'degrees',
                'name': slopesref,
                'longref': slopelref
            }
        }
        maplayers['cohesion'] = {
            'grid': GDALGrid(cohesion[:, :, 0], sampledict),
            'label': 'Cohesion (kPa)',
            'type': 'input',
            'description': {
                'units': 'kPa (adjusted)',
                'name': cohesionsref,
                'longref': cohesionlref
            }
        }
        maplayers['friction angle'] = {
            'grid': GDALGrid(friction[:, :, 0], sampledict),
            'label': r'Friction angle ($^\circ$)',
            'type': 'input',
            'description': {
                'units': 'degrees',
                'name': frictionsref,
                'longref': frictionlref
            }
        }
        if uncertfile is not None:
            maplayers['pgamin'] = {
                'grid': GDALGrid(PGAmin[:, :, 0], sampledict),
                'label': 'PGA - %1.2fstd (g)' % numstd,
                'type': 'input',
                'description': {
                    'units': 'g',
                    'shakemap': shakedetail
                }
            }
            maplayers['pgamax'] = {
                'grid': GDALGrid(PGAmax[:, :, 0], sampledict),
                'label': 'PGA + %1.2fstd (g)' % numstd,
                'type': 'input',
                'description': {
                    'units': 'g',
                    'shakemap': shakedetail
                }
            }
        if 'PGV' in displmodel:
            if uncertfile is not None:
                maplayers['pgvmin'] = {
                    'grid': GDALGrid(PGVmin[:, :, 0], sampledict),
                    'label': 'PGV - %1.2fstd (cm/s)' % numstd,
                    'type': 'input',
                    'description': {
                        'units': 'cm/s',
                        'shakemap': shakedetail
                    }
                }
                maplayers['pgvmax'] = {
                    'grid': GDALGrid(PGVmax[:, :, 0], sampledict),
                    'label': 'PGV + %1.2fstd (cm/s)' % numstd,
                    'type': 'input',
                    'description': {
                        'units': 'cm/s',
                        'shakemap': shakedetail
                    }
                }

    shutil.rmtree(tmpdir)

    return maplayers
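
# A minimal usage sketch for godt2008 (hypothetical file paths; the ConfigObj
# must contain the 'godt_2008' section parsed above):
#
#     from configobj import ConfigObj
#     config = ConfigObj('/path/to/config_godt_2008.ini')
#     maplayers = godt2008('/path/to/grid.xml', config, saveinputs=True)
#     prob = maplayers['model']['grid'].getData()  # proportion of area affected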
Example no. 2
def computeParea(grid2D,
                 proj='moll',
                 probthresh=0.0,
                 shakefile=None,
                 shakethreshtype='pga',
                 shakethresh=0.0):
    """
    Alternative to Aggregate Hazard (Hagg); Parea is the sum of the areas of
    grid cells that exceed a given probability.

    Args:
        grid2D: grid2D object of model output.
        proj: projection to use to obtain equal area, 'moll' for Mollweide or
            'laea' for Lambert azimuthal equal-area.
        probthresh: Optional, Float or list of probability thresholds.
        shakefile: Optional, path to shakemap file to use for ground motion
            threshold.
        shakethreshtype: Optional, Type of ground motion to use for
            shakethresh, 'pga', 'pgv', or 'mmi'.
        shakethresh: Optional, float shaking threshold in %g for pga, cm/s
            for pgv, or intensity units for mmi.

    Returns:
        Parea (float) if no or only one probthresh defined,
        otherwise, a list of floats of Parea corresponding to all
        specified probthresh values.
    """
    if not isinstance(probthresh, (list, np.ndarray)):
        probthresh = [probthresh]

    Parea = []
    bounds = grid2D.getBounds()
    lat0 = np.mean((bounds[2], bounds[3]))
    lon0 = np.mean((bounds[0], bounds[1]))
    projs = ('+proj=%s +lat_0=%f +lon_0=%f +x_0=0 +y_0=0 +ellps=WGS84 '
             '+units=km +no_defs' % (proj, lat0, lon0))
    geodict = grid2D.getGeoDict()

    if shakefile is not None:
        if shakethresh < 0.:
            raise Exception('shaking threshold must be equal or greater '
                            'than zero')
        tmpdir = tempfile.mkdtemp()
        # resample shakemap to grid2D
        temp = ShakeGrid.load(shakefile)
        junkfile = os.path.join(tmpdir, 'temp.bil')
        GDALGrid.copyFromGrid(temp.getLayer(shakethreshtype)).save(junkfile)
        shk = quickcut(junkfile, geodict, precise=True, method='bilinear')
        shutil.rmtree(tmpdir)
        if shk.getGeoDict() != geodict:
            raise Exception('shakemap was not resampled to exactly the same '
                            'geodict as the model')

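    # Project to an equal-area projection so every cell covers (approximately)
    # the same area in km2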
    grid = grid2D.project(projection=projs)
    geodictRS = grid.getGeoDict()
    cell_area_km2 = geodictRS.dx * geodictRS.dy
    model = grid.getData()
    model[np.isnan(model)] = -1.
    for probt in probthresh:
        if probt < 0.:
            raise Exception('probability threshold must be equal or greater '
                            'than zero')
        modcop = model.copy()
        if shakefile is not None:
            shkgrid = shk.project(projection=projs)
            shkdat = shkgrid.getData()
            # use -1 to avoid nan errors and warnings, will always be thrown
            # out because default probthresh is 0 and must be positive.
            shkdat[np.isnan(shkdat)] = -1.
            modcop[shkdat < shakethresh] = -1.
        one_mat = np.ones_like(modcop)
        Parea.append(np.sum(one_mat[modcop >= probt] * cell_area_km2))

    if len(Parea) == 1:
        Parea = Parea[0]
    return Parea
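
# A minimal usage sketch for computeParea (hypothetical inputs; grid2D is the
# 'model' layer grid from a model run such as godt2008 above):
#
#     parea = computeParea(maplayers['model']['grid'],
#                          probthresh=[0.0, 0.1, 0.3],
#                          shakefile='/path/to/grid.xml',
#                          shakethreshtype='pga', shakethresh=20.)
#     # parea is a list of areas in km2, one per probthresh value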
Example no. 3
def get_exposures(grid,
                  pop_file,
                  shakefile=None,
                  shakethreshtype=None,
                  shakethresh=None,
                  probthresh=None):
    """
    Get exposure-based statistics.

    Args:
        grid: Model grid.
        pop_file (str):  Path to the landscan population grid.
        shakefile (str): Optional, path to shakemap file to use for ground
            motion threshold.
        shakethreshtype (str): Optional, type of ground motion to use for
            shakethresh, 'pga', 'pgv', or 'mmi'.
        shakethresh: Optional, Float or list of shaking thresholds in %g for
            pga, cm/s for pgv, float for mmi.
        probthresh: Optional, None or float, exclude any cells with probabilities
            less than or equal to this value

    Returns:
        dict: Dictionary with keys named exp_pop_# where # is the shakethresh
    """

    # If probthresh defined, zero out any areas less than or equal to probthresh
    # before proceeding

    if probthresh is not None:
        origdata = grid.getData()
        moddat = origdata.copy()
        moddat[moddat <= probthresh] = 0.0
        moddat[np.isnan(origdata)] = float('nan')
    else:
        moddat = grid.getData()

    mdict = grid.getGeoDict()

    # Cut out area from population file
    popcut = quickcut(pop_file,
                      mdict,
                      precise=False,
                      extrasamp=2.,
                      method='nearest')
    popdat = popcut.getData()
    pdict = popcut.getGeoDict()

    # Pad grid with nans to beyond extent of pdict
    pad_dict = {}
    pad_dict['padleft'] = int(
        np.abs(np.ceil((mdict.xmin - pdict.xmin) / mdict.dx)))
    pad_dict['padright'] = int(
        np.abs(np.ceil((pdict.xmax - mdict.xmax) / mdict.dx)))
    pad_dict['padbottom'] = int(
        np.abs(np.ceil((mdict.ymin - pdict.ymin) / mdict.dy)))
    pad_dict['padtop'] = int(
        np.abs(np.ceil((pdict.ymax - mdict.ymax) / mdict.dy)))
    padgrid, mdict2 = Grid2D.padGrid(moddat, mdict, pad_dict)  # pads with inf
    padgrid[np.isinf(padgrid)] = float('nan')  # change to pad with nan
    padgrid = Grid2D(data=padgrid, geodict=mdict2)  # Turn into grid2d object

    # Resample model grid so its resolution is the nearest integer factor of
    # the population grid resolution
    factor = np.round(pdict.dx / mdict2.dx)

    # Create geodictionary that is a factor of X higher res but otherwise
    # identical
    ndict = GeoDict.createDictFromBox(pdict.xmin, pdict.xmax, pdict.ymin,
                                      pdict.ymax, pdict.dx / factor,
                                      pdict.dy / factor)

    # Resample
    grid2 = padgrid.interpolate2(ndict, method='linear')

    # Get proportion of each cell that has values (to account properly
    # for any nans)
    prop = block_reduce(~np.isnan(grid2.getData().copy()),
                        block_size=(int(factor), int(factor)),
                        cval=float('nan'),
                        func=np.sum) / (factor**2.)

    # Now block reduce to same geodict as popfile
    modresamp = block_reduce(grid2.getData().copy(),
                             block_size=(int(factor), int(factor)),
                             cval=float('nan'),
                             func=np.nanmean)

    exp_pop = {}
    if shakefile is not None:
        if not isinstance(shakethresh, (list, np.ndarray)):
            shakethresh = [shakethresh]
        # Resample shakefile to population grid
        # , doPadding=True, padValue=0.)
        shakemap = ShakeGrid.load(shakefile, resample=False)
        shakemap = shakemap.getLayer(shakethreshtype)
        shakemap = shakemap.interpolate2(pdict)
        shkdat = shakemap.getData()
        for shaket in shakethresh:
            threshmult = shkdat > shaket
            threshmult = threshmult.astype(float)
            exp_pop['exp_pop_%1.2fg' % (shaket / 100., )] = np.nansum(
                popdat * prop * modresamp * threshmult)

    else:
        exp_pop['exp_pop_0.00g'] = np.nansum(popdat * prop * modresamp)

    return exp_pop
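
# A minimal usage sketch for get_exposures (hypothetical file paths; when
# shakefile is given, shakethresh may be a float or list of thresholds):
#
#     exp = get_exposures(maplayers['model']['grid'],
#                         '/path/to/landscan_population.flt',
#                         shakefile='/path/to/grid.xml',
#                         shakethreshtype='pga', shakethresh=[10., 20.])
#     exp['exp_pop_0.10g']  # population exposed where pga exceeds 10 %g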
Example no. 4
def computeHagg(grid2D,
                proj='moll',
                probthresh=0.0,
                shakefile=None,
                shakethreshtype='pga',
                shakethresh=0.0):
    """
    Computes the Aggregate Hazard (Hagg), which is equal to the sum of
    probability * grid cell area. For models that compute areal coverage,
    this is equivalent to the total predicted area affected in km2.

    Args:
        grid2D: grid2D object of model output.
        proj: projection to use to obtain equal area, 'moll' for Mollweide or
            'laea' for Lambert azimuthal equal-area.
        probthresh: Probability threshold, any values less than this will not
            be included in aggregate hazard estimation.
        shakefile: Optional, path to shakemap file to use for ground motion
            threshold.
        shakethreshtype: Optional, Type of ground motion to use for
            shakethresh, 'pga', 'pgv', or 'mmi'.
        shakethresh: Optional, Float or list of shaking thresholds in %g for
            pga, cm/s for pgv, float for mmi.

    Returns:
        Aggregate hazard, Hagg (float), if no or only one shakethresh is
        defined; otherwise, a list of floats of Hagg corresponding to all
        specified shakethresh values.
    """
    Hagg = []
    bounds = grid2D.getBounds()
    lat0 = np.mean((bounds[2], bounds[3]))
    lon0 = np.mean((bounds[0], bounds[1]))
    projs = ('+proj=%s +lat_0=%f +lon_0=%f +x_0=0 +y_0=0 +ellps=WGS84 '
             '+units=km +no_defs' % (proj, lat0, lon0))
    geodict = grid2D.getGeoDict()

    if shakefile is not None:
        if not isinstance(shakethresh, (list, np.ndarray)):
            shakethresh = [shakethresh]
        for shaket in shakethresh:
            if shaket < 0.:
                raise Exception('shaking threshold must be equal or greater '
                                'than zero')
        tmpdir = tempfile.mkdtemp()
        # resample shakemap to grid2D
        temp = ShakeGrid.load(shakefile)
        junkfile = os.path.join(tmpdir, 'temp.bil')
        GDALGrid.copyFromGrid(temp.getLayer(shakethreshtype)).save(junkfile)
        shk = quickcut(junkfile, geodict, precise=True, method='bilinear')
        shutil.rmtree(tmpdir)
        if shk.getGeoDict() != geodict:
            raise Exception('shakemap was not resampled to exactly the same '
                            'geodict as the model')

    if probthresh < 0.:
        raise Exception('probability threshold must be equal or greater '
                        'than zero')

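    # Project to an equal-area projection so every cell covers (approximately)
    # the same area in km2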
    grid = grid2D.project(projection=projs, method='bilinear')
    geodictRS = grid.getGeoDict()
    cell_area_km2 = geodictRS.dx * geodictRS.dy
    model = grid.getData()
    model[np.isnan(model)] = -1.
    if shakefile is not None:
        for shaket in shakethresh:
            modcop = model.copy()
            shkgrid = shk.project(projection=projs)
            shkdat = shkgrid.getData()
            # use -1 to avoid nan errors and warnings, will always be thrown
            # out because default is 0.
            shkdat[np.isnan(shkdat)] = -1.
            modcop[shkdat < shaket] = -1.
            Hagg.append(np.sum(modcop[modcop >= probthresh] * cell_area_km2))
    else:
        Hagg.append(np.sum(model[model >= probthresh] * cell_area_km2))
    if len(Hagg) == 1:
        Hagg = Hagg[0]
    return Hagg
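
# A minimal usage sketch for computeHagg (hypothetical inputs):
#
#     hagg = computeHagg(maplayers['model']['grid'], probthresh=0.0,
#                        shakefile='/path/to/grid.xml',
#                        shakethreshtype='pga', shakethresh=[20., 40.])
#     # hagg is a list of aggregate hazard values in km2, one per shakethresh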
Example no. 5
def computePexp(grid, pop_file, shakefile=None, shakethreshtype='pga',
                shakethresh=0., probthresh=0., stdgrid2D=None,
                stdtype='full', maxP=1., sill1=None, range1=None):
    """
    Get exposure-based statistics.

    Args:
        grid: Model grid.
        pop_file (str):  Path to the landscan population grid.
        shakefile (str): Optional, path to shakemap file to use for ground
            motion threshold.
        shakethreshtype (str): Optional, type of ground motion to use for
            shakethresh, 'pga', 'pgv', or 'mmi'.
        shakethresh: Float or list of shaking thresholds in %g for
            pga, cm/s for pgv, float for mmi.
        probthresh: Float, exclude any cells with
            probabilities less than or equal to this value
        stdgrid2D: grid2D object of model standard deviations (optional)
        stdtype (str): assumption of spatial correlation used to compute
            the stdev of the statistics, 'max', 'min', 'mean' of max and min,
            or 'full' (default) which estimates the range of correlation and
            accounts for covariance. Will return 'mean' if
            range and sill cannot be estimated.
        maxP (float): the maximum possible probability of the model
        sill1 (float): If known, the sill of the variogram of grid2D, will be
            estimated if None and stdtype='full'
        range1 (float): If known, the range of the variogram of grid2D, will
            be estimated if None and stdtype='full'

    Returns:
        dict: Dictionary with keys named:

            * exp_pop_# where # is the shakethresh
            * exp_std_# if stdgrid2D is supplied (stdev of exp_pop)
            * elim_#, the maximum exposure value possible with the
              applied thresholds and given maxP value
            * p_exp_#, beta distribution shape factor p (sometimes
              called alpha)
            * q_exp_#, beta distribution shape factor q (sometimes
              called beta)
    """

    model = grid.getData().copy()
    mdict = grid.getGeoDict()

    # Figure out difference in resolution of popfile to shakefile
    ptemp, J = GDALGrid.getFileGeoDict(pop_file)
    factor = ptemp.dx/mdict.dx

    # Cut out area from population file
    popcut1 = quickcut(pop_file, mdict, precise=False, extrasamp=2., method='nearest')
    #tot1 = np.sum(popcut1.getData())
    # Adjust for factor to prepare for upsampling to avoid creating new people
    popcut1.setData(popcut1.getData()/factor**2)

    # Upsample to mdict
    popcut = popcut1.interpolate2(mdict, method='nearest')
    popdat = popcut.getData()
    exp_pop = {}

    if shakefile is not None:
        if shakethresh < 0.:
            raise Exception('shaking threshold must be equal or greater '
                            'than zero')
        # resample shakemap to grid2D
        temp = ShakeGrid.load(shakefile)
        shk = temp.getLayer(shakethreshtype)
        shk = shk.interpolate2(mdict)
        if shk.getGeoDict() != mdict:
            raise Exception('shakemap was not resampled to exactly the same '
                            'geodict as the model')
        shkdat = shk.getData()
        model[shkdat < shakethresh] = float('nan')
    else:
        shakethresh = 0.
        shkdat = None

    mu = np.nansum(model[model >= probthresh] * popdat[model >= probthresh])
    exp_pop['exp_pop_%1.2fg' % (shakethresh/100.,)] = mu
    #N = np.nansum([model >= probthresh])
    #exp_pop['N_%1.2fg' % (shakethresh/100.,)] = N
    elim = np.nansum(popdat[model >= probthresh])*maxP
    exp_pop['elim_%1.2fg' % (shakethresh/100.,)] = elim

    if stdgrid2D is not None:
        std = stdgrid2D.getData().copy()
        if np.nanmax(std) > 0. and np.nanmax(model) >= probthresh:
            totalmin = np.sqrt(np.nansum((popdat[model >= probthresh]*std[model >= probthresh])**2.))
            totalmax = np.nansum(std[model >= probthresh] * popdat[model >= probthresh])
            if stdtype == 'full':
                if sill1 is None or range1 is None:
                    modelfresh = grid.getData().copy()
                    range1, sill1 = semivario(modelfresh, probthresh,
                                              shakethresh=shakethresh,
                                              shakegrid=shkdat)
                if range1 is None:
                    # Use mean
                    exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)] = (totalmax+totalmin)/2.
                else:
                    # Zero out std at cells where the model probability was
                    # below the threshold because those cells are not
                    # included in the exposure estimate
                    stdz = std.copy()
                    stdz[model < probthresh] = 0.
                    svar1 = svar(stdz, range1, sill1, scale=popdat)
                    exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)] = np.sqrt(svar1)
                    #exp_pop['exp_range_%1.2fg' % (shakethresh/100.,)] = range1
                    #exp_pop['exp_sill_%1.2fg' % (shakethresh/100.,)] = sill1

            elif stdtype == 'max':
                exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)] = totalmax
            elif stdtype == 'min':
                exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)] = totalmin
            else:
                exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)] = (totalmax+totalmin)/2.
            # Beta distribution shape factors
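            # (method-of-moments fit of a beta distribution rescaled to
            # [0, elim] with mean mu and variance var)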
            var = exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)]**2.
            exp_pop['p_exp_%1.2fg' % (shakethresh/100.,)] = (mu/elim)*((elim*mu-mu**2)/var-1)
            exp_pop['q_exp_%1.2fg' % (shakethresh/100.,)] = (1-mu/elim)*((elim*mu-mu**2)/var-1)
        else:
            print('no std values above zero, filling with zeros')
            exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)] = 0.
            exp_pop['p_exp_%1.2fg' % (shakethresh/100.,)] = 0.
            exp_pop['q_exp_%1.2fg' % (shakethresh/100.,)] = 0.
    else:
        exp_pop['exp_std_%1.2fg' % (shakethresh/100.,)] = 0.
        exp_pop['p_exp_%1.2fg' % (shakethresh/100.,)] = 0.
        exp_pop['q_exp_%1.2fg' % (shakethresh/100.,)] = 0.

    return exp_pop
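
# A minimal usage sketch for computePexp (hypothetical inputs; stdgrid2D and
# the variogram parameters sill1/range1 are optional):
#
#     exp = computePexp(maplayers['model']['grid'],
#                       '/path/to/landscan_population.flt',
#                       shakefile='/path/to/grid.xml', shakethresh=10.,
#                       probthresh=0.0, stdtype='mean')
#     exp['exp_pop_0.10g']  # expected exposed population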
Example no. 6
    def __init__(self,
                 shakefile,
                 config,
                 uncertfile=None,
                 saveinputs=False,
                 slopefile=None,
                 bounds=None,
                 slopemod=None,
                 trimfile=None):
        """
        Sets up the logistic model

        Args:
            shakefile (str): Path to shakemap grid.xml file for the event.
            config: configobj object defining the model and its inputs. Only
                one model should be described in each config file.
            uncertfile (str): Path to uncertainty.xml file.
            saveinputs (bool): Save input layers as Grid2D objects in addition
                to the model? If false (the default), it will just output the
                model.
            slopefile (str): Optional path to slopefile that will be resampled
                to the other input files for applying thresholds. OVERWRITES
                VALUE IN CONFIG.
            bounds (dict): Default of None uses ShakeMap boundaries, otherwise
                a dictionary of boundaries to cut to like

                .. code-block:: python

                    bounds = {
                        'xmin': lonmin, 'xmax': lonmax,
                        'ymin': latmin, 'ymax': latmax
                    }
            slopemod (str): How slope input should be modified to be in
                degrees: e.g., ``np.arctan(slope) * 180. / np.pi`` or
                ``slope/100.`` (note that this may be in the config file
                already).
            trimfile (str): Path to shapefile of Earth's landmasses used to
                cut offshore areas.
        """
        mnames = getLogisticModelNames(config)
        if len(mnames) == 0:
            raise Exception('No config file found or problem with config '
                            'file format')
        if len(mnames) > 1:
            raise Exception('Config file contains more than one model which '
                            'is no longer allowed, update your config file '
                            'to the newer format')

        self.model = mnames[0]
        self.config = config
        cmodel = config[self.model]
        self.modeltype = cmodel['gfetype']
        self.coeffs = validateCoefficients(cmodel)
        # key = layer name, value = file name
        self.layers = validateLayers(cmodel)
        self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)
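        # Record which ground motion parameters (pga, pgv, mmi) appear in the
        # model terms so that only those layers need to be read in later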
        self.gmused = [
            value for term, value in cmodel['terms'].items()
            if 'pga' in value.lower() or 'pgv' in value.lower()
            or 'mmi' in value.lower()
        ]
        self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
        #self.numstd = numstd
        self.clips = validateClips(cmodel, self.layers, self.gmused)
        self.notes = ''

        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception('You must specify a base layer corresponding to '
                            'one of the files in the layer section.')
        self.saveinputs = saveinputs
        if slopefile is None:
            try:
                self.slopefile = cmodel['slopefile']
            except Exception:
                # print('Slopefile not specified in config, no slope '
                #      'thresholds will be applied\n')
                self.slopefile = None
        else:
            self.slopefile = slopefile
        if slopemod is None:
            try:
                self.slopemod = cmodel['slopemod']
            except Exception:
                self.slopemod = None

        # See if trimfile exists
        if trimfile is not None:
            if not os.path.exists(trimfile):
                print('trimfile defined does not exist: %s\nOcean will not be '
                      'trimmed' % trimfile)
                self.trimfile = None
            elif os.path.splitext(trimfile)[1] != '.shp':
                print('trimfile must be a shapefile, ocean will not be '
                      'trimmed')
                self.trimfile = None
            else:
                self.trimfile = trimfile
        else:
            self.trimfile = None

        # Get month of event
        griddict, eventdict, specdict, fields, uncertainties = \
            getHeaderData(shakefile)
        MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]

        # Figure out how/if need to cut anything
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        if bounds is not None:  # Make sure bounds are within ShakeMap Grid
            if geodict.xmin < geodict.xmax:  # only if signs are not opposite
                if (geodict.xmin > bounds['xmin']
                        or geodict.xmax < bounds['xmax']
                        or geodict.ymin > bounds['ymin']
                        or geodict.ymax < bounds['ymax']):
                    print('Specified bounds are outside shakemap area, using '
                          'ShakeMap bounds instead.')
                    bounds = None

        if bounds is not None:
            tempgdict = GeoDict.createDictFromBox(bounds['xmin'],
                                                  bounds['xmax'],
                                                  bounds['ymin'],
                                                  bounds['ymax'],
                                                  geodict.dx,
                                                  geodict.dy,
                                                  inside=False)
            # If Shakemap geodict crosses 180/-180 line, fix geodict so things don't break
            if geodict.xmin > geodict.xmax:
                if tempgdict.xmin < 0:
                    geodict._xmin -= 360.
                else:
                    geodict._xmax += 360.
            gdict = geodict.getBoundsWithin(tempgdict)
        else:
            gdict = geodict

        # Now find the layer that is our base layer and get the largest bounds
        # we can guarantee not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
            if basegeodict == gdict:
                sampledict = gdict
            else:
                sampledict = basegeodict.getBoundsWithin(gdict)
        elif ftype == 'gmt':
            basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
            if basegeodict == gdict:
                sampledict = gdict
            else:
                sampledict = basegeodict.getBoundsWithin(gdict)
        else:
            raise Exception('All predictor variable grids must be a valid '
                            'GMT or ESRI file type.')

        # Do we need to subdivide baselayer?
        if 'divfactor' in self.config[self.model].keys():
            divfactor = float(self.config[self.model]['divfactor'])
            if divfactor != 1.:
                # adjust sampledict so everything will be resampled
                newxmin = sampledict.xmin - sampledict.dx / \
                    2. + sampledict.dx/(2.*divfactor)
                newymin = sampledict.ymin - sampledict.dy / \
                    2. + sampledict.dy/(2.*divfactor)
                newxmax = sampledict.xmax + sampledict.dx / \
                    2. - sampledict.dx/(2.*divfactor)
                newymax = sampledict.ymax + sampledict.dy / \
                    2. - sampledict.dy/(2.*divfactor)
                newdx = sampledict.dx / divfactor
                newdy = sampledict.dy / divfactor

                sampledict = GeoDict.createDictFromBox(newxmin,
                                                       newxmax,
                                                       newymin,
                                                       newymax,
                                                       newdx,
                                                       newdy,
                                                       inside=True)

        # Find slope thresholds, if applicable
        self.slopemin = 'none'
        self.slopemax = 'none'
        if self.slopefile is not None:
            try:
                self.slopemin = float(config[self.model]['slopemin'])
                self.slopemax = float(config[self.model]['slopemax'])
            except Exception:
                print('Could not find slopemin and/or slopemax in config. '
                      'No slope thresholds will be applied.')
                self.slopemin = 'none'
                self.slopemax = 'none'

        # Make temporary directory for hdf5 pytables file storage
        self.tempdir = tempfile.mkdtemp()

        # now load the shakemap, resampling and padding if necessary
        temp = ShakeGrid.load(shakefile)  # , adjust='res')
        self.shakedict = temp.getShakeDict()
        self.eventdict = temp.getEventDict()
        self.shakemap = {}

        # Read both PGA and PGV in, may need them for thresholds
        for gm in ['pga', 'pgv']:
            junkfile = os.path.join(self.tempdir, 'temp.bil')
            GDALGrid.copyFromGrid(temp.getLayer(gm)).save(junkfile)
            if gm in self.interpolations.keys():
                intermeth = self.interpolations[gm]
            else:
                intermeth = 'bilinear'
            junkgrid = quickcut(junkfile,
                                sampledict,
                                precise=True,
                                method=intermeth)
            if gm in self.clips:
                junkgrid.setData(
                    np.clip(junkgrid.getData(), self.clips[gm][0],
                            self.clips[gm][1]))
            self.shakemap[gm] = TempHdf(
                junkgrid, os.path.join(self.tempdir, '%s.hdf5' % gm))
            os.remove(junkfile)
        del (temp)

        # get updated geodict
        sampledict = junkgrid.getGeoDict()

        # take uncertainties into account, if available
        if uncertfile is not None:
            self.uncert = {}
            try:
                # Only read in the ones that will be needed
                temp = ShakeGrid.load(uncertfile)
                already = []
                for gm in self.gmused:
                    if 'pgv' in gm:
                        gmsimp = 'pgv'
                    elif 'pga' in gm:
                        gmsimp = 'pga'
                    elif 'mmi' in gm:
                        gmsimp = 'mmi'
                    if gmsimp in already:
                        continue
                    junkfile = os.path.join(self.tempdir, 'temp.bil')
                    GDALGrid.copyFromGrid(temp.getLayer('std%s' %
                                                        gmsimp)).save(junkfile)
                    if gmsimp in self.interpolations.keys():
                        intermeth = self.interpolations[gmsimp]
                    else:
                        intermeth = 'bilinear'
                    junkgrid = quickcut(junkfile,
                                        sampledict,
                                        precise=True,
                                        method=intermeth)
                    if gmsimp in self.clips:
                        junkgrid.setData(
                            np.clip(junkgrid.getData(), self.clips[gmsimp][0],
                                    self.clips[gmsimp][1]))
                    self.uncert['std' + gmsimp] = TempHdf(
                        junkgrid,
                        os.path.join(self.tempdir, 'std%s.hdf5' % gmsimp))
                    already.append(gmsimp)
                    os.remove(junkfile)
                del (temp)
            except Exception:
                print('Could not read uncertainty file, ignoring '
                      'uncertainties')
                self.uncert = None
        else:
            self.uncert = None

        # Load the predictor layers, save as hdf5 temporary files, put file
        # locations into a dictionary.

        # Will be replaced in the next section if a slopefile was defined
        self.nonzero = None

        # key = layer name, value = grid object
        self.layerdict = {}

        didslope = False
        for layername, layerfile in self.layers.items():
            start = timer()
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            temp = quickcut(layerfile,
                                            sampledict,
                                            precise=True,
                                            method=interp)
                            if layername in self.clips:
                                temp.setData(
                                    np.clip(temp.getData(),
                                            self.clips[layername][0],
                                            self.clips[layername][1]))
                            self.layerdict[layername] = TempHdf(
                                temp,
                                os.path.join(self.tempdir,
                                             '%s.hdf5' % layername))
                            del (temp)
            else:
                interp = self.interpolations[layername]
                temp = quickcut(layerfile,
                                sampledict,
                                precise=True,
                                method=interp)
                if layername in self.clips:
                    temp.setData(
                        np.clip(temp.getData(), self.clips[layername][0],
                                self.clips[layername][1]))
                if layername == 'rock':  # Convert unconsolidated sediments to a more reasonable coefficient
                    sub1 = temp.getData()
                    # Change to mixed sed rock coeff
                    sub1[sub1 <= -3.21] = -1.36
                    temp.setData(sub1)
                    self.notes += ('unconsolidated sediment coefficient '
                                   'changed to -1.36 (weaker) from -3.22 to '
                                   'better reflect that this unit is not '
                                   'actually strong\n')

                self.layerdict[layername] = TempHdf(
                    temp, os.path.join(self.tempdir, '%s.hdf5' % layername))
                td = temp.getGeoDict()
                if td != sampledict:
                    raise Exception(
                        'Geodictionaries of resampled files do not match')

                if layerfile == self.slopefile:
                    flag = 0
                    if self.slopemin == 'none' and self.slopemax == 'none':
                        flag = 1
                    if self.slopemod is None:
                        slope1 = temp.getData().astype(float)
                        slope = 0
                    else:
                        try:
                            slope = temp.getData().astype(float)
                            slope1 = eval(self.slopemod)
                        except Exception:
                            print('slopemod provided not valid, continuing '
                                  'without slope thresholds.')
                            flag = 1
                    if flag == 0:
                        nonzero = np.array([(slope1 > self.slopemin) &
                                            (slope1 <= self.slopemax)])
                        self.nonzero = nonzero[0, :, :]
                        del (slope1)
                        del (slope)
                    else:
                        # Still remove areas where the slope equals exactly
                        # 0.0 to remove offshore liq areas.
                        nonzero = np.array([slope1 != 0.0])
                        self.nonzero = nonzero[0, :, :]
                        del (slope1)
                    didslope = True
                del (temp)

            print('Loading %s layer: %1.1f sec' % (layername, timer() - start))

        if didslope is False and self.slopefile is not None:
            # Slope didn't get read in yet
            temp = quickcut(self.slopefile,
                            sampledict,
                            precise=True,
                            method='bilinear')
            flag = 0
            if self.slopemin == 'none' and self.slopemax == 'none':
                flag = 1
            if self.slopemod is None:
                slope1 = temp.getData().astype(float)
                slope = 0
            else:
                try:
                    slope = temp.getData().astype(float)
                    slope1 = eval(self.slopemod)
                except Exception:
                    print('slopemod provided not valid, continuing without '
                          'slope thresholds')
                    flag = 1
            if flag == 0:
                nonzero = np.array([
                    (slope1 > self.slopemin) & (slope1 <= self.slopemax)
                ])
                self.nonzero = nonzero[0, :, :]
                del (slope1)
                del (slope)
            else:
                # Still remove areas where the slope equals exactly
                # 0.0 to remove offshore liq areas.
                nonzero = np.array([slope1 != 0.0])
                self.nonzero = nonzero[0, :, :]
                del (slope1)

        self.nuggets = [str(self.coeffs['b0'])]

        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)
        self.geodict = sampledict
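
# A minimal usage sketch (assuming this __init__ belongs to a
# LogisticModel-style class and that the config describes exactly one model;
# file paths are hypothetical):
#
#     from configobj import ConfigObj
#     config = ConfigObj('/path/to/model_config.ini')
#     lm = LogisticModel('/path/to/grid.xml', config, saveinputs=False)
#     print(lm.equation)  # e.g. 'b0 + (b1 * pga) + (b2 * slope)'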
Example no. 7
def get_exposures(grid,
                  pop_file,
                  shakefile=None,
                  shakethreshtype=None,
                  shakethresh=0.0,
                  probthresh=None,
                  stdgrid2D=None,
                  stdtype='mean',
                  maxP=1.):
    """
    Get exposure-based statistics.

    Args:
        grid: Model grid.
        pop_file (str):  Path to the landscan population grid.
        shakefile (str): Optional, path to shakemap file to use for ground
            motion threshold.
        shakethreshtype (str): Optional, type of ground motion to use for
            shakethresh: 'pga', 'pgv', or 'mmi'.
        shakethresh: Optional, float or list of shaking thresholds, in %g
            for pga, cm/s for pgv, or intensity value for mmi.
        probthresh: Optional, None or float; exclude any cells with
            probabilities less than or equal to this value.
        stdgrid2D: Grid2D object of model standard deviations (optional).
        stdtype (str): Assumption of spatial correlation used to compute
            the stdev of the statistics: 'max', 'min', or 'mean' of max
            and min.
        maxP (float): The maximum possible probability of the model.

    Returns:
        dict: Dictionary with keys:
            exp_pop_# where # is the shakethresh
            exp_std_# if stdgrid2D is supplied (stdev of exp_pop)
            elim_#, the maximum exposure value possible with the
                applied thresholds and given maxP value
            p_exp_# beta distribution shape factor p (sometimes called alpha)
            q_exp_# beta distribution shape factor q (sometimes called beta)
    """

    # If probthresh defined, zero out any areas less than or equal to
    # probthresh before proceeding
    if not isinstance(shakethresh, (list, np.ndarray)):
        shakethresh = [shakethresh]
    if probthresh is not None:
        origdata = grid.getData()
        moddat = origdata.copy()
        moddat[moddat <= probthresh] = 0.0
        moddat[np.isnan(origdata)] = float('nan')
        if stdgrid2D is not None:
            stddat = stdgrid2D.getData().copy()
            stddat[moddat <= probthresh] = 0.0
            stddat[np.isnan(origdata)] = 0.0
    else:
        moddat = grid.getData().copy()
        if stdgrid2D is not None:
            stddat = stdgrid2D.getData().copy()

    mdict = grid.getGeoDict()

    # Cut out area from population file
    popcut = quickcut(pop_file,
                      mdict,
                      precise=False,
                      extrasamp=2.,
                      method='nearest')
    popdat = popcut.getData()
    pdict = popcut.getGeoDict()

    # Pad grid with nans to beyond extent of pdict
    pad_dict = {}
    pad_dict['padleft'] = int(
        np.abs(np.ceil((mdict.xmin - pdict.xmin) / mdict.dx)))
    pad_dict['padright'] = int(
        np.abs(np.ceil((pdict.xmax - mdict.xmax) / mdict.dx)))
    pad_dict['padbottom'] = int(
        np.abs(np.ceil((mdict.ymin - pdict.ymin) / mdict.dy)))
    pad_dict['padtop'] = int(
        np.abs(np.ceil((pdict.ymax - mdict.ymax) / mdict.dy)))
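    # Padding out to at least the population grid's extent means the later
    # interpolation onto the finer grid has data everywhere the population
    # grid does, rather than clipping at the model's edges.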

    padgrid, mdict2 = Grid2D.padGrid(moddat, mdict, pad_dict)  # pads with inf
    padgrid[np.isinf(padgrid)] = float('nan')  # change to pad with nan
    padgrid = Grid2D(data=padgrid, geodict=mdict2)  # Turn into grid2d object

    if stdgrid2D is not None:
        padstdgrid, mdict3 = Grid2D.padGrid(stddat, mdict,
                                            pad_dict)  # pads with inf
        padstdgrid[np.isinf(padstdgrid)] = float(
            'nan')  # change to pad with nan
        padstdgrid = Grid2D(data=padstdgrid,
                            geodict=mdict3)  # Turn into grid2d object

    # Resample model grid so its resolution is an exact integer factor finer
    # than the population grid's resolution
    factor = np.round(pdict.dx / mdict2.dx)

    # Create geodictionary that is a factor of X higher res but otherwise
    # identical
    ndict = GeoDict.createDictFromBox(pdict.xmin, pdict.xmax, pdict.ymin,
                                      pdict.ymax, pdict.dx / factor,
                                      pdict.dy / factor)

    # Resample
    grid2 = padgrid.interpolate2(ndict, method='linear')

    # Get proportion of each cell that has values (to account properly
    # for any nans)
    prop = block_reduce(~np.isnan(grid2.getData().copy()),
                        block_size=(int(factor), int(factor)),
                        cval=float('nan'),
                        func=np.sum) / (factor**2.)

    # Now block reduce to same geodict as popfile
    modresamp = block_reduce(grid2.getData().copy(),
                             block_size=(int(factor), int(factor)),
                             cval=float('nan'),
                             func=np.nanmean)
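    # Together, popdat * prop * modresamp approximates a population-weighted
    # sum of probabilities over the fine cells: nanmean ignores nan cells,
    # and prop scales each coarse cell back down by the fraction of its fine
    # cells that actually held data.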

    if stdgrid2D is not None:
        grid2std = padstdgrid.interpolate2(ndict, method='linear')
        propstd = block_reduce(~np.isnan(grid2std.getData().copy()),
                               block_size=(int(factor), int(factor)),
                               cval=float('nan'),
                               func=np.sum) / (factor**2.)
        modresampstd = block_reduce(grid2std.getData().copy(),
                                    block_size=(int(factor), int(factor)),
                                    cval=float('nan'),
                                    func=np.nanmean)

    exp_pop = {}
    if shakefile is not None:
        # Resample shakefile to population grid
        shakemap = ShakeGrid.load(shakefile, resample=False)
        shakemap = shakemap.getLayer(shakethreshtype)
        shakemap = shakemap.interpolate2(pdict)
        shkdat = shakemap.getData()
        for shaket in shakethresh:
            threshmult = shkdat > shaket
            threshmult = threshmult.astype(float)
            mu = np.nansum(popdat * prop * modresamp * threshmult)
            exp_pop['exp_pop_%1.2fg' % (shaket / 100., )] = mu
            elim = maxP * np.nansum(popdat * prop * threshmult)
            exp_pop['elim_%1.2fg' % (shaket / 100., )] = elim
            if stdgrid2D is not None:
                totalmax = np.nansum(popdat * propstd * modresampstd *
                                     threshmult)
                totalmin = np.sqrt(
                    np.nansum(
                        (popdat * propstd * modresampstd * threshmult)**2.))
                if stdtype == 'max':
                    exp_pop['exp_std_%1.2fg' % (shaket / 100., )] = totalmax
                elif stdtype == 'min':
                    exp_pop['exp_std_%1.2fg' % (shaket / 100., )] = totalmin
                else:
                    exp_pop['exp_std_%1.2fg' %
                            (shaket / 100., )] = (totalmax + totalmin) / 2.
                # Beta distribution shape factors
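                # (Method-of-moments fit: treating exposure as
                # elim * Beta(p, q) and matching mean mu and variance var
                # gives p + q = (elim*mu - mu**2)/var - 1, then
                # p = (mu/elim)*(p+q) and q = (1 - mu/elim)*(p+q);
                # see the note after this function.)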
                var = exp_pop['exp_std_%1.2fg' % (shaket / 100., )]**2.
                exp_pop['p_exp_%1.2fg' % (shaket / 100., )] = (mu / elim) * (
                    (elim * mu - mu**2) / var - 1)
                exp_pop['q_exp_%1.2fg' %
                        (shaket / 100., )] = (1 - mu / elim) * (
                            (elim * mu - mu**2) / var - 1)

    else:
        mu = np.nansum(popdat * prop * modresamp)
        exp_pop['exp_pop_0.00g'] = mu
        elim = maxP * np.nansum(popdat * prop)
        exp_pop['elim_0.00g'] = elim
        if stdgrid2D is not None:
            totalmax = np.nansum(popdat * propstd * modresampstd)
            totalmin = np.sqrt(np.nansum(
                (popdat * propstd * modresampstd)**2.))
            if stdtype == 'max':
                exp_pop['exp_std_0.00g'] = totalmax
            elif stdtype == 'min':
                exp_pop['exp_std_0.00g'] = totalmin
            else:
                exp_pop['exp_std_0.00g'] = (totalmax + totalmin) / 2.
            # Beta distribution shape factors (keys mirror the shakefile
            # branch above: p_exp_* and q_exp_*, not exp_std_*)
            var = exp_pop['exp_std_0.00g']**2.
            exp_pop['p_exp_0.00g'] = (mu / elim) * (
                (elim * mu - mu**2) / var - 1)
            exp_pop['q_exp_0.00g'] = (1 - mu / elim) * (
                (elim * mu - mu**2) / var - 1)

    return exp_pop
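
A note on the beta shape factors computed above (a sketch of the apparent
derivation; this reasoning is an inference, not stated in the original): if
exposure is modeled as X = elim * B with B ~ Beta(p, q), matching the first
two moments to mu and var = sigma^2 gives

\[
\mathbb{E}[X] = \mathrm{elim}\,\frac{p}{p+q} = \mu, \qquad
\operatorname{Var}(X) = \mathrm{elim}^2\,\frac{pq}{(p+q)^2\,(p+q+1)} = \sigma^2,
\]

which solves to

\[
p + q = \frac{\mathrm{elim}\,\mu - \mu^2}{\sigma^2} - 1, \qquad
p = \frac{\mu}{\mathrm{elim}}\,(p+q), \qquad
q = \Bigl(1 - \frac{\mu}{\mathrm{elim}}\Bigr)(p+q),
\]

exactly the p_exp_* and q_exp_* expressions in the code above.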
Example n. 8
def computeHagg(grid2D,
                proj='moll',
                probthresh=0.0,
                shakefile=None,
                shakethreshtype='pga',
                shakethresh=0.0,
                stdgrid2D=None,
                stdtype='mean',
                maxP=1.):
    """
    Computes the Aggregate Hazard (Hagg), which is equal to the
    probability * area of each grid cell. For models that compute areal
    coverage, this is equivalent to the total predicted area affected
    in km2.

    Args:
        grid2D: grid2D object of model output.
        proj: projection to use to obtain equal area, 'moll'  mollweide, or
            'laea' lambert equal area.
        probthresh: Probability threshold, any values less than this will not
            be included in aggregate hazard estimation.
        shakefile: Optional, path to shakemap file to use for ground motion
            threshold.
        shakethreshtype: Optional, Type of ground motion to use for
            shakethresh, 'pga', 'pgv', or 'mmi'.
        shakethresh: Optional, Float or list of shaking thresholds in %g for
            pga, cm/s for pgv, float for mmi.
        stdgrid2D: grid2D object of model standard deviations (optional)
        stdtype (str): assumption of spatial correlation used to compute
            the stdev of the statistics, 'max', 'min' or 'mean' of max and min
        maxP (float): the maximum possible probability of the model

    Returns:
        dict: Dictionary with keys:
            hagg_#g where # is the shakethresh
            hagg_std_# if stdgrid2D is supplied (stdev of hagg)
            hlim_#, the maximum aggregate hazard value possible with the
                applied thresholds and given maxP value
            N_# the number of cells exceeding that value (in projected coords)
            cell_area_km2 grid cell area
            p_hagg_# beta distribution shape factor p (sometimes called alpha)
            q_hagg_# beta distribution shape factor q (sometimes called beta)
    """
    bounds = grid2D.getBounds()
    lat0 = np.mean((bounds[2], bounds[3]))
    lon0 = np.mean((bounds[0], bounds[1]))
    projs = ('+proj=%s +lat_0=%f +lon_0=%f +x_0=0 +y_0=0 +ellps=WGS84 '
             '+units=km +no_defs' % (proj, lat0, lon0))
    geodict = grid2D.getGeoDict()

    if shakefile is not None:
        if not isinstance(shakethresh, (list, np.ndarray)):
            shakethresh = [shakethresh]
        for shaket in shakethresh:
            if shaket < 0.:
                raise Exception('shaking threshold must be greater than or '
                                'equal to zero')
        tmpdir = tempfile.mkdtemp()
        # resample shakemap to grid2D
        temp = ShakeGrid.load(shakefile)
        junkfile = os.path.join(tmpdir, 'temp.bil')
        GDALGrid.copyFromGrid(temp.getLayer(shakethreshtype)).save(junkfile)
        shk = quickcut(junkfile, geodict, precise=True, method='bilinear')
        shutil.rmtree(tmpdir)
        if shk.getGeoDict() != geodict:
            raise Exception('shakemap was not resampled to exactly the same '
                            'geodict as the model')

    if probthresh < 0.:
        raise Exception('probability threshold must be greater than or '
                        'equal to zero')

    grid = grid2D.project(projection=projs, method='bilinear')
    geodictRS = grid.getGeoDict()
    cell_area_km2 = geodictRS.dx * geodictRS.dy
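    # dx and dy are in km here because the projection string above specifies
    # +units=km, so dx * dy is the cell area in km2 directly.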
    model = grid.getData().copy()
    if stdgrid2D is not None:
        stdgrid = stdgrid2D.project(projection=projs, method='bilinear')
        std = stdgrid.getData().copy()
        std[np.isnan(model)] = -1.

    Hagg = {}
    model[np.isnan(model)] = -1.
    if shakefile is not None:
        shkgrid = shk.project(projection=projs)
        shkdat = shkgrid.getData()
        for shaket in shakethresh:
            # use -1 to avoid nan errors and warnings, will always be thrown
            # out because default probthresh is 0.
            model[np.isnan(shkdat)] = -1.
            model[shkdat < shaket] = -1.
            mu = np.sum(model[model >= probthresh] * cell_area_km2)
            Hagg['hagg_%1.2fg' % (shaket / 100., )] = mu
            Hagg['cell_area_km2'] = cell_area_km2
            N = np.sum(model >= probthresh)
            Hagg['N_%1.2fg' % (shaket / 100., )] = N
            hlim = cell_area_km2 * N * maxP
            Hagg['hlim_%1.2fg' % (shaket / 100., )] = hlim
            if stdgrid2D is not None:
                totalmin = cell_area_km2 * np.sqrt(
                    np.nansum((std[model >= probthresh])**2.))
                totalmax = np.nansum(std[model >= probthresh] * cell_area_km2)
                if stdtype == 'max':
                    Hagg['hagg_std_%1.2fg' % (shaket / 100., )] = totalmax
                elif stdtype == 'min':
                    Hagg['hagg_std_%1.2fg' % (shaket / 100., )] = totalmin
                else:
                    Hagg['hagg_std_%1.2fg' %
                         (shaket / 100., )] = (totalmax + totalmin) / 2.
                var = Hagg['hagg_std_%1.2fg' % (shaket / 100., )]**2.
                # Beta distribution shape factors
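                # (Same method-of-moments fit as in get_exposures,
                # with elim replaced by hlim.)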
                Hagg['p_hagg_%1.2fg' % (shaket / 100., )] = (mu / hlim) * (
                    (hlim * mu - mu**2) / var - 1)
                Hagg['q_hagg_%1.2fg' % (shaket / 100., )] = (1 - mu / hlim) * (
                    (hlim * mu - mu**2) / var - 1)
    else:
        mu = np.sum(model[model >= probthresh] * cell_area_km2)
        Hagg['hagg_0.00g'] = mu
        Hagg['cell_area_km2'] = cell_area_km2
        N = np.sum(model >= probthresh)
        Hagg['N_0.00g'] = N
        hlim = cell_area_km2 * N * maxP
        Hagg['hlim_0.00g'] = hlim
        if stdgrid2D is not None:
            totalmax = np.nansum(std[model >= probthresh] * cell_area_km2)
            totalmin = cell_area_km2 * np.sqrt(
                np.nansum((std[model >= probthresh])**2.))
            if stdtype == 'max':
                Hagg['hagg_std_0.00g'] = totalmax
            elif stdtype == 'min':
                Hagg['hagg_std_0.00g'] = totalmin
            else:
                Hagg['hagg_std_0.00g'] = (totalmax + totalmin) / 2.

            var = Hagg['hagg_std_0.00g']**2.
            # Beta distribution shape factors
            Hagg['p_hagg_0.00g'] = (mu / hlim) * (
                (hlim * mu - mu**2) / var - 1)
            Hagg['q_hagg_0.00g'] = (1 - mu / hlim) * (
                (hlim * mu - mu**2) / var - 1)

    return Hagg
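
A minimal usage sketch for computeHagg (an illustration under assumptions:
the synthetic grid, the random probabilities, and the parameter choices are
invented here and are not part of the original example):

import numpy as np
from mapio.geodict import GeoDict
from mapio.grid2d import Grid2D

# Small synthetic probability grid (values in [0, 1]) over a 1-degree box,
# built with the same mapio classes the examples above already use.
gdict = GeoDict.createDictFromBox(-120.0, -119.0, 34.0, 35.0, 0.01, 0.01)
probs = np.random.default_rng(0).uniform(0.0, 0.3, (gdict.ny, gdict.nx))
model = Grid2D(data=probs, geodict=gdict)

# With no shakemap supplied, results come back under the '0.00g' keys.
stats = computeHagg(model, probthresh=0.05)
print(stats['hagg_0.00g'], stats['N_0.00g'], stats['cell_area_km2'])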