def test_zhu2015():
    conf_file = os.path.join(upone, 'defaultconfigfiles', 'models',
                             'zhu_2015.ini')
    conf = ConfigObj(conf_file)
    data_path = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    # Check slopefile trimming
    conf['zhu_2015']['slopefile'] = 'global_gted_maxslope_30c.flt'
    conf = correct_config_filepaths(data_path, conf)
    # Run with divfactor of 1
    conf['zhu_2015']['divfactor'] = '1.'

    shakefile = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    lm = LM.LogisticModel(shakefile, conf, saveinputs=True)
    maplayers = lm.calculate()

    pgrid = maplayers['model']['grid']
    test_data = pgrid.getData()

    if changetarget:
        # To change target data:
        pgrd = GMTGrid(pgrid.getData(), pgrid.getGeoDict())
        pgrd.save(
            os.path.join(datadir, 'loma_prieta', 'targets', 'zhu2015.grd'))

    # Load target
    target_file = os.path.join(datadir, 'loma_prieta', 'targets',
                               'zhu2015.grd')
    target_grid = GMTGrid.load(target_file)
    target_data = target_grid.getData()
    # Assert
    np.testing.assert_allclose(target_data, test_data, rtol=1e-3)
Example #2
    def getSlabInfo(self, lat, lon):
        """Return a dictionary with depth,dip,strike, and depth uncertainty.

        Args:
            lat (float):  Hypocentral latitude in decimal degrees.
            lon (float):  Hypocentral longitude in decimal degrees.
        Returns:
            dict: Dictionary containing keys:
                - region: Three-letter Slab model region code.
                - strike: Slab model strike angle.
                - dip: Slab model dip angle.
                - depth: Slab model depth (km).
                - maximum_interface_depth: Maximum interface depth (km).
                - depth_uncertainty: Slab model depth uncertainty.
        """
        slabinfo = {}
        if not self.contains(lat, lon):
            return slabinfo
        fpath, fname = os.path.split(self._depth_file)
        parts = fname.split('_')
        region = parts[0]
        depth_grid = GMTGrid.load(self._depth_file)
        # slab grids are negative depth
        depth = -1 * depth_grid.getValue(lat, lon)
        dip_grid = GMTGrid.load(self._dip_file)
        strike_grid = GMTGrid.load(self._strike_file)
        if self._error_file is not None:
            error_grid = GMTGrid.load(self._error_file)
            error = error_grid.getValue(lat, lon)
        else:
            error = DEFAULT_DEPTH_ERROR

        # Slab 2.0 dip directions are positive, 1.0 is negative
        dip = dip_grid.getValue(lat, lon)
        if dip < 0:
            dip = dip * -1
        strike = strike_grid.getValue(lat, lon)
        if strike < 0:
            strike += 360

        if np.isnan(strike):
            error = np.nan

        # get the maximum interface depth from table (if present)
        if self._slab_table is not None:
            df = self._slab_table
            max_int_depth = df[df['zone'] ==
                               region].iloc[0]['interface_max_depth']
        else:
            max_int_depth = MAX_INTERFACE_DEPTH

        slabinfo = {
            'region': region,
            'strike': strike,
            'dip': dip,
            'depth': depth,
            'maximum_interface_depth': max_int_depth,
            'depth_uncertainty': error
        }
        return slabinfo
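The sign and angle conventions handled above can be checked in isolation. Below is a minimal standalone sketch (illustrative only, not part of the class; the function name is hypothetical):

def normalize_slab_values(depth, dip, strike):
    """Apply the slab-grid sign conventions used by getSlabInfo."""
    depth = -1 * depth   # slab grids store depth as negative values
    if dip < 0:          # Slab 1.0 dips are negative, Slab 2.0 positive
        dip = -dip
    if strike < 0:       # wrap strikes into [0, 360)
        strike += 360
    return depth, dip, strike

# normalize_slab_values(-55.0, -12.0, -10.0) -> (55.0, 12.0, 350.0)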
Example #3
def test_godt_2008():
    conf_file = os.path.join(upone, 'defaultconfigfiles', 'models',
                             'godt_2008.ini')
    conf = ConfigObj(conf_file)
    conf['godt_2008']['divfactor'] = '1.'
    data_path = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    conf = correct_config_filepaths(data_path, conf)
    shakefile = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    maplayers = godt2008(shakefile, conf)

    pgrid = maplayers['model']['grid']
    test_data = pgrid.getData()

    if changetarget:
        # To change target data:
        pgrd = GMTGrid(pgrid.getData(), pgrid.getGeoDict())
        pgrd.save(
            os.path.join(datadir, 'loma_prieta', 'targets', 'godt_2008.grd'))

    # Load target
    target_file = os.path.join(datadir, 'loma_prieta', 'targets',
                               'godt_2008.grd')
    target_grid = GMTGrid.load(target_file)
    target_data = target_grid.getData()

    # Assert
    np.testing.assert_allclose(target_data, test_data, rtol=1e-3)
Example #4
def sampleGridFile(gridfile, xypoints, method='nearest'):
    """Sample grid file (ESRI or GMT format) at each of a set of XY (decimal degrees) points.

    :param gridfile:
      Name of ESRI or GMT grid format file from which to sample values.
    :param xypoints:
      2D numpy array of XY points, decimal degrees.
    :param method:
      Interpolation method, either 'nearest' or 'linear'.
    :returns:
      1D numpy array of grid values at each of input XY points.
    """
    xmin = np.min(xypoints[:, 0])
    xmax = np.max(xypoints[:, 0])
    ymin = np.min(xypoints[:, 1])
    ymax = np.max(xypoints[:, 1])
    gridtype = None
    try:
        fdict = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception as error:
        try:
            fdict = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except:
            pass
    if gridtype is None:
        raise Exception(
            'File "%s" does not appear to be either a GMT grid or an ESRI grid.'
            % gridfile)
    xmin = xmin - fdict.dx * 3
    xmax = xmax + fdict.dx * 3
    ymin = ymin - fdict.dy * 3
    ymax = ymax + fdict.dy * 3
    #bounds = (xmin, xmax, ymin, ymax)
    if gridtype == 'gmt':
        fgeodict = GMTGrid.getFileGeoDict(gridfile)
    else:
        fgeodict = GDALGrid.getFileGeoDict(gridfile)
    dx, dy = (fgeodict.dx, fgeodict.dy)
    sdict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    if gridtype == 'gmt':
        grid = GMTGrid.load(gridfile,
                            samplegeodict=sdict,
                            resample=False,
                            method=method,
                            doPadding=True)
    else:
        grid = GDALGrid.load(gridfile,
                             samplegeodict=sdict,
                             resample=False,
                             method=method,
                             doPadding=True)

    return sampleFromGrid(grid, xypoints)
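A minimal usage sketch, assuming sampleGridFile and its helper sampleFromGrid are importable, and that 'some_grid.grd' is a placeholder path to an existing GMT or ESRI grid:

import numpy as np

# lon, lat pairs in decimal degrees
xypoints = np.array([[-122.1, 37.4],
                     [-121.9, 37.0]])
values = sampleGridFile('some_grid.grd', xypoints, method='nearest')
print(values)  # 1D array, one sampled value per input point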
Example #5
def grdcmp(x, y, rtol=1e-6, atol=0):
    """
    Compare contents of two GMT GRD files using numpy assert method.

    Args:
        x: Path to a GRD file.
        y: Path to another GRD file.
        rtol: Relative tolerance passed to np.testing.assert_allclose.
        atol: Absolute tolerance passed to np.testing.assert_allclose.
    """
    xgrid = GMTGrid.load(x)
    xdata = xgrid.getData()
    ygrid = GMTGrid.load(y)
    ydata = ygrid.getData()
    np.testing.assert_allclose(xdata, ydata, rtol=rtol, atol=atol)
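A usage sketch for grdcmp, assuming mapio is installed; two identical single-band grids are written to temporary files just for the comparison:

import os
import tempfile
import numpy as np
from mapio.gmt import GMTGrid
from mapio.geodict import GeoDict

gd = GeoDict({'xmin': 0.5, 'xmax': 4.5, 'ymin': 0.5, 'ymax': 4.5,
              'dx': 1.0, 'dy': 1.0, 'nx': 5, 'ny': 5})
data = np.ones((5, 5), dtype=np.float32)
tdir = tempfile.mkdtemp()
file1 = os.path.join(tdir, 'a.grd')
file2 = os.path.join(tdir, 'b.grd')
GMTGrid(data, gd).save(file1)
GMTGrid(data.copy(), gd).save(file2)
grdcmp(file1, file2)  # identical contents, so no assertion error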
Example #6
def getFileType(filename):
    """
    Determine whether input file is a shapefile or a grid (ESRI or GMT).

    :param filename:
      String path to candidate filename.
    :returns:
      String, one of 'shapefile','grid','unknown'.
    """

    fname, fext = os.path.splitext(filename)
    dbf = fname + '.dbf'
    ftype = 'unknown'
    if os.path.isfile(dbf):
        ftype = 'shapefile'
    else:
        try:
            fdict = GMTGrid.getFileGeoDict(filename)
            ftype = 'grid'
        except Exception as error:
            try:
                fdict = GDALGrid.getFileGeoDict(filename)
                ftype = 'grid'
            except:
                pass
    return ftype
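A usage sketch with placeholder paths; a .shp with a matching .dbf maps to 'shapefile', a readable GMT or ESRI grid to 'grid', and anything else to 'unknown':

for candidate in ['zones.shp', 'global_vs30.grd', 'README.txt']:
    print(candidate, getFileType(candidate))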
Example #7
def sampleGridFile(gridfile,xypoints,method='nearest'):
    """
    Sample grid file (ESRI or GMT format) at each of a set of XY (decimal degrees) points.
    :param gridfile:
      Name of ESRI or GMT grid format file from which to sample values.
    :param xypoints:
      2D numpy array of XY points, decimal degrees.
    :param method:
      Interpolation method, either 'nearest' or 'linear'.
    :returns:
      1D numpy array of grid values at each of input XY points.
    """
    xmin = np.min(xypoints[:,0])
    xmax = np.max(xypoints[:,0])
    ymin = np.min(xypoints[:,1])
    ymax = np.max(xypoints[:,1])
    gridtype = None
    try:
        fdict = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception as error:
        try:
            fdict = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except:
            pass
    if gridtype is None:
        raise Exception('File "%s" does not appear to be either a GMT grid or an ESRI grid.' % gridfile)
    xmin = xmin - fdict.dx*3
    xmax = xmax + fdict.dx*3
    ymin = ymin - fdict.dy*3
    ymax = ymax + fdict.dy*3
    sdict = GeoDict.createDictFromBox(xmin,xmax,ymin,ymax,fdict.dx,fdict.dy)
    if gridtype == 'gmt':
        grid = GMTGrid.load(gridfile,samplegeodict=sdict,resample=True,method=method,doPadding=True)
    else:
        grid = GDALGrid.load(gridfile,samplegeodict=sdict,resample=True,method=method,doPadding=True)

    return sampleFromGrid(grid,xypoints)
Example #8
def _load(vs30File,
          samplegeodict=None,
          resample=False,
          method='linear',
          doPadding=False,
          padValue=np.nan):
    try:
        vs30grid = GMTGrid.load(vs30File,
                                samplegeodict=samplegeodict,
                                resample=resample,
                                method=method,
                                doPadding=doPadding,
                                padValue=padValue)
    except Exception as msg1:
        try:
            vs30grid = GDALGrid.load(vs30File,
                                     samplegeodict=samplegeodict,
                                     resample=resample,
                                     method=method,
                                     doPadding=doPadding,
                                     padValue=padValue)
        except Exception as msg2:
            msg = 'Load failure of %s - error messages: "%s"\n "%s"' % (
                vs30File, str(msg1), str(msg2))
            raise ShakeMapException(msg)

    if vs30grid.getData().dtype != np.float64:
        vs30grid.setData(vs30grid.getData().astype(np.float64))

    return vs30grid
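A usage sketch for the fallback loader; the vs30 path is a placeholder, and either a GMT or an ESRI file works because GDALGrid is tried whenever GMTGrid fails:

vs30grid = _load('global_vs30.grd')  # placeholder path
print(vs30grid.getData().dtype)      # always float64 on return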
Example #10
def getFileType(filename):
    """
    Determine whether input file is a shapefile or a grid (ESRI or GMT).

    Args:
        filename (str): Path to candidate filename.

    Returns:
        str: 'dir', 'gmt', 'esri', or 'unknown'.
    """
    # TODO MOVE TO MAPIO.
    if os.path.isdir(filename):
        return 'dir'
    ftype = GMTGrid.getFileType(filename)
    if ftype != 'unknown':
        return 'gmt'
    # Skip over ESRI header files
    if filename.endswith('.hdr'):
        return 'unknown'
    try:
        GDALGrid.getFileGeoDict(filename)
        return 'esri'
    except:
        pass
    return 'unknown'
Example #11
    def _load(vs30File, samplegeodict=None, resample=False, method='linear',
              doPadding=False, padValue=np.nan):
        try:
            vs30grid = GMTGrid.load(vs30File,
                                    samplegeodict=samplegeodict,
                                    resample=resample,
                                    method=method,
                                    doPadding=doPadding,
                                    padValue=padValue)
        except Exception as msg1:
            try:
                vs30grid = GDALGrid.load(vs30File,
                                         samplegeodict=samplegeodict,
                                         resample=resample,
                                         method=method,
                                         doPadding=doPadding,
                                         padValue=padValue)
            except Exception as msg2:
                msg = 'Load failure of %s - error messages: "%s"\n "%s"' % (
                    vs30File, str(msg1), str(msg2))
                raise ShakeLibException(msg)

        if vs30grid.getData().dtype != np.float64:
            vs30grid.setData(vs30grid.getData().astype(np.float64))

        return vs30grid
Example #12
def test_mapmaker_intensity():
    homedir = os.path.dirname(
        os.path.abspath(__file__))  # where is this script?
    shakedir = os.path.abspath(os.path.join(homedir, '..', '..', '..'))
    out_file = os.path.join(shakedir, 'tests', 'data', 'containers',
                            'northridge', 'shake_result.hdf')
    container = ShakeMapOutputContainer.load(out_file)
    topofile = os.path.join(homedir, '..', '..', 'data', 'install', 'data',
                            'mapping', 'CA_topo.grd')

    info = container.getMetadata()
    xmin = info['output']['map_information']['min']['longitude']
    xmax = info['output']['map_information']['max']['longitude']
    ymin = info['output']['map_information']['min']['latitude']
    ymax = info['output']['map_information']['max']['latitude']
    xmin = float(xmin) - 0.1
    xmax = float(xmax) + 0.1
    ymin = float(ymin) - 0.1
    ymax = float(ymax) + 0.1
    dy = float(info['output']['map_information']['grid_spacing']['latitude'])
    dx = float(info['output']['map_information']['grid_spacing']['longitude'])
    sampledict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    topogrid = GMTGrid.load(topofile, samplegeodict=sampledict, resample=False)

    outpath = mkdtemp()

    model_config = container.getConfig()
    comp = container.getComponents('MMI')[0]
    textfile = os.path.join(get_data_path(), 'mapping', 'map_strings.en')
    text_dict = get_text_strings(textfile)

    cities = Cities.fromDefault()
    d = {
        'imtype': 'MMI',
        'topogrid': topogrid,
        'allcities': cities,
        'states_provinces': None,
        'countries': None,
        'oceans': None,
        'lakes': None,
        'roads': None,
        'faults': None,
        'datadir': outpath,
        'operator': 'NEIC',
        'filter_size': 10,
        'info': info,
        'component': comp,
        'imtdict': container.getIMTGrids('MMI', comp),
        'ruptdict': copy.deepcopy(container.getRuptureDict()),
        'stationdict': container.getStationDict(),
        'config': model_config,
        'tdict': text_dict
    }

    try:
        fig1, fig2 = draw_map(d)
    except Exception:
        assert 1 == 2
    finally:
        shutil.rmtree(outpath)
Example #13
def getNoDataGrid(predictors,xmin,xmax,ymin,ymax):
    txmin = xmin
    txmax = xmax
    tymin = ymin
    tymax = ymax
    mindx = 9999999999
    mindy = 9999999999
    #figure out bounds enclosing all files
    for predname,predfile in predictors.items():
        if not os.path.isfile(predfile):
            continue
        ftype = getFileType(predfile)
        if ftype == 'shapefile':
            f = fiona.open(predfile,'r')
            bxmin,bymin,bxmax,bymax = f.bounds
            f.close()
            if bxmin < txmin:
                txmin = bxmin
            if bxmax > txmax:
                txmax = bxmax
            if bymin < tymin:
                tymin = bymin
            if bymax > tymax:
                tymax = bymax
        elif ftype == 'grid':
            gridtype = getGridType(predfile)
            if gridtype is None:
                raise Exception('File "%s" does not appear to be either a GMT grid or an ESRI grid.' % predfile)
            fdict = getFileGeoDict(predfile,gridtype)
            if fdict.dx < mindx:
                mindx = fdict.dx
            if fdict.dy < mindy:
                mindy = fdict.dy
            if fdict.xmin < txmin:
                txmin = fdict.xmin
            if fdict.xmax > txmax:
                txmax = fdict.xmax
            if fdict.ymin < tymin:
                tymin = fdict.ymin
            if fdict.ymax > tymax:
                tymax = fdict.ymax
    sdict = GeoDict.createDictFromBox(txmin,txmax,tymin,tymax,mindx,mindy)
    nanarray = np.zeros((sdict.ny,sdict.nx),dtype=np.int8)
    for predname,predfile in predictors.items():
        if not os.path.isfile(predfile):
            continue
        ftype = getFileType(predfile)
        if ftype == 'shapefile':
            shapes = list(fiona.open(predfile,'r'))
            grid = Grid2D.rasterizeFromGeometry(shapes,sdict)
        else:
            gridtype = getGridType(predfile)
            if gridtype == 'gmt':
                grid = GMTGrid.load(predfile,samplegeodict=sdict,resample=True,method='nearest',doPadding=True)
            else:
                grid = GDALGrid.load(predfile,samplegeodict=sdict,resample=True,method='nearest',doPadding=True)
        nangrid = np.isnan(grid.getData())
        nanarray = nanarray | nangrid
    nangrid = Grid2D(data=nanarray,geodict=sdict)
    return nangrid
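A usage sketch, assuming the helper functions above (getFileType, getGridType, getFileGeoDict) are importable and the predictor paths are placeholders; the returned Grid2D holds 1 wherever any predictor layer is NaN:

predictors = {'slope': 'global_slope.grd',
              'geology': 'geology_units.shp'}
nangrid = getNoDataGrid(predictors, -123.0, -121.0, 36.0, 38.0)
mask = nangrid.getData() == 1  # cells missing data in at least one layer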
Example #14
def sampleGridFile(gridfile,xypoints,method='nearest'):
    """
    Sample grid file (ESRI or GMT format) at each of a set of XY (decimal degrees) points.
    :param gridfile:
      Name of ESRI or GMT grid format file from which to sample values.
    :param xypoints:
      2D numpy array of XY points, decimal degrees.
    :param method:
      Interpolation method, either 'nearest' or 'linear'.
    :returns:
      1D numpy array of grid values at each of input XY points.
    """
    if not len(xypoints):
        return np.array([])
    xmin = np.min(xypoints[:,0])
    xmax = np.max(xypoints[:,0])
    ymin = np.min(xypoints[:,1])
    ymax = np.max(xypoints[:,1])
    gridtype = None
    try:
        fdict,tmp = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception as error:
        try:
            fdict,tmp = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except:
            pass
    if gridtype is None:
        raise Exception('File "%s" does not appear to be either a GMT grid or an ESRI grid.' % gridfile)
    xmin = xmin - fdict.dx*3
    xmax = xmax + fdict.dx*3
    ymin = ymin - fdict.dy*3
    ymax = ymax + fdict.dy*3
    bounds = (xmin,xmax,ymin,ymax)
    if gridtype == 'gmt':
        fgeodict,tmp = GMTGrid.getFileGeoDict(gridfile)
    else:
        fgeodict,tmp = GDALGrid.getFileGeoDict(gridfile)
    dx,dy = (fgeodict.dx,fgeodict.dy)
    sdict = GeoDict.createDictFromBox(xmin,xmax,ymin,ymax,dx,dy)
    if gridtype == 'gmt':
        grid = GMTGrid.load(gridfile,samplegeodict=sdict,resample=True,method=method,doPadding=True)
    else:
        grid = GDALGrid.load(gridfile,samplegeodict=sdict,resample=True,method=method,doPadding=True)

    return sampleFromGrid(grid,xypoints)
Example #15
def test_jessee_2018():
    conf_file = os.path.join(upone, 'defaultconfigfiles', 'models',
                             'jessee_2018.ini')
    conf = ConfigObj(conf_file)
    data_path = os.path.join(datadir, 'loma_prieta', 'model_inputs')
    conf = correct_config_filepaths(data_path, conf)
    shakefile = os.path.join(datadir, 'loma_prieta', 'grid.xml')
    uncertainty_file = os.path.join(datadir, 'loma_prieta', 'uncertainty.xml')

    lm = LM.LogisticModel(shakefile,
                          conf,
                          saveinputs=True,
                          uncertfile=uncertainty_file)
    maplayers = lm.calculate()

    pgrid = maplayers['model']['grid']
    stdgrid = maplayers['std']['grid']
    test_data = pgrid.getData()
    test_data_std = stdgrid.getData()

    if changetarget:
        # To change target data:
        pgrd = GMTGrid(pgrid.getData(), pgrid.getGeoDict())
        pgrd.save(
            os.path.join(datadir, 'loma_prieta', 'targets', 'jessee_2018.grd'))
        stdgrd = GMTGrid(stdgrid.getData(), stdgrid.getGeoDict())
        stdgrd.save(
            os.path.join(datadir, 'loma_prieta', 'targets',
                         'jessee_2018_std.grd'))

    # Load target
    target_file = os.path.join(datadir, 'loma_prieta', 'targets',
                               'jessee_2018.grd')
    target_grid = GMTGrid.load(target_file)
    target_data = target_grid.getData()
    std_file = os.path.join(datadir, 'loma_prieta', 'targets',
                            'jessee_2018_std.grd')
    target_grid_std = GMTGrid.load(std_file)
    target_data_std = target_grid_std.getData()

    # Assert
    np.testing.assert_allclose(target_data, test_data, rtol=1e-3)
    np.testing.assert_allclose(target_data_std, test_data_std, rtol=1e-3)
Example #16
def _getFileGeoDict(fname):
    geodict = None
    try:
        geodict = GMTGrid.getFileGeoDict(fname)
    except Exception as msg1:
        try:
            geodict = GDALGrid.getFileGeoDict(fname)
        except Exception as msg2:
            msg = 'File geodict failure with %s - error messages: "%s"\n "%s"' % (fname,str(msg1),str(msg2))
            raise ShakeMapException(msg)
    return geodict
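A usage sketch with a placeholder path. Note that some mapio versions return a (geodict, first_column) tuple from getFileGeoDict, in which case this helper would need the tuple unpacking seen in other examples here:

geodict = _getFileGeoDict('global_vs30.grd')
print(geodict.xmin, geodict.xmax, geodict.ymin, geodict.ymax)
print(geodict.dx, geodict.dy, geodict.nx, geodict.ny)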
Example #18
def getGridType(gridfile):
    gridtype = None
    try:
        fdict = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception as error:
        try:
            fdict = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except:
            pass
    return gridtype
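A usage sketch with a placeholder path; callers are expected to check for None before loading:

from mapio.gmt import GMTGrid
from mapio.gdal import GDALGrid

gridtype = getGridType('some_layer.grd')
if gridtype is None:
    raise ValueError('not a recognized grid format')
loader = GMTGrid if gridtype == 'gmt' else GDALGrid
grid = loader.load('some_layer.grd')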
Example #19
def get_file_type(file):
    """Internal method to figure out which file type (GMT or GDAL) the population/country code 
    grid files are.
    """
    if GMTGrid.getFileType(file) == 'unknown':
        try:
            gdict = GDALGrid.getFileGeoDict(file)
            return GDALGrid
        except:
            pass
    else:
        return GMTGrid
    return None
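Unlike the string-returning variants above, this helper returns a class, so the result can be used directly as a loader. A usage sketch with a placeholder path:

GridClass = get_file_type('landscan_population.flt')
if GridClass is not None:
    popgrid = GridClass.load('landscan_population.flt')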
Example #20
def getFileType(filename):
    """Determine whether the input is a directory, a GMT grid, an ESRI grid, or unknown."""
    if os.path.isdir(filename):
        return 'dir'
    ftype = GMTGrid.getFileType(filename)
    if ftype != 'unknown':
        return 'gmt'
    #skip over ESRI header files
    if filename.endswith('.hdr'):
        return 'unknown'
    try:
        GDALGrid.getFileGeoDict(filename)
        return 'esri'
    except:
        pass
    return 'unknown'
Example #21
def test_mapmaker_contour():
    homedir = os.path.dirname(os.path.abspath(
        __file__))  # where is this script?
    shakedir = os.path.abspath(os.path.join(homedir, '..', '..', '..'))
    out_file = os.path.join(shakedir, 'tests', 'data',
                            'containers', 'northridge',
                            'shake_result.hdf')
    container = ShakeMapOutputContainer.load(out_file)
    topofile = os.path.join(homedir, '..', '..', 'data', 'install', 'data',
                            'mapping', 'CA_topo.grd')

    info = container.getMetadata()
    xmin = info['output']['map_information']['min']['longitude']
    xmax = info['output']['map_information']['max']['longitude']
    ymin = info['output']['map_information']['min']['latitude']
    ymax = info['output']['map_information']['max']['latitude']
    xmin = float(xmin) - 0.1
    xmax = float(xmax) + 0.1
    ymin = float(ymin) - 0.1
    ymax = float(ymax) + 0.1
    dy = float(info['output']['map_information']
               ['grid_spacing']['latitude'])
    dx = float(info['output']['map_information']
               ['grid_spacing']['longitude'])
    sampledict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    topogrid = GMTGrid.load(topofile,
                            samplegeodict=sampledict,
                            resample=False)

    oceanfile = os.path.join(homedir, '..', '..', 'data', 'install', 'data',
                             'mapping', 'northridge_ocean.json')
    outpath = mkdtemp()
    filter_size = 10
    try:
        pdf, png = draw_contour(container, 'PGA', topogrid, oceanfile,
                                outpath, 'NEIC', filter_size)
        print(pdf)
    except Exception:
        assert 1 == 2
    finally:
        shutil.rmtree(outpath)
Example #22
    def contains(self, lat, lon):
        """Check to see if input coordinates are contained inside Slab model.

        Args:
            lat (float):  Hypocentral latitude in decimal degrees.
            lon (float):  Hypocentral longitude in decimal degrees.
        Returns:
            bool: True if point falls inside minimum bounding box of slab model.
        """
        gdict, tmp = GMTGrid.getFileGeoDict(self._depth_file)
        gxmin = gdict.xmin
        gxmax = gdict.xmax
        if lon < 0:
            if gxmin > gxmax:
                gxmin -= 360
        else:
            if gxmin > gxmax:
                gxmax += 360
        if lat >= gdict.ymin and lat <= gdict.ymax and lon >= gxmin and lon <= gxmax:
            return True
        return False
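The 180/-180 longitude handling in contains() can be exercised with a standalone sketch (a pure function mimicking the logic above; the name is illustrative):

def bbox_contains(gxmin, gxmax, gymin, gymax, lat, lon):
    """Bounding-box test for a box that may cross the antimeridian."""
    if lon < 0:
        if gxmin > gxmax:
            gxmin -= 360
    else:
        if gxmin > gxmax:
            gxmax += 360
    return gymin <= lat <= gymax and gxmin <= lon <= gxmax

# Box spanning 170E..170W (gxmin=170 > gxmax=-170 flags the crossing):
# bbox_contains(170, -170, -10, 10, 0.0, 175.0)  -> True
# bbox_contains(170, -170, -10, 10, 0.0, -175.0) -> True
# bbox_contains(170, -170, -10, 10, 0.0, 160.0)  -> False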
Example #23
    def main(self, gmrecords):
        """Compute station metrics.

        Args:
            gmrecords:
                GMrecordsApp instance.
        """
        logging.info('Running subcommand \'%s\'' % self.command_name)

        self.gmrecords = gmrecords
        self._check_arguments()
        self._get_events()

        vs30_grids = None
        if gmrecords.conf is not None:
            if 'vs30' in gmrecords.conf['metrics']:
                vs30_grids = gmrecords.conf['metrics']['vs30']
                for vs30_name in vs30_grids:
                    vs30_grids[vs30_name]['grid_object'] = GMTGrid.load(
                        vs30_grids[vs30_name]['file'])
        self.vs30_grids = vs30_grids

        if gmrecords.args.num_processes:
            # parallelize processing on events
            try:
                client = Client(n_workers=gmrecords.args.num_processes)
            except BaseException as ex:
                print(ex)
                print("Could not create a dask client.")
                print("To turn off paralleization, use '--num-processes 0'.")
                sys.exit(1)
            futures = client.map(self._event_station_metrics, self.events)
            for result in as_completed(futures, with_results=True):
                print(result)
                # print('Completed event: %s' % result)
        else:
            for event in self.events:
                self._event_station_metrics(event)

        self._summarize_files_created()
Example #24
def getFileType(filename):
    """Determine whether input file is a shapefile or a grid (ESRI or GMT).
    EVENTUALLY WILL BE MOVED TO MAPIO

    :param filename:
      String path to candidate filename.
    :returns:
      String, one of 'dir', 'gmt', 'esri', or 'unknown'.

    """
    if os.path.isdir(filename):
        return 'dir'
    ftype = GMTGrid.getFileType(filename)
    if ftype != 'unknown':
        return 'gmt'
    #skip over ESRI header files
    if filename.endswith('.hdr'):
        return 'unknown'
    try:
        GDALGrid.getFileGeoDict(filename)
        return 'esri'
    except:
        pass
    return 'unknown'
Example #26
def basic_test():

    mmidata = np.array([[7, 8, 8, 8, 7], [8, 9, 9, 9, 8], [8, 9, 10, 9, 8],
                        [8, 9, 9, 8, 8], [7, 8, 8, 6, 5]],
                       dtype=np.float32)
    popdata = np.ones_like(mmidata) * 1e7
    isodata = np.array(
        [[4, 4, 4, 4, 4], [4, 4, 4, 4, 4], [4, 4, 156, 156, 156],
         [156, 156, 156, 156, 156], [156, 156, 156, 156, 156]],
        dtype=np.int32)

    shakefile = get_temp_file_name()
    popfile = get_temp_file_name()
    isofile = get_temp_file_name()
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 4.5,
        'ymin': 0.5,
        'ymax': 4.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 5,
        'ny': 5
    })
    layers = OrderedDict([
        ('mmi', mmidata),
    ])
    event_dict = {
        'event_id': 'us12345678',
        'magnitude': 7.8,
        'depth': 10.0,
        'lat': 34.123,
        'lon': -118.123,
        'event_timestamp': datetime.utcnow(),
        'event_description': 'foo',
        'event_network': 'us'
    }
    shake_dict = {
        'event_id': 'us12345678',
        'shakemap_id': 'us12345678',
        'shakemap_version': 1,
        'code_version': '4.5',
        'process_timestamp': datetime.utcnow(),
        'shakemap_originator': 'us',
        'map_status': 'RELEASED',
        'shakemap_event_type': 'ACTUAL'
    }
    unc_dict = {'mmi': (1, 1)}
    shakegrid = ShakeGrid(layers, geodict, event_dict, shake_dict, unc_dict)
    shakegrid.save(shakefile)
    popgrid = GMTGrid(popdata, geodict.copy())
    isogrid = GMTGrid(isodata, geodict.copy())
    popgrid.save(popfile)
    isogrid.save(isofile)

    ratedict = {
        4: {
            'start': [2010, 2012, 2014, 2016],
            'end': [2012, 2014, 2016, 2018],
            'rate': [0.01, 0.02, 0.03, 0.04]
        },
        156: {
            'start': [2010, 2012, 2014, 2016],
            'end': [2012, 2014, 2016, 2018],
            'rate': [0.02, 0.03, 0.04, 0.05]
        }
    }

    popgrowth = PopulationGrowth(ratedict)
    popyear = datetime.utcnow().year
    exposure = Exposure(popfile, popyear, isofile, popgrowth=popgrowth)
    expdict = exposure.calcExposure(shakefile)

    modeldict = [
        LognormalModel('AF', 11.613073, 0.180683, 1.0),
        LognormalModel('CN', 10.328811, 0.100058, 1.0)
    ]
    fatmodel = EmpiricalLoss(modeldict)

    # for the purposes of this test, let's override the rates
    # for Afghanistan and China with simpler numbers.
    fatmodel.overrideModel(
        'AF',
        np.array([0, 0, 0, 0, 1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 0],
                 dtype=np.float32))
    fatmodel.overrideModel(
        'CN',
        np.array([0, 0, 0, 0, 1e-7, 1e-6, 1e-5, 1e-4, 1e-3, 0],
                 dtype=np.float32))

    print('Testing very basic fatality calculation...')
    fatdict = fatmodel.getLosses(expdict)
    # Strictly speaking, the Afghanistan fatalities should be 462,000,
    # but floating point precision dictates otherwise.
    testdict = {'CN': 46111, 'AF': 461999, 'TotalFatalities': 508110}
    for key, value in fatdict.items():
        assert value == testdict[key]
    print('Passed very basic fatality calculation...')

    print('Testing grid fatality calculations...')
    mmidata = exposure.getShakeGrid().getLayer('mmi').getData()
    popdata = exposure.getPopulationGrid().getData()
    isodata = exposure.getCountryGrid().getData()
    fatgrid = fatmodel.getLossGrid(mmidata, popdata, isodata)

    assert np.nansum(fatgrid) == 508111
    print('Passed grid fatality calculations...')

    # Testing modifying rates and stuffing them back in...
    chile = LognormalModel('CL', 19.786773, 0.259531, 0.0)
    rates = chile.getLossRates(np.arange(5, 10))
    modrates = rates * 2  # does this make the event twice as deadly?

    # roughly the exposures from 2015-9-16 CL event
    expo_pop = np.array(
        [0, 0, 0, 1047000, 7314000, 1789000, 699000, 158000, 0, 0])
    mmirange = np.arange(5, 10)
    chile_deaths = chile.getLosses(expo_pop[4:9], mmirange)
    chile_double_deaths = chile.getLosses(expo_pop[4:9],
                                          mmirange,
                                          rates=modrates)
    print('Chile model fatalities: %f' % chile_deaths)
    print('Chile model x2 fatalities: %f' % chile_double_deaths)
Example #27
    def __init__(self,
                 shakefile,
                 config,
                 uncertfile=None,
                 saveinputs=False,
                 slopefile=None,
                 bounds=None,
                 slopemod=None,
                 trimfile=None):
        """
        Sets up the logistic model

        Args:
            shakefile (str): Path to shakemap grid.xml file for the event.
            config: configobj object defining the model and its inputs. Only
                one model should be described in each config file.
            uncertfile (str): Path to uncertainty.xml file.
            saveinputs (bool): Save input layers as Grid2D objects in addition
                to the model? If false (the default), it will just output the
                model.
            slopefile (str): Optional path to slopefile that will be resampled
                to the other input files for applying thresholds. OVERWRITES
                VALUE IN CONFIG.
            bounds (dict): Default of None uses ShakeMap boundaries, otherwise
                a dictionary of boundaries to cut to like

                .. code-block:: python

                    bounds = {
                        'xmin': lonmin, 'xmax': lonmax,
                        'ymin': latmin, 'ymax': latmax
                    }
            slopemod (str): How slope input should be modified to be in
                degrees: e.g., ``np.arctan(slope) * 180. / np.pi`` or
                ``slope/100.`` (note that this may be in the config file
                already).
            trimfile (str): shapefile of earth's landmasses to use to cut
                offshore areas.
        """
        mnames = getLogisticModelNames(config)
        if len(mnames) == 0:
            raise Exception('No config file found or problem with config '
                            'file format')
        if len(mnames) > 1:
            raise Exception('Config file contains more than one model which '
                            'is no longer allowed, update your config file '
                            'to the newer format')

        self.model = mnames[0]
        self.config = config
        cmodel = config[self.model]
        self.modeltype = cmodel['gfetype']
        self.coeffs = validateCoefficients(cmodel)
        # key = layer name, value = file name
        self.layers = validateLayers(cmodel)
        self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)
        self.gmused = [
            value for term, value in cmodel['terms'].items()
            if 'pga' in value.lower() or 'pgv' in value.lower()
            or 'mmi' in value.lower()
        ]
        self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
        #self.numstd = numstd
        self.clips = validateClips(cmodel, self.layers, self.gmused)
        self.notes = ''

        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception('You must specify a base layer corresponding to '
                            'one of the files in the layer section.')
        self.saveinputs = saveinputs
        if slopefile is None:
            try:
                self.slopefile = cmodel['slopefile']
            except:
                # print('Slopefile not specified in config, no slope '
                #      'thresholds will be applied\n')
                self.slopefile = None
        else:
            self.slopefile = slopefile
        if slopemod is None:
            try:
                self.slopemod = cmodel['slopemod']
            except:
                self.slopemod = None

        # See if trimfile exists
        if trimfile is not None:
            if not os.path.exists(trimfile):
                print('trimfile defined does not exist: %s\nOcean will not be '
                      'trimmed' % trimfile)
                self.trimfile = None
            elif os.path.splitext(trimfile)[1] != '.shp':
                print('trimfile must be a shapefile, ocean will not be '
                      'trimmed')
                self.trimfile = None
            else:
                self.trimfile = trimfile
        else:
            self.trimfile = None

        # Get month of event
        griddict, eventdict, specdict, fields, uncertainties = \
            getHeaderData(shakefile)
        MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]

        # Figure out how/if need to cut anything
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        if bounds is not None:  # Make sure bounds are within ShakeMap Grid
            if geodict.xmin < geodict.xmax:  # only if signs are not opposite
                if (geodict.xmin > bounds['xmin']
                        or geodict.xmax < bounds['xmax']
                        or geodict.ymin > bounds['ymin']
                        or geodict.ymax < bounds['ymax']):
                    print('Specified bounds are outside shakemap area, using '
                          'ShakeMap bounds instead.')
                    bounds = None

        if bounds is not None:
            tempgdict = GeoDict.createDictFromBox(bounds['xmin'],
                                                  bounds['xmax'],
                                                  bounds['ymin'],
                                                  bounds['ymax'],
                                                  geodict.dx,
                                                  geodict.dy,
                                                  inside=False)
            # If Shakemap geodict crosses 180/-180 line, fix geodict so things don't break
            if geodict.xmin > geodict.xmax:
                if tempgdict.xmin < 0:
                    geodict._xmin -= 360.
                else:
                    geodict._xmax += 360.
            gdict = geodict.getBoundsWithin(tempgdict)
        else:
            gdict = geodict

        # Now find the layer that is our base layer and get the largest bounds
        # we can guarantee not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
            if basegeodict == gdict:
                sampledict = gdict
            else:
                sampledict = basegeodict.getBoundsWithin(gdict)
        elif ftype == 'gmt':
            basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
            if basegeodict == gdict:
                sampledict = gdict
            else:
                sampledict = basegeodict.getBoundsWithin(gdict)
        else:
            raise Exception('All predictor variable grids must be a valid '
                            'GMT or ESRI file type.')

        # Do we need to subdivide baselayer?
        if 'divfactor' in self.config[self.model].keys():
            divfactor = float(self.config[self.model]['divfactor'])
            if divfactor != 1.:
                # adjust sampledict so everything will be resampled
                newxmin = sampledict.xmin - sampledict.dx / \
                    2. + sampledict.dx/(2.*divfactor)
                newymin = sampledict.ymin - sampledict.dy / \
                    2. + sampledict.dy/(2.*divfactor)
                newxmax = sampledict.xmax + sampledict.dx / \
                    2. - sampledict.dx/(2.*divfactor)
                newymax = sampledict.ymax + sampledict.dy / \
                    2. - sampledict.dy/(2.*divfactor)
                newdx = sampledict.dx / divfactor
                newdy = sampledict.dy / divfactor

                sampledict = GeoDict.createDictFromBox(newxmin,
                                                       newxmax,
                                                       newymin,
                                                       newymax,
                                                       newdx,
                                                       newdy,
                                                       inside=True)

        # Find slope thresholds, if applicable
        self.slopemin = 'none'
        self.slopemax = 'none'
        if self.slopefile is not None:
            try:
                self.slopemin = float(config[self.model]['slopemin'])
                self.slopemax = float(config[self.model]['slopemax'])
            except:
                print('Could not find slopemin and/or slopemax in config '
                      'limits. No slope thresholds will be applied.')
                self.slopemin = 'none'
                self.slopemax = 'none'

        # Make temporary directory for hdf5 pytables file storage
        self.tempdir = tempfile.mkdtemp()

        # now load the shakemap, resampling and padding if necessary
        temp = ShakeGrid.load(shakefile)  # , adjust='res')
        self.shakedict = temp.getShakeDict()
        self.eventdict = temp.getEventDict()
        self.shakemap = {}

        # Read both PGA and PGV in, may need them for thresholds
        for gm in ['pga', 'pgv']:
            junkfile = os.path.join(self.tempdir, 'temp.bil')
            GDALGrid.copyFromGrid(temp.getLayer(gm)).save(junkfile)
            if gm in self.interpolations.keys():
                intermeth = self.interpolations[gm]
            else:
                intermeth = 'bilinear'
            junkgrid = quickcut(junkfile,
                                sampledict,
                                precise=True,
                                method=intermeth)
            if gm in self.clips:
                junkgrid.setData(
                    np.clip(junkgrid.getData(), self.clips[gm][0],
                            self.clips[gm][1]))
            self.shakemap[gm] = TempHdf(
                junkgrid, os.path.join(self.tempdir, '%s.hdf5' % gm))
            os.remove(junkfile)
        del (temp)

        # get updated geodict
        sampledict = junkgrid.getGeoDict()

        # take uncertainties into account, if available
        if uncertfile is not None:
            self.uncert = {}
            try:
                # Only read in the ones that will be needed
                temp = ShakeGrid.load(uncertfile)
                already = []
                for gm in self.gmused:
                    if 'pgv' in gm:
                        gmsimp = 'pgv'
                    elif 'pga' in gm:
                        gmsimp = 'pga'
                    elif 'mmi' in gm:
                        gmsimp = 'mmi'
                    if gmsimp in already:
                        continue
                    junkfile = os.path.join(self.tempdir, 'temp.bil')
                    GDALGrid.copyFromGrid(temp.getLayer('std%s' %
                                                        gmsimp)).save(junkfile)
                    if gmsimp in self.interpolations.keys():
                        intermeth = self.interpolations[gmsimp]
                    else:
                        intermeth = 'bilinear'
                    junkgrid = quickcut(junkfile,
                                        sampledict,
                                        precise=True,
                                        method=intermeth)
                    if gmsimp in self.clips:
                        junkgrid.setData(
                            np.clip(junkgrid.getData(), self.clips[gmsimp][0],
                                    self.clips[gmsimp][1]))
                    self.uncert['std' + gmsimp] = TempHdf(
                        junkgrid,
                        os.path.join(self.tempdir, 'std%s.hdf5' % gmsimp))
                    already.append(gmsimp)
                    os.remove(junkfile)
                del (temp)
            except:
                print('Could not read uncertainty file, ignoring '
                      'uncertainties')
                self.uncert = None
        else:
            self.uncert = None

        # Load the predictor layers, save as hdf5 temporary files, put file
        # locations into a dictionary.

        # Will be replaced in the next section if a slopefile was defined
        self.nonzero = None

        # key = layer name, value = grid object
        self.layerdict = {}

        didslope = False
        for layername, layerfile in self.layers.items():
            start = timer()
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            temp = quickcut(layerfile,
                                            sampledict,
                                            precise=True,
                                            method=interp)
                            if layername in self.clips:
                                temp.setData(
                                    np.clip(temp.getData(),
                                            self.clips[layername][0],
                                            self.clips[layername][1]))
                            self.layerdict[layername] = TempHdf(
                                temp,
                                os.path.join(self.tempdir,
                                             '%s.hdf5' % layername))
                            del (temp)
            else:
                interp = self.interpolations[layername]
                temp = quickcut(layerfile,
                                sampledict,
                                precise=True,
                                method=interp)
                if layername in self.clips:
                    temp.setData(
                        np.clip(temp.getData(), self.clips[layername][0],
                                self.clips[layername][1]))
                if layername == 'rock':  # Convert unconsolidated sediments to a more reasonable coefficient
                    sub1 = temp.getData()
                    # Change to mixed sed rock coeff
                    sub1[sub1 <= -3.21] = -1.36
                    temp.setData(sub1)
                    self.notes += ('unconsolidated sediment coefficient '
                                   'changed to -1.36 (weaker) from -3.22 '
                                   'to better reflect that this unit is '
                                   'not actually strong\n')

                self.layerdict[layername] = TempHdf(
                    temp, os.path.join(self.tempdir, '%s.hdf5' % layername))
                td = temp.getGeoDict()
                if td != sampledict:
                    raise Exception(
                        'Geodictionaries of resampled files do not match')

                if layerfile == self.slopefile:
                    flag = 0
                    if self.slopemin == 'none' and self.slopemax == 'none':
                        flag = 1
                    if self.slopemod is None:
                        slope1 = temp.getData().astype(float)
                        slope = 0
                    else:
                        try:
                            slope = temp.getData().astype(float)
                            slope1 = eval(self.slopemod)
                        except:
                            print('slopemod provided not valid, continuing '
                                  'without slope thresholds.')
                            # fall back to unmodified slope so the offshore
                            # (slope == 0) mask below still works
                            slope1 = temp.getData().astype(float)
                            flag = 1
                    if flag == 0:
                        nonzero = np.array([(slope1 > self.slopemin) &
                                            (slope1 <= self.slopemax)])
                        self.nonzero = nonzero[0, :, :]
                        del (slope1)
                        del (slope)
                    else:
                        # Still remove areas where the slope equals exactly
                        # 0.0 to remove offshore liq areas.
                        nonzero = np.array([slope1 != 0.0])
                        self.nonzero = nonzero[0, :, :]
                        del (slope1)
                    didslope = True
                del (temp)

            print('Loading %s layer: %1.1f sec' % (layername, timer() - start))

        if didslope is False and self.slopefile is not None:
            # Slope didn't get read in yet
            temp = quickcut(self.slopefile,
                            sampledict,
                            precise=True,
                            method='bilinear')
            flag = 0
            if self.slopemin == 'none' and self.slopemax == 'none':
                flag = 1
            if self.slopemod is None:
                slope1 = temp.getData().astype(float)
                slope = 0
            else:
                try:
                    slope = temp.getData().astype(float)
                    slope1 = eval(self.slopemod)
                except:
                    print('slopemod provided not valid, continuing without '
                          'slope thresholds')
                    # fall back to unmodified slope so the offshore
                    # (slope == 0) mask below still works
                    slope1 = temp.getData().astype(float)
                    flag = 1
            if flag == 0:
                nonzero = np.array([
                    (slope1 > self.slopemin) & (slope1 <= self.slopemax)
                ])
                self.nonzero = nonzero[0, :, :]
                del (slope1)
                del (slope)
            else:
                # Still remove areas where the slope equals exactly
                # 0.0 to remove offshore liq areas.
                nonzero = np.array([slope1 != 0.0])
                self.nonzero = nonzero[0, :, :]
                del (slope1)

        self.nuggets = [str(self.coeffs['b0'])]

        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)
        self.geodict = sampledict
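The divfactor arithmetic above subdivides each grid cell into divfactor x divfactor subcells while keeping the outer cell edges fixed; a standalone sketch of that calculation (names are illustrative, not part of the class):

def subdivide_bounds(xmin, xmax, ymin, ymax, dx, dy, divfactor):
    """Return new cell-center bounds and spacings after subdividing."""
    newxmin = xmin - dx / 2. + dx / (2. * divfactor)
    newxmax = xmax + dx / 2. - dx / (2. * divfactor)
    newymin = ymin - dy / 2. + dy / (2. * divfactor)
    newymax = ymax + dy / 2. - dy / (2. * divfactor)
    return newxmin, newxmax, newymin, newymax, dx / divfactor, dy / divfactor

# A single 1-degree cell centered at (0, 0) with divfactor=2: the four new
# cell centers sit at +/-0.25 and the outer edges stay at +/-0.5.
print(subdivide_bounds(0., 0., 0., 0., 1., 1., 2.))
# (-0.25, 0.25, -0.25, 0.25, 0.5, 0.5)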
Example #28
def modelMap(grids, shakefile=None, suptitle=None, inventory_shapefile=None,
             plotorder=None, maskthreshes=None, colormaps=None, boundaries=None,
             zthresh=0, scaletype='continuous', lims=None, logscale=False,
             ALPHA=0.7, maproads=True, mapcities=True, isScenario=False,
             roadfolder=None, topofile=None, cityfile=None, oceanfile=None,
             roadcolor='#6E6E6E', watercolor='#B8EEFF', countrycolor='#177F10',
             outputdir=None, savepdf=True, savepng=True, showplots=False,
             roadref='unknown', cityref='unknown', oceanref='unknown',
             printparam=False, ds=True, dstype='mean', upsample=False):
    """
    This function creates maps of mapio grid layers (e.g. liquefaction or
    landslide models with their input layers). All grids must use the same
    bounds.
    TODO: change so that all input layers do not have to have the same
    bounds, test plotting multiple probability layers, and add an option so
    that if PDF and PNG aren't output, the plot opens on screen using
    plt.show()

    :param grids: Dictionary of N layers and metadata formatted like:
        maplayers['layer name']={
        'grid': mapio grid2D object,
        'label': 'label for colorbar and top line of subtitle',
        'type': 'output or input to model',
        'description': 'detailed description of layer for subtitle'}.
      Layer names must be unique.
    :type grids: Dictionary or ordered dictionary (collections.OrderedDict)
    :param shakefile: Optional ShakeMap file (URL or full file path) from
      which to extract information for labels and folder names
    :type shakefile: string
    :param suptitle: This will be displayed at the top of the plots and in the
      figure names
    :type suptitle: string
    :param plotorder: List of keys describing the order to plot the grids, if
      None and grids is an ordered dictionary, it will use the order of the
      dictionary, otherwise it will choose order which may be somewhat random
      but it will always put a probability grid first
    :type plotorder: list
    :param maskthreshes: N x 1 array or list of lower thresholds for masking
      corresponding to order in plotorder or order of OrderedDict if plotorder
      is None. If grids is not an ordered dict and plotorder is not specified,
      this will not work right. If None (default), nothing will be masked
    :param colormaps: List of strings of matplotlib colormaps (e.g. cm.autumn_r)
      corresponding to plotorder or order of dictionary if plotorder is None.
      The list can contain both strings and None e.g. colormaps = ['cm.autumn',
      None, None, 'cm.jet'] and None's will default to default colormap
    :param boundaries: None to show entire study area, 'zoom' to zoom in on the
      area of action (only works if there is a probability layer) using zthresh
      as a threshold, or a dictionary defining lats and lons in the form of
      boundaries.xmin = minlon, boundaries.xmax = maxlon, boundaries.ymin =
      min lat, boundaries.ymax = max lat
    :param zthresh: threshold for computing zooming bounds, only used if
      boundaries = 'zoom'
    :type zthresh: float
    :param scaletype: Type of scale for plotting, 'continuous' or 'binned' -
      will be reflected in colorbar
    :type scaletype: string
    :param lims: None or Nx1 list of tuples or numpy arrays corresponding to
      plotorder defining the limits for saturating the colorbar (vmin, vmax) if
      scaletype is continuous or the bins to use (clev) if scaletype if binned.
      The list can contain tuples, arrays, and Nones, e.g. lims = [(0., 10.),
      None, (0.1, 1.5), np.linspace(0., 1.5, 15)]. When None is specified, the
      program will estimate the limits, when an array is specified but the scale
      type is continuous, vmin will be set to min(array) and vmax will be set
      to max(array)
    :param logscale: None or Nx1 list of Trues and Falses corresponding to
      plotorder defining whether to use a linear or log scale (log10) for
      plotting the layer. This will be reflected in the labels
    :param ALPHA: Transparency for mapping, if there is a hillshade that will
      plot below each layer, it is recommended to set this to at least 0.7
    :type ALPHA: float
    :param maproads: Whether to show roads or not, default True, but requires
      that roadfile is specified and valid to work
    :type maproads: boolean
    :param mapcities: Whether to show cities or not, default True, but requires
      that cityfile is specified and valid to work
    :type mapcities: boolean
    :param isScenario: Whether this is a scenario (True) or a real event (False)
      (default False)
    :type isScenario: boolean
    :param roadfolder: Full file path to folder containing road shapefiles
    :type roadfolder: string
    :param topofile: Full file path to topography grid (GDAL compatible) - this
      is only needed to make a hillshade if a premade hillshade is not specified
    :type topofile: string
    :param cityfile: Full file path to Pager file containing city & population
      information
    :type cityfile: string
    :param roadcolor: Color to use for roads, if plotted, default #6E6E6E
    :type roadcolor: Hex color or other matplotlib compatible way of defining
      color
    :param watercolor: Color to use for oceans, lakes, and rivers, default
      #B8EEFF
    :type watercolor: Hex color or other matplotlib compatible way of defining
      color
    :param countrycolor: Color for country borders, default #177F10
    :type countrycolor: Hex color or other matplotlib compatible way of defining
      color
    :param outputdir: File path for outputting figures, if edict is defined, a
      subfolder based on the event id will be created in this folder. If None,
      will use current directory
    :param savepdf: True to save pdf figure, False to not
    :param savepng: True to save png figure, False to not
    :param ds: True to allow downsampling for display (necessary when arrays
      are quite large, False to not allow)
    :param dstype: What function to use in downsampling, options are 'min',
      'max', 'median', or 'mean'
    :param upsample: True to upsample the layer to the DEM resolution for better
      looking hillshades

    :returns:
        * PDF and/or PNG of map
        * Downsampled and trimmed version of input grids. If no modification
          was needed for plotting, this will be identical to grids but
          without the metadata

    """

    if suptitle is None:
        suptitle = ' '

    plt.ioff()

    defaultcolormap = cm.jet

    if shakefile is not None:
        edict = ShakeGrid.load(shakefile, adjust='res').getEventDict()
        temp = ShakeGrid.load(shakefile, adjust='res').getShakeDict()
        edict['eventid'] = temp['shakemap_id']
        edict['version'] = temp['shakemap_version']
    else:
        edict = None

    # Get output file location
    if outputdir is None:
        print('No output location given, using current directory for outputs\n')
        outputdir = os.getcwd()
    if edict is not None:
        outfolder = os.path.join(outputdir, edict['event_id'])
    else:
        outfolder = outputdir
    if not os.path.isdir(outfolder):
        os.makedirs(outfolder)

    # Get plotting order, if not specified
    if plotorder is None:
        plotorder = list(grids.keys())

    # Get boundaries to use for all plots
    cut = True
    if boundaries is None:
        cut = False
        keytemp = list(grids.keys())
        boundaries = grids[keytemp[0]]['grid'].getGeoDict()
    elif boundaries == 'zoom':
        # Find probability layer (will just take the maximum bounds if there is
        # more than one)
        keytemp = list(grids.keys())
        key1 = [key for key in keytemp if 'model' in key.lower()]
        if len(key1) == 0:
            print('Could not find model layer to use for zoom, using default boundaries')
            keytemp = list(grids.keys())
            boundaries = grids[keytemp[0]]['grid'].getGeoDict()
        else:
            lonmax = -1.e10
            lonmin = 1.e10
            latmax = -1.e10
            latmin = 1.e10
            for key in key1:
                # get lat lons of areas affected and add, if no areas affected,
                # switch to shakemap boundaries
                temp = grids[key]['grid']
                xmin, xmax, ymin, ymax = temp.getBounds()
                lons = np.linspace(xmin, xmax, temp.getGeoDict().nx)
                lats = np.linspace(ymax, ymin, temp.getGeoDict().ny)  # backwards so it plots right
                row, col = np.where(temp.getData() > float(zthresh))
                if len(row) > 0:
                    lonmin = min(lonmin, lons[col].min())
                    lonmax = max(lonmax, lons[col].max())
                    latmin = min(latmin, lats[row].min())
                    latmax = max(latmax, lats[row].max())
            boundaries1 = {'dx': 100, 'dy': 100., 'nx': 100., 'ny': 100}  # dummy fillers, only really care about bounds
            if xmin < lonmin-0.15*(lonmax-lonmin):
                boundaries1['xmin'] = lonmin-0.1*(lonmax-lonmin)
            else:
                boundaries1['xmin'] = xmin
            if xmax > lonmax+0.15*(lonmax-lonmin):
                boundaries1['xmax'] = lonmax+0.1*(lonmax-lonmin)
            else:
                boundaries1['xmax'] = xmax
            if ymin < latmin-0.15*(latmax-latmin):
                boundaries1['ymin'] = latmin-0.1*(latmax-latmin)
            else:
                boundaries1['ymin'] = ymin
            if ymax > latmax+0.15*(latmax-latmin):
                boundaries1['ymax'] = latmax+0.1*(latmax-latmin)
            else:
                boundaries1['ymax'] = ymax
            boundaries = GeoDict(boundaries1, adjust='res')
    else:
        # SEE IF BOUNDARIES ARE SAME AS BOUNDARIES OF LAYERS
        keytemp = list(grids.keys())
        tempgdict = grids[keytemp[0]]['grid'].getGeoDict()
        if np.abs(tempgdict.xmin-boundaries['xmin']) < 0.05 and \
           np.abs(tempgdict.ymin-boundaries['ymin']) < 0.05 and \
           np.abs(tempgdict.xmax-boundaries['xmax']) < 0.05 and \
           np.abs(tempgdict.ymax - boundaries['ymax']) < 0.05:
            print('Input boundaries are almost the same as specified boundaries, no cutting needed')
            boundaries = tempgdict
            cut = False
        else:
            try:
                if boundaries['xmin'] > boundaries['xmax'] or \
                   boundaries['ymin'] > boundaries['ymax']:
                    print('Input boundaries are not usable, using default boundaries')
                    keytemp = list(grids.keys())
                    boundaries = grids[keytemp[0]]['grid'].getGeoDict()
                    cut = False
                else:
                    # Build dummy GeoDict
                    boundaries = GeoDict({'xmin': boundaries['xmin'],
                                          'xmax': boundaries['xmax'],
                                          'ymin': boundaries['ymin'],
                                          'ymax': boundaries['ymax'],
                                          'dx': 100.,
                                          'dy': 100.,
                                          'ny': 100.,
                                          'nx': 100.},
                                         adjust='res')
            except:
                print('Input boundaries are not usable, using default boundaries')
                keytemp = list(grids.keys())
                boundaries = grids[keytemp[0]]['grid'].getGeoDict()
                cut = False

    # Pull out bounds for various uses
    bxmin, bxmax, bymin, bymax = boundaries.xmin, boundaries.xmax, boundaries.ymin, boundaries.ymax

    # Determine if need a single panel or multi-panel plot and if multi-panel,
    # how many and how it will be arranged
    fig = plt.figure()
    numpanels = len(grids)
    if numpanels == 1:
        rowpan = 1
        colpan = 1
        # create the figure and axes instances.
        fig.set_figwidth(5)
    elif numpanels == 2 or numpanels == 4:
        rowpan = np.ceil(numpanels/2.)
        colpan = 2
        fig.set_figwidth(13)
    else:
        rowpan = np.ceil(numpanels/3.)
        colpan = 3
        fig.set_figwidth(15)
    if rowpan == 1:
        fig.set_figheight(rowpan*6.0)
    else:
        fig.set_figheight(rowpan*5.3)

    # Need to update naming to reflect the shakemap version once can get
    # getHeaderData to work, add edict['version'] back into title, maybe
    # shakemap id also?
    fontsizemain = 14.
    fontsizesub = 12.
    fontsizesmallest = 10.
    if rowpan == 1.:
        fontsizemain = 12.
        fontsizesub = 10.
        fontsizesmallest = 8.
    if edict is not None:
        if isScenario:
            title = edict['event_description']
        else:
            timestr = edict['event_timestamp'].strftime('%b %d %Y')
            title = 'M%.1f %s v%i - %s' % (edict['magnitude'], timestr, edict['version'], edict['event_description'])
        plt.suptitle(title+'\n'+suptitle, fontsize=fontsizemain)
    else:
        plt.suptitle(suptitle, fontsize=fontsizemain)

    clear_color = [0, 0, 0, 0.0]

    # Cut all of them and release extra memory

    xbuff = (bxmax-bxmin)/10.
    ybuff = (bymax-bymin)/10.
    cutxmin = bxmin-xbuff
    cutymin = bymin-ybuff
    cutxmax = bxmax+xbuff
    cutymax = bymax+ybuff
    if cut is True:
        newgrids = collections.OrderedDict()
        for k, layer in enumerate(plotorder):
            templayer = grids[layer]['grid']
            try:
                newgrids[layer] = {'grid': templayer.cut(cutxmin, cutxmax, cutymin, cutymax, align=True)}
            except Exception as e:
                print(('Cutting failed, %s, continuing with full layers' % e))
                newgrids = grids
                continue
        del templayer
        gc.collect()
    else:
        newgrids = grids
    tempgdict = newgrids[list(grids.keys())[0]]['grid'].getGeoDict()

    # Upsample layers to same as topofile if desired for better looking hillshades
    if upsample is True and topofile is not None:
        try:
            topodict = GDALGrid.getFileGeoDict(topofile)
            if topodict.dx >= tempgdict.dx or topodict.dy >= tempgdict.dy:
                print('Upsampling not possible, resolution of results already smaller than DEM')
                pass
            else:
                tempgdict1 = GeoDict({'xmin': tempgdict.xmin-xbuff,
                                      'ymin': tempgdict.ymin-ybuff,
                                      'xmax': tempgdict.xmax+xbuff,
                                      'ymax': tempgdict.ymax+ybuff,
                                      'dx': topodict.dx,
                                      'dy': topodict.dy,
                                      'nx': topodict.nx,
                                      'ny': topodict.ny},
                                     adjust='res')
                tempgdict2 = tempgdict1.getBoundsWithin(tempgdict)
                for k, layer in enumerate(plotorder):
                    newgrids[layer]['grid'] = newgrids[layer]['grid'].subdivide(tempgdict2)
        except:
            print('Upsampling failed, continuing')

    # Downsample all of them for plotting, if needed, and replace them in
    # grids (to save memory)
    tempgrid = newgrids[list(grids.keys())[0]]['grid']
    xsize = tempgrid.getGeoDict().nx
    ysize = tempgrid.getGeoDict().ny
    inchesx, inchesy = fig.get_size_inches()
    divx = int(np.round(xsize/(500.*inchesx)))
    divy = int(np.round(ysize/(500.*inchesy)))
    xmin, xmax, ymin, ymax = tempgrid.getBounds()
    gdict = tempgrid.getGeoDict()  # Will be replaced if downsampled
    del tempgrid
    gc.collect()

    if divx <= 1:
        divx = 1
    if divy <= 1:
        divy = 1
    if (divx > 1. or divy > 1.) and ds:
        if dstype == 'max':
            func = np.nanmax
        elif dstype == 'min':
            func = np.nanmin
        elif dstype in ('med', 'median'):
            func = np.nanmedian
        else:
            func = np.nanmean
        for k, layer in enumerate(plotorder):
            layergrid = newgrids[layer]['grid']
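            # block_reduce (from skimage.measure) aggregates each
            # (divy x divx) block of cells with func (e.g. np.nanmax),
            # padding partial edge blocks with NaN via cval.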
            dat = block_reduce(layergrid.getData().copy(),
                               block_size=(divy, divx),
                               cval=float('nan'),
                               func=func)
            if k == 0:
                lons = block_reduce(np.linspace(xmin, xmax, layergrid.getGeoDict().nx),
                                    block_size=(divx,),
                                    func=np.mean,
                                    cval=float('nan'))
                if math.isnan(lons[-1]):
                    lons[-1] = lons[-2] + (lons[1]-lons[0])
                lats = block_reduce(np.linspace(ymax, ymin, layergrid.getGeoDict().ny),
                                    block_size=(divy,),
                                    func=np.mean,
                                    cval=float('nan'))
                if math.isnan(lats[-1]):
                    lats[-1] = lats[-2] + (lats[1]-lats[0])
                gdict = GeoDict({'xmin': lons.min(),
                                 'xmax': lons.max(),
                                 'ymin': lats.min(),
                                 'ymax': lats.max(),
                                 'dx': np.abs(lons[1]-lons[0]),
                                 'dy': np.abs(lats[1]-lats[0]),
                                 'nx': len(lons),
                                 'ny': len(lats)},
                                adjust='res')
            newgrids[layer]['grid'] = Grid2D(dat, gdict)
        del layergrid, dat
    else:
        lons = np.linspace(xmin, xmax, xsize)
        lats = np.linspace(ymax, ymin, ysize)  # backwards so it plots right side up

    #make meshgrid
    llons1, llats1 = np.meshgrid(lons, lats)

    # See if there is an oceanfile for masking
    bbox = PolygonSH(((cutxmin, cutymin), (cutxmin, cutymax), (cutxmax, cutymax), (cutxmax, cutymin)))
    if oceanfile is not None:
        try:
            f = fiona.open(oceanfile)
            oc = next(f)
            f.close()
            shapes = shape(oc['geometry'])
            # make boundaries into a shape
            ocean = shapes.intersection(bbox)
        except:
            print('Not able to read specified ocean file, will use default ocean masking')
            oceanfile = None
    if inventory_shapefile is not None:
        try:
            f = fiona.open(inventory_shapefile)
            invshp = list(f.items(bbox=(bxmin, bymin, bxmax, bymax)))
            f.close()
            inventory = [shape(inv[1]['geometry']) for inv in invshp]
        except:
            print('unable to read inventory shapefile specified, will not plot inventory')
            inventory_shapefile = None

    # # Find cities that will be plotted
    if mapcities is True and cityfile is not None:
        try:
            mycity = BasemapCities.loadFromGeoNames(cityfile=cityfile)
            bcities = mycity.limitByBounds((bxmin, bxmax, bymin, bymax))
            #bcities = bcities.limitByPopulation(40000)
            bcities = bcities.limitByGrid(nx=4, ny=4, cities_per_grid=2)
        except:
            print('Could not read in cityfile, not plotting cities')
            mapcities = False
            cityfile = None

    # Load in topofile
    if topofile is not None:
        try:
            topomap = GDALGrid.load(topofile, resample=True, method='linear', samplegeodict=gdict)
        except:
            topomap = GMTGrid.load(topofile, resample=True, method='linear', samplegeodict=gdict)
        topodata = topomap.getData().copy()
        # mask oceans if don't have ocean shapefile
        if oceanfile is None:
            topodata = maskoceans(llons1, llats1, topodata, resolution='h', grid=1.25, inlands=True)
    else:
        print('No topofile provided, no hillshade is possible\n')
        topomap = None
        topodata = None

    # Load in roads, if needed
    roadslist = None
    if maproads is True and roadfolder is not None:
        try:
            roadslist = []
            for folder in os.listdir(roadfolder):
                road1 = os.path.join(roadfolder, folder)
                shpfiles = glob.glob(os.path.join(road1, '*.shp'))
                if len(shpfiles):
                    shpfile = shpfiles[0]
                    f = fiona.open(shpfile)
                    shapes = list(f.items(bbox=(bxmin, bymin, bxmax, bymax)))
                    for shapeid, shapedict in shapes:
                        roadslist.append(shapedict)
                    f.close()
        except:
            print('Not able to plot roads')
            roadslist = None

    val = 1
    for k, layer in enumerate(plotorder):
        layergrid = newgrids[layer]['grid']
        if 'label' in list(grids[layer].keys()):
            label1 = grids[layer]['label']
        else:
            label1 = layer
        try:
            sref = grids[layer]['description']['name']
        except:
            sref = None
        ax = fig.add_subplot(rowpan, colpan, val)
        val += 1
        clat = bymin + (bymax-bymin)/2.0
        clon = bxmin + (bxmax-bxmin)/2.0
        # setup of basemap ('lcc' = lambert conformal conic).
        # use major and minor sphere radii from WGS84 ellipsoid.
        m = Basemap(llcrnrlon=bxmin, llcrnrlat=bymin, urcrnrlon=bxmax, urcrnrlat=bymax,
                    rsphere=(6378137.00, 6356752.3142),
                    resolution='l', area_thresh=1000., projection='lcc',
                    lat_1=clat, lon_0=clon, ax=ax)

        x1, y1 = m(llons1, llats1)  # get projection coordinates
        axsize = ax.get_window_extent().transformed(fig.dpi_scale_trans.inverted())
        if k == 0:
            wid, ht = axsize.width, axsize.height
        if colormaps is not None and \
           len(colormaps) == len(newgrids) and \
           colormaps[k] is not None:
            palette = colormaps[k]
        else:  # Find preferred default color map for each type of layer
            if 'prob' in layer.lower() or 'pga' in layer.lower() or \
               'pgv' in layer.lower() or 'cohesion' in layer.lower() or \
               'friction' in layer.lower() or 'fs' in layer.lower():
                palette = cm.jet
            elif 'slope' in layer.lower():
                palette = cm.gnuplot2
            elif 'precip' in layer.lower():
                palette = cm2.s3pcpn
            else:
                palette = defaultcolormap

        if topodata is not None:
            if k == 0:
                ptopo = m.transform_scalar(
                    np.flipud(topodata), lons+0.5*gdict.dx,
                    lats[::-1]-0.5*gdict.dy, np.round(300.*wid),
                    np.round(300.*ht), returnxy=False, checkbounds=False,
                    order=1, masked=False)
                #use lightsource class to make our shaded topography
                ls = LightSource(azdeg=135, altdeg=45)
                ls1 = LightSource(azdeg=120, altdeg=45)
                ls2 = LightSource(azdeg=225, altdeg=45)
                intensity1 = ls1.hillshade(ptopo, fraction=0.25, vert_exag=1.)
                intensity2 = ls2.hillshade(ptopo, fraction=0.25, vert_exag=1.)
                intensity = intensity1*0.5 + intensity2*0.5
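                # Averaging hillshades lit from two different azimuths
                # softens the shadows relative to a single light source.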
                #hillshm_im = m.transform_scalar(np.flipud(hillshm), lons, lats[::-1], np.round(300.*wid), np.round(300.*ht), returnxy=False, checkbounds=False, order=0, masked=False)
            #m.imshow(hillshm_im, cmap='Greys', vmin=0., vmax=3., zorder=1, interpolation='none')  # vmax = 3 to soften colors to light gray
            #m.pcolormesh(x1, y1, hillshm, cmap='Greys', linewidth=0., rasterized=True, vmin=0., vmax=3., edgecolors='none', zorder=1);
            # plt.draw()

        # Get the data
        dat = layergrid.getData().copy()

        # mask out anything below any specified thresholds
        # Might need to move this up to before downsampling... might give the
        # illusion of no hazard in places where there is some that just got
        # averaged out
        if maskthreshes is not None and len(maskthreshes) == len(newgrids):
            if maskthreshes[k] is not None:
                dat[dat <= maskthreshes[k]] = float('NaN')
                dat = np.ma.array(dat, mask=np.isnan(dat))

        if logscale is not False and len(logscale) == len(newgrids):
            if logscale[k] is True:
                dat = np.log10(dat)
                label1 = r'$log_{10}$(' + label1 + ')'

        if scaletype.lower() == 'binned':
            # Find order of range to know how to scale
            order = np.round(np.log(np.nanmax(dat) - np.nanmin(dat)))
            if order < 1.:
                scal = 10**-order
            else:
                scal = 1.
            if lims is None or len(lims) != len(newgrids):
                clev = (np.linspace(np.floor(scal*np.nanmin(dat)), np.ceil(scal*np.nanmax(dat)), 10))/scal
            else:
                if lims[k] is None:
                    clev = (np.linspace(np.floor(scal*np.nanmin(dat)), np.ceil(scal*np.nanmax(dat)), 10))/scal
                else:
                    clev = lims[k]
            # Adjust to colorbar levels
            dat[dat < clev[0]] = clev[0]
            for j, level in enumerate(clev[:-1]):
                dat[(dat >= clev[j]) & (dat < clev[j+1])] = clev[j]
            # So colorbar saturates at top
            dat[dat > clev[-1]] = clev[-1]
            #panelhandle = m.contourf(x1, y1, datm, clev, cmap=palette, linewidth=0., alpha=ALPHA, rasterized=True)
            vmin = clev[0]
            vmax = clev[-1]
        else:
            if lims is not None and len(lims) == len(newgrids):
                if lims[k] is None:
                    vmin = np.nanmin(dat)
                    vmax = np.nanmax(dat)
                else:
                    vmin = lims[k][0]
                    vmax = lims[k][-1]
            else:
                vmin = np.nanmin(dat)
                vmax = np.nanmax(dat)

        # Mask out cells overlying oceans or block with a shapefile if available
        if oceanfile is None:
            dat = maskoceans(llons1, llats1, dat, resolution='h', grid=1.25, inlands=True)
        else:
            #patches = []
            if type(ocean) is PolygonSH:
                ocean = [ocean]
            for oc in ocean:
                patch = getProjectedPatch(oc, m, edgecolor="#006280", facecolor=watercolor, lw=0.5, zorder=4.)
                #x, y = m(oc.exterior.xy[0], oc.exterior.xy[1])
                #xy = zip(x, y)
                #patch = Polygon(xy, facecolor=watercolor, edgecolor="#006280", lw=0.5, zorder=4.)
                ##patches.append(Polygon(xy, facecolor=watercolor, edgecolor=watercolor, zorder=500.))
                ax.add_patch(patch)
            ##ax.add_collection(PatchCollection(patches))

        if inventory_shapefile is not None:
            for in1 in inventory:
                if 'point' in str(type(in1)):
                    x, y = in1.xy
                    x = x[0]
                    y = y[0]
                    m.scatter(x, y, c='m', s=50, latlon=True, marker='^',
                              zorder=100001)
                else:
                    x, y = m(in1.exterior.xy[0], in1.exterior.xy[1])
                    xy = list(zip(x, y))
                    patch = Polygon(xy, facecolor='none', edgecolor='k', lw=0.5, zorder=10.)
                    #patches.append(Polygon(xy, facecolor=watercolor, edgecolor=watercolor, zorder=500.))
                    ax.add_patch(patch)
        palette.set_bad(clear_color, alpha=0.0)
        # Plot it up
        dat_im = m.transform_scalar(
            np.flipud(dat), lons+0.5*gdict.dx, lats[::-1]-0.5*gdict.dy,
            np.round(300.*wid), np.round(300.*ht), returnxy=False,
            checkbounds=False, order=0, masked=True)
        if topodata is not None:  # Drape over hillshade
            #turn data into an RGBA image
            cmap = palette
            #adjust data so scaled between vmin and vmax and between 0 and 1
            dat1 = dat_im.copy()
            dat1[dat1 < vmin] = vmin
            dat1[dat1 > vmax] = vmax
            dat1 = (dat1 - vmin)/(vmax-vmin)
            rgba_img = cmap(dat1)
            maskvals = np.dstack((dat1.mask, dat1.mask, dat1.mask))
            rgb = np.squeeze(rgba_img[:, :, 0:3])
            rgb[maskvals] = 1.
            draped_hsv = ls.blend_hsv(rgb, np.expand_dims(intensity, 2))
            m.imshow(draped_hsv, zorder=3., interpolation='none')
            # This is just a dummy layer that will be deleted to make the
            # colorbar look right
            panelhandle = m.imshow(dat_im, cmap=palette, zorder=0.,
                                   vmin=vmin, vmax=vmax)
        else:
            panelhandle = m.imshow(dat_im, cmap=palette, zorder=3.,
                                   vmin=vmin, vmax=vmax, interpolation='none')
        #panelhandle = m.pcolormesh(x1, y1, dat, linewidth=0., cmap=palette, vmin=vmin, vmax=vmax, alpha=ALPHA, rasterized=True, zorder=2.);
        #panelhandle.set_edgecolors('face')
        # add colorbar
        cbfmt = '%1.1f'
        if vmax is not None and vmin is not None:
            if (vmax - vmin) < 1.:
                cbfmt = '%1.2f'
            elif vmax > 5.:  # (vmax - vmin) > len(clev):
                cbfmt = '%1.0f'

        #norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
        if scaletype.lower() == 'binned':
            cbar = fig.colorbar(panelhandle, spacing='proportional',
                                ticks=clev, boundaries=clev, fraction=0.036,
                                pad=0.04, format=cbfmt, extend='both')
            #cbar1 = ColorbarBase(cbar.ax, cmap=palette, norm=norm, spacing='proportional', ticks=clev, boundaries=clev, fraction=0.036, pad=0.04, format=cbfmt, extend='both', extendfrac='auto')

        else:
            cbar = fig.colorbar(panelhandle, fraction=0.036, pad=0.04,
                                extend='both', format=cbfmt)
            #cbar1 = ColorbarBase(cbar.ax, cmap=palette, norm=norm, fraction=0.036, pad=0.04, extend='both', extendfrac='auto', format=cbfmt)

        if topodata is not None:
            panelhandle.remove()

        cbar.set_label(label1, fontsize=10)
        cbar.ax.tick_params(labelsize=8)

        parallels = m.drawparallels(getMapLines(bymin, bymax, 3),
                                    labels=[1, 0, 0, 0], linewidth=0.5,
                                    labelstyle='+/-', fontsize=9, xoffset=-0.8,
                                    color='gray', zorder=100.)
        m.drawmeridians(getMapLines(bxmin, bxmax, 3), labels=[0, 0, 0, 1],
                        linewidth=0.5, labelstyle='+/-', fontsize=9,
                        color='gray', zorder=100.)
        for par in parallels:
            try:
                parallels[par][1][0].set_rotation(90)
            except:
                pass

        #draw roads on the map, if they were provided to us
        if maproads is True and roadslist is not None:
            try:
                for road in roadslist:
                    try:
                        xy = list(road['geometry']['coordinates'])
                        roadx, roady = list(zip(*xy))
                        mapx, mapy = m(roadx, roady)
                        m.plot(mapx, mapy, roadcolor, lw=0.5, zorder=9)
                    except:
                        continue
            except Exception as e:
                print(('Failed to plot roads, %s' % e))

        #add city names to map
        if mapcities is True and cityfile is not None:
            try:
                fontname = 'Arial'
                fontsize = 8
                if k == 0:  # Only need to choose cities first time and then apply to rest
                    fcities = bcities.limitByMapCollision(
                        m, fontname=fontname, fontsize=fontsize)
                    ctlats, ctlons, names = fcities.getCities()
                    cxis, cyis = m(ctlons, ctlats)
                for ctlat, ctlon, cxi, cyi, name in zip(ctlats, ctlons, cxis, cyis, names):
                    m.scatter(ctlon, ctlat, c='k', latlon=True, marker='.',
                              zorder=100000)
                    ax.text(cxi, cyi, name, fontname=fontname,
                            fontsize=fontsize, zorder=100000)
            except Exception as e:
                print('Failed to plot cities, %s' % e)

        #draw star at epicenter
        plt.sca(ax)
        if edict is not None:
            elat, elon = edict['lat'], edict['lon']
            ex, ey = m(elon, elat)
            plt.plot(ex, ey, '*', markeredgecolor='k', mfc='None', mew=1.0,
                     ms=15, zorder=10000.)

        m.drawmapboundary(fill_color=watercolor)

        m.fillcontinents(color=clear_color, lake_color=watercolor)
        m.drawrivers(color=watercolor)
        ##m.drawcoastlines()

        #draw country boundaries
        m.drawcountries(color=countrycolor, linewidth=1.0)

        #add map scale
        m.drawmapscale((bxmax+bxmin)/2., (bymin+(bymax-bymin)/9.), clon, clat, np.round((((bxmax-bxmin)*111)/5)/10.)*10, barstyle='fancy', zorder=10)

        # Add border
        autoAxis = ax.axis()
        rec = Rectangle((autoAxis[0]-0.7, autoAxis[2]-0.2), (autoAxis[1]-autoAxis[0])+1, (autoAxis[3]-autoAxis[2])+0.4, fill=False, lw=1, zorder=1e8)
        rec = ax.add_patch(rec)
        rec.set_clip_on(False)

        plt.draw()

        if sref is not None:
            label2 = '%s\nsource: %s' % (label1, sref)  # '%s\n' % label1 + r'{\fontsize{10pt}{3em}\selectfont{}%s}' % sref  #
        else:
            label2 = label1
        plt.title(label2, axes=ax, fontsize=fontsizesub)

        #draw scenario watermark, if scenario
        if isScenario:
            plt.sca(ax)
            cx, cy = m(clon, clat)
            plt.text(cx, cy, 'SCENARIO', rotation=45, alpha=0.10, size=72, ha='center', va='center', color='red')

        #if ds: # Could add this to print "downsampled" on map
        #    plt.text()

        if k == 1 and rowpan == 1:
            # adjust single level plot
            axsize = ax.get_window_extent().transformed(fig.dpi_scale_trans.inverted())
            ht2 = axsize.height
            fig.set_figheight(ht2*1.6)
        else:
            plt.tight_layout()

        # Make room for suptitle - tight layout doesn't account for it
        plt.subplots_adjust(top=0.92)

    if printparam is True:
        try:
            fig = plt.gcf()
            dictionary = grids['model']['description']['parameters']
            paramstring = 'Model parameters: '
            halfway = np.ceil(len(dictionary)/2.)
            for i, key in enumerate(dictionary):
                if i == halfway and colpan == 1:
                    paramstring += '\n'
                paramstring += ('%s = %s; ' % (key, dictionary[key]))
            print(paramstring)
            fig.text(0.01, 0.015, paramstring, fontsize=fontsizesmallest)
            plt.draw()
        except:
            print('Could not display model parameters')

    if edict is not None:
        eventid = edict['eventid']
    else:
        eventid = ''

    time1 = datetime.datetime.utcnow().strftime('%d%b%Y_%H%M')
    outfile = os.path.join(outfolder, '%s_%s_%s.pdf' % (eventid, suptitle, time1))
    pngfile = os.path.join(outfolder, '%s_%s_%s.png' % (eventid, suptitle, time1))

    if savepdf is True:
        print('Saving map output to %s' % outfile)
        plt.savefig(outfile, dpi=300)
    if savepng is True:
        print('Saving map output to %s' % pngfile)
        plt.savefig(pngfile)
    if showplots is True:
        plt.show()
    else:
        plt.close(fig)

    return newgrids
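
# Hedged usage sketch (not part of the original source): the mapping
# function's name (modelMap) is an assumption, and all paths below are
# hypothetical. maplayers would be the dictionary of layers returned by a
# model's calculate() method.
trimmed = modelMap(maplayers, shakefile='grid.xml',
                   suptitle='Example model run',
                   boundaries='zoom', zthresh=0.05,
                   topofile='topo.grd', outputdir='outputs',
                   savepdf=True, savepng=False)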
Example #29
    def drawContourMap(self, imt, outfolder, cmin=None, cmax=None):
        """
        Render IMT data as contours over topography, with oceans, coastlines,
        etc.

        Args:
            imt (str): Intensity measure type to render (e.g. 'MMI', 'PGV',
                'PGA').
            outfolder (str): Path to directory where output map should be
                saved.
            cmin (float): Optional lower bound for color scaling.
            cmax (float): Optional upper bound for color scaling.

        Returns:
            str: Path to output IMT map.
        """
        if self.contour_colormap is None:
            raise Exception('MapMaker.setGMTColormap() has not been called.')
        t0 = time.time()
        # resample shakemap to topogrid
        # get the geodict for the topo file
        topodict = GMTGrid.getFileGeoDict(self.topofile)[0]
        # get the geodict for the ShakeMap
        comp = self.container.getComponents(imt)[0]
        imtdict = self.container.getIMTGrids(imt, comp)
        imtgrid = imtdict['mean']
        smdict = imtgrid.getGeoDict()
        # get a geodict that is aligned with topo, but inside shakemap
        sampledict = topodict.getBoundsWithin(smdict)

        imtgrid = imtgrid.interpolateToGrid(sampledict)

        gd = imtgrid.getGeoDict()

        # establish the basemap object
        m = self._setMap(gd)

        # get topo layer and project it
        topogrid = GMTGrid.load(
            self.topofile, samplegeodict=sampledict, resample=False)
        topodata = topogrid.getData().copy()
        ptopo = self._projectGrid(topodata, m, gd)

        # get contour layer and project it
        imtdata = imtgrid.getData().copy()

        # convert units if necessary
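        # (MMI is already linear; PGV is stored as ln(cm/s) and PGA/SA as
        # ln(g), hence np.exp() below, with the factor of 100 converting
        # g to %g.)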
        if imt == 'MMI':
            pass
        elif imt == 'PGV':
            imtdata = np.exp(imtdata)
        else:
            imtdata = np.exp(imtdata) * 100

        pimt = self._projectGrid(imtdata, m, gd)

        # get the draped intensity data
        hillshade = self._getShaded(ptopo)

        # draw the draped intensity data
        m.imshow(hillshade, interpolation='none', zorder=IMG_ZORDER)

        # draw the contours of imt data
        xmin = gd.xmin
        if gd.xmax < gd.xmin:
            xmin -= 360
        lons = np.linspace(xmin, gd.xmax, gd.nx)
        # backwards so it plots right side up
        lats = np.linspace(gd.ymax, gd.ymin, gd.ny)
        x, y = m(*np.meshgrid(lons, lats))
        pimt = gaussian_filter(pimt, 5.0)
        dmin = pimt.min()
        dmax = pimt.max()
        levels = self.getContourLevels(dmin, dmax, imt)
        cs = m.contour(x, y, np.flipud(pimt), colors='w',
                       cmap=None, levels=levels, zorder=CONTOUR_ZORDER)
        clabels = plt.clabel(cs, colors='k', fmt='%.1f',
                             fontsize=8.0, zorder=CONTOUR_ZORDER)
        for cl in clabels:
            bbox = dict(boxstyle="round", facecolor='white', edgecolor='w')
            cl.set_bbox(bbox)
            cl.set_zorder(CONTOUR_ZORDER)

        # draw country/state boundaries
        self._drawBoundaries(m)

        # draw lakes
        self._drawLakes(m, gd)

        # draw oceans (pre-processed with islands taken out)
        t1 = time.time()
        self._drawOceans(m, gd)
        t2 = time.time()
        self.logger.debug('%.1f seconds to render oceans.' % (t2 - t1))

        # draw coastlines
        self._drawCoastlines(m, gd)

        # draw meridians, parallels, labels, ticks
        self._drawGraticules(m, gd)

        # draw filled symbols for MMI and instrumented measures
        self._drawStations(m, fill=True, imt=imt)

        # draw map scale
        self._drawMapScale(m, gd)

        # draw fault polygon, if present
        self._drawFault(m)  # get the fault loaded

        # draw epicenter
        origin = self.fault.getOrigin()
        hlon = origin.lon
        hlat = origin.lat
        m.plot(hlon, hlat, 'k*', latlon=True, fillstyle='none',
               markersize=22, mew=1.2, zorder=EPICENTER_ZORDER)

        # draw cities
        # reduce the number of cities to those whose labels don't collide
        # set up cities
        if self.city_cols is not None:
            self.cities = self.cities.limitByBounds(
                (gd.xmin, gd.xmax, gd.ymin, gd.ymax))
            self.cities = self.cities.limitByGrid(
                nx=self.city_cols, ny=self.city_rows,
                cities_per_grid=self.cities_per_grid)
            if 'Times New Roman' in self.cities._fontlist:
                font = 'Times New Roman'
            else:
                font = 'DejaVu Sans'
            self.cities = self.cities.limitByMapCollision(m, fontname=font)
        self.cities.renderToMap(m.ax, zorder=CITIES_ZORDER)

        # draw title and supertitle
        self._drawTitle(imt)

        # save plot to file
        fileimt = oq_to_file(imt)
        plt.draw()
        outfile = os.path.join(outfolder, 'contour_%s.pdf' %
                               (fileimt))
        plt.savefig(outfile)
        tn = time.time()
        self.logger.debug('%.1f seconds to render entire map.' % (tn - t0))
        return outfile
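
# Hedged usage sketch (not part of the original source): assumes `mapper` is
# an already-configured MapMaker instance (topo file, container, cities, and
# a GMT colormap set beforehand via setGMTColormap); the output folder is
# hypothetical.
contour_file = mapper.drawContourMap('PGA', '/tmp/maps')
print('Contour map written to %s' % contour_file)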
def check_input_extents(config, shakefile=None, bounds=None):
    """Make sure all input files exist and cover the extent desired

    Args:
        config: ConfigObj of a single model
        shakefile: path to ShakeMap grid.xml file (used for bounds). If not
            provided, bounds must be provided
        bounds: dictionary of bounds with keys: 'xmin', 'xmax', 'ymin', 'ymax'

    Returns:
        tuple containing:
            notcovered: list of files that do not cover the entire area
                defined by bounds or shakefile
            newbounds: new dictionary of bounds of subarea of original
                bounds or shakefile extent that is covered by all input files
    """
    if shakefile is None and bounds is None:
        raise Exception('Must define either a shakemap file or bounds')
    modelname = config.keys()[0]
    # Make dummy geodict to use
    if bounds is None:
        evdict = ShakeGrid.getFileGeoDict(shakefile)
    else:
        evdict = GeoDict.createDictFromBox(
            bounds['xmin'], bounds['xmax'],
            bounds['ymin'], bounds['ymax'],
            0.00001, 0.00001, inside=False)

    # Check extents of all input layers
    notcovered = []
    notcovgdicts = []
    newbounds = None
    for item, value in config[modelname]['layers'].items():
        if 'file' in value.keys():
            filelook = value['file']
            if getFileType(filelook) == 'gmt':
                tmpgd, _ = GMTGrid.getFileGeoDict(filelook)
            else:
                tmpgd, _ = GDALGrid.getFileGeoDict(filelook)
            # See if tempgd contains evdict
            contains = tmpgd.contains(evdict)
            if not contains:
                notcovered.append(filelook)
                notcovgdicts.append(tmpgd)
                # print(filelook)
    if len(notcovered) > 0:
        # Figure out what bounds COULD be run
        xmins = [gd.xmin for gd in notcovgdicts]
        xmaxs = [gd.xmax for gd in notcovgdicts]
        ymins = [gd.ymin for gd in notcovgdicts]
        ymaxs = [gd.ymax for gd in notcovgdicts]

        # Set in by a buffer of 0.05 degrees because mapio doesn't like
        # when bounds are exactly the same for getBoundsWithin
        newbounds = dict(xmin=evdict.xmin + 0.05,
                         xmax=evdict.xmax - 0.05,
                         ymin=evdict.ymin + 0.05,
                         ymax=evdict.ymax - 0.05)
        # Which one is the problem?
        if evdict.xmin < np.max(xmins):
            newbounds['xmin'] = np.max(xmins) + 0.05
        if evdict.xmax > np.min(xmaxs):
            newbounds['xmax'] = np.min(xmaxs) - 0.05
        if evdict.ymin < np.max(ymins):
            newbounds['ymin'] = np.max(ymins) + 0.05
        if evdict.ymax > np.min(ymaxs):
            newbounds['ymax'] = np.min(ymaxs) - 0.05

        # See if this is a possible extent
        try:
            test = GeoDict.createDictFromBox(
                newbounds['xmin'], newbounds['xmax'],
                newbounds['ymin'], newbounds['ymax'],
                0.00001, 0.00001, inside=False)
        except BaseException:
            print('Cannot make new bounds that will work')
            newbounds = None

    return notcovered, newbounds
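
# Hedged usage sketch (not part of the original source): the config and
# ShakeMap grid paths are hypothetical placeholders.
from configobj import ConfigObj

config = ConfigObj('model_config.ini')  # single-model config file
notcovered, newbounds = check_input_extents(config, shakefile='grid.xml')
if notcovered:
    print('Input layers that do not cover the full extent:')
    for lyr in notcovered:
        print('  %s' % lyr)
    if newbounds is not None:
        print('A run may still be possible over subarea: %s' % newbounds)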
Example #31
def getFileGeoDict(filename, gridtype):
    if gridtype == 'gmt':
        fgeodict, tmp = GMTGrid.getFileGeoDict(filename)
    else:
        fgeodict, tmp = GDALGrid.getFileGeoDict(filename)
    return fgeodict
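
# Hedged usage sketch (not part of the original source): the file path is a
# hypothetical placeholder.
fdict = getFileGeoDict('slope.flt', 'esri')
print('Bounds: %.4f to %.4f (lon), %.4f to %.4f (lat)'
      % (fdict.xmin, fdict.xmax, fdict.ymin, fdict.ymax))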
Example #32
    def __init__(self,
                 shakefile,
                 config,
                 uncertfile=None,
                 saveinputs=False,
                 slopefile=None,
                 slopediv=1.,
                 bounds=None,
                 numstd=1):
        """Set up the logistic model
        # ADD BOUNDS TO THIS MODEL
        :param config: configobj (config .ini file read in using configobj) defining the model and its inputs. Only one
          model should be described in each config file.
        :type config: dictionary
        :param shakefile: Full file path to shakemap.xml file for the event of interest
        :type shakefile: string
        :param uncertfile: Full file path to xml file of shakemap uncertainties
        :type uncertfile: string
        :param saveinputs: if True, saves all the input layers as Grid2D objects in addition to the model
          if false, it will just output the model
        :type saveinputs: boolean
        :param slopefile: optional file path to slopefile that will be resampled to the other input files for applying
          thresholds OVERWRITES VALUE IN CONFIG
        :type slopefile: string
        :param slopediv: number to divide slope by to get to degrees (usually will be default
          of 1.)
        :type slopediv: float
        :param numstd: number of +/- standard deviations to use if uncertainty is computed (uncertfile is not None)

        """
        mnames = getLogisticModelNames(config)
        if len(mnames) == 0:
            raise Exception(
                'No config file found or problem with config file format')
        if len(mnames) > 1:
            raise Exception(
                'Config file contains more than one model, which is no '
                'longer allowed; update your config file to the newer format')
        self.model = mnames[0]
        self.config = config
        cmodel = config[self.model]
        self.modeltype = cmodel['gfetype']
        self.coeffs = validateCoefficients(cmodel)
        self.layers = validateLayers(
            cmodel)  # key = layer name, value = file name
        self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)
        self.gmused = [
            value for term, value in cmodel['terms'].items()
            if 'pga' in value.lower() or 'pgv' in value.lower()
            or 'mmi' in value.lower()
        ]
        self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
        self.numstd = numstd
        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception(
                'You must specify a base layer corresponding to one of the files in the layer section.'
            )
        self.saveinputs = saveinputs
        if slopefile is None:
            try:
                self.slopefile = cmodel['slopefile']
            except:
                print(
                    'Could not find slopefile term in config, no slope thresholds will be applied\n'
                )
                self.slopefile = None
        else:
            self.slopefile = slopefile
        self.slopediv = slopediv

        #get the geodict for the shakemap
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
            shakefile)
        #YEAR = eventdict['event_timestamp'].year
        MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]
        #DAY = eventdict['event_timestamp'].day
        #HOUR = eventdict['event_timestamp'].hour

        #now find the layer that is our base layer and get the largest bounds we can guarantee not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        elif ftype == 'gmt':
            basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        else:
            raise Exception(
                'All predictor variable grids must be a valid GMT or ESRI file type'
            )

        #now load the shakemap, resampling and padding if necessary
        if ShakeGrid.getFileGeoDict(shakefile, adjust='res') == sampledict:
            self.shakemap = ShakeGrid.load(shakefile, adjust='res')
            flag = 1
        else:
            self.shakemap = ShakeGrid.load(shakefile,
                                           samplegeodict=sampledict,
                                           resample=True,
                                           doPadding=True,
                                           adjust='res')
            flag = 0

        # take uncertainties into account
        if uncertfile is not None:
            try:
                if flag == 1:
                    self.uncert = ShakeGrid.load(uncertfile, adjust='res')
                else:
                    self.uncert = ShakeGrid.load(uncertfile,
                                                 samplegeodict=sampledict,
                                                 resample=True,
                                                 doPadding=True,
                                                 adjust='res')
            except:
                print(
                    'Could not read uncertainty file, ignoring uncertainties')
                self.uncert = None
        else:
            self.uncert = None

        #load the predictor layers into a dictionary
        self.layerdict = {}  # key = layer name, value = grid object
        for layername, layerfile in self.layers.items():
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            if ftype == 'gmt':
                                if GMTGrid.getFileGeoDict(
                                        layerfile)[0] == sampledict:
                                    lyr = GMTGrid.load(layerfile)
                                else:
                                    lyr = GMTGrid.load(layerfile,
                                                       sampledict,
                                                       resample=True,
                                                       method=interp,
                                                       doPadding=True)
                            elif ftype == 'esri':
                                if GDALGrid.getFileGeoDict(
                                        layerfile)[0] == sampledict:
                                    lyr = GDALGrid.load(layerfile)
                                else:
                                    lyr = GDALGrid.load(layerfile,
                                                        sampledict,
                                                        resample=True,
                                                        method=interp,
                                                        doPadding=True)
                            else:
                                msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (
                                    layername, layerfile)
                                raise Exception(msg)
                            self.layerdict[layername] = lyr
            else:
                #first, figure out what kind of file we have (or is it a directory?)
                ftype = getFileType(layerfile)
                interp = self.interpolations[layername]
                if ftype == 'gmt':
                    if GMTGrid.getFileGeoDict(layerfile)[0] == sampledict:
                        lyr = GMTGrid.load(layerfile)
                    else:
                        lyr = GMTGrid.load(layerfile,
                                           sampledict,
                                           resample=True,
                                           method=interp,
                                           doPadding=True)
                elif ftype == 'esri':
                    if GDALGrid.getFileGeoDict(layerfile)[0] == sampledict:
                        lyr = GDALGrid.load(layerfile)
                    else:
                        lyr = GDALGrid.load(layerfile,
                                            sampledict,
                                            resample=True,
                                            method=interp,
                                            doPadding=True)
                else:
                    msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (
                        layername, layerfile)
                    raise Exception(msg)
                self.layerdict[layername] = lyr

        shapes = {}
        for layername, layer in self.layerdict.items():
            shapes[layername] = layer.getData().shape

        self.nuggets = [str(self.coeffs['b0'])]

        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)
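        # The assembled equation is a plain-text Python expression that is
        # later eval()'d in calculate(). Illustratively (coefficients and
        # layer names here are hypothetical), it might read:
        #   "-3.6 + (0.5 * self.layerdict['slope'].getData()) + "
        #   "(1.5 * np.log(self.shakemap.getLayer('pgv').getData()))"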

        if self.uncert is not None:
            self.nugmin = copy.copy(self.nuggets)
            self.nugmax = copy.copy(self.nuggets)
            # Find the terms with ShakeMap inputs and, for each, build
            # minimum and maximum versions that shift the ground motion
            # down/up by numstd standard deviations (in log space).
            for k, nug in enumerate(self.nuggets):
                for gm in ('pga', 'pgv', 'mmi'):
                    base = "self.shakemap.getLayer('%s').getData()" % gm
                    if base not in nug:
                        continue
                    std = "self.uncert.getLayer('std%s').getData()" % gm
                    self.nugmin[k] = self.nugmin[k].replace(
                        base, "(np.exp(np.log(%s) - self.numstd * %s))"
                        % (base, std))
                    self.nugmax[k] = self.nugmax[k].replace(
                        base, "(np.exp(np.log(%s) + self.numstd * %s))"
                        % (base, std))
                    break
            self.equationmin = ' + '.join(self.nugmin)
            self.equationmax = ' + '.join(self.nugmax)
        else:
            self.equationmin = None
            self.equationmax = None

        self.geodict = self.shakemap.getGeoDict()

        try:
            self.slopemin = float(config[self.model]['slopemin'])
            self.slopemax = float(config[self.model]['slopemax'])
        except:
            print(
                'Could not find slopemin and/or slopemax in config, no '
                'limits will be applied')
            self.slopemin = 0.
            self.slopemax = 90.
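
# Hedged usage sketch (not part of the original source): assumes this
# __init__ belongs to a class named LogisticModel (as suggested by
# getLogisticModelNames above); the config and ShakeMap paths are
# hypothetical placeholders.
from configobj import ConfigObj

config = ConfigObj('model_config.ini')
lm = LogisticModel('grid.xml', config, saveinputs=True, numstd=1)
maplayers = lm.calculate()  # see the calculate() method below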
def run_one_old_shakemap(eventid, topo=True, genex=True):
    """
    Convenience method for running an old (v3.5) ShakeMap with new estimates.
    This allows us to generate all of the products with the old code (since
    the new code cannot do this yet) while using the new code to compute the
    ground motions.

    Args:
        eventid (str): Specifies the id of the event to process.
        topo (bool): Include topography shading?
        genex (bool): Should genex be run?

    Returns:
        dictionary: Each entry is the log file for the different ShakeMap3.5
            calls.

    """
    config = ConfigObj(os.path.join(os.path.expanduser('~'), 'scenarios.conf'))
    shakehome = config['system']['shakehome']
    log = {}
    shakebin = os.path.join(shakehome, 'bin')
    datadir = os.path.join(shakehome, 'data')
    # Read event.xml
    eventdir = os.path.join(datadir, eventid)
    inputdir = os.path.join(eventdir, 'input')
    xml_file = os.path.join(inputdir, 'event.xml')
    # Read in event.xml
    event = read_event_file(xml_file)

    # Read in gmpe set name
    gmpefile = open(os.path.join(inputdir, "gmpe_set_name.txt"), "r")
    set_name = gmpefile.read()
    gmpefile.close()

    # Add scenario-specific fields:
    eventtree = ET.parse(xml_file)
    eventroot = eventtree.getroot()
    for eq in eventroot.iter('earthquake'):
        description = eq.attrib['description']
        directivity = eq.attrib['directivity']
        if 'reference' in eq.attrib.keys():
            reference = eq.attrib['reference']
        else:
            reference = ''

    event['description'] = description
    event['directivity'] = directivity
    event['reference'] = reference

    grd = os.path.join(inputdir, 'pgv_estimates.grd')
    gdict = GMTGrid.getFileGeoDict(grd)[0]

    # Tolerance is a bit hacky but necessary to prevent GMT
    # from barfing because it thinks that the estimates files
    # do not cover the desired area sampled by grind's call
    # with grdsample.
    tol = gdict.dx
    W = gdict.xmin + tol
    E = gdict.xmax - tol
    S = gdict.ymin + tol
    N = gdict.ymax - tol

    # Put into grind.conf (W S E N)
    confdir = os.path.join(eventdir, 'config')
    if not os.path.isdir(confdir):
        os.mkdir(confdir)

    # need to copy default grind.conf
    default_grind_conf = os.path.join(shakehome, 'config', 'grind.conf')
    grind_conf = os.path.join(confdir, 'grind.conf')
    shutil.copyfile(default_grind_conf, grind_conf)

    # Set strictbound and resolution to match estimates.grd files
    with open(grind_conf, 'a') as f:
        f.write('x_grid_interval : %.16f\n' % gdict.dx)
        f.write('y_grid_interval : %.16f\n' % gdict.dy)
        f.write('strictbound : %.9f %.9f %.9f %.9f\n' % (W, S, E, N))
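    # The appended block will look like (values here are illustrative only):
    #   x_grid_interval : 0.0083333333333333
    #   y_grid_interval : 0.0083333333333333
    #   strictbound : -122.625000000 36.533000000 -121.958000000 37.925000000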

    # Grind
    callgrind = os.path.join(shakebin, 'grind') + \
        ' -event ' + eventid + ' -psa'
    rc, so, se = get_command_output(callgrind)
    log['grind'] = {'rc': rc, 'so': so, 'se': se}

    # Add GMPE set name to info.json
    cmd = os.path.join(shakebin, 'edit_info') + ' -event ' + eventid + \
        ' -tag gmpe_reference' + ' -value ' + set_name
    rc, so, se = get_command_output(cmd)
    log['edit_info'] = {'rc': rc, 'so': so, 'se': se}

    # Tag
    calltag = os.path.join(shakebin, 'tag') + \
        ' -event ' + eventid + ' -name \"' + event['locstring'] + ' - ' + \
        event['description'] + '\"'
    rc, so, se = get_command_output(calltag)
    log['tag'] = {'rc': rc, 'so': so, 'se': se}

    # Copy rock_grid.xml from input to output directory
    rg_scr = os.path.join(inputdir, 'rock_grid.xml')
    rg_dst = os.path.join(eventdir, 'output', 'rock_grid.xml')
    cmd = shutil.copy(rg_scr, rg_dst)

    # Mapping
    if topo is True:
        topostr = '-itopo'
    else:
        topostr = ''
    callmapping = os.path.join(shakebin, 'mapping') + ' -event ' + \
        eventid + ' -timestamp -nohinges ' + topostr
    rc, so, se = get_command_output(callmapping)
    log['mapping'] = {'rc': rc, 'so': so, 'se': se}

    # Genex
    if genex is True:
        callgenex = os.path.join(shakebin, 'genex') + ' -event ' + \
            eventid + ' -metadata -zip -verbose -shape shape -shape hazus'
        rc, so, se = get_command_output(callgenex)
        log['genex'] = {'rc': rc, 'so': so, 'se': se}

    return log
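
# Hedged usage sketch (not part of the original source): the event id is a
# hypothetical placeholder, and this assumes ~/scenarios.conf and the
# ShakeMap 3.5 installation it points to exist.
log = run_one_old_shakemap('us1000abcd', topo=True, genex=False)
for step, result in log.items():
    print('%s: return code %s' % (step, result['rc']))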
Example #34
    def calculate(self):
        """Calculate the model

        :returns:
            a dictionary containing the model results (and the model inputs
            if saveinputs was set to True when the class was set up); see
            <https://github.com/usgs/groundfailure#api-for-model-output>
            for a description of the structure of this output

        """
        X = eval(self.equation)
        P = 1 / (1 + np.exp(-X))
        if 'vs30max' in self.config[self.model].keys():
            vs30 = self.layerdict['vs30'].getData()
            P[vs30 > float(self.config[self.model]['vs30max'])] = 0.0
        if 'minpgv' in self.config[self.model].keys():
            pgv = self.shakemap.getLayer('pgv').getData()
            P[pgv < float(self.config[self.model]['minpgv'])] = 0.0
        if 'coverage' in self.config[self.model].keys():
            eqn = self.config[self.model]['coverage']['eqn']
            ind = copy.copy(P)
            P = eval(eqn)
        if self.uncert is not None:
            Xmin = eval(self.equationmin)
            Xmax = eval(self.equationmax)
            Pmin = 1 / (1 + np.exp(-Xmin))
            Pmax = 1 / (1 + np.exp(-Xmax))
            if 'vs30max' in self.config[self.model].keys():
                vs30 = self.layerdict['vs30'].getData()
                Pmin[vs30 > float(self.config[self.model]['vs30max'])] = 0.0
                Pmax[vs30 > float(self.config[self.model]['vs30max'])] = 0.0
            if 'minpgv' in self.config[self.model].keys():
                pgv = self.shakemap.getLayer('pgv').getData()
                Pmin[pgv < float(self.config[self.model]['minpgv'])] = 0.0
                Pmax[pgv < float(self.config[self.model]['minpgv'])] = 0.0
            if 'coverage' in self.config[self.model].keys():
                eqnmin = eqn.replace('P', 'Pmin')
                eqnmax = eqn.replace('P', 'Pmax')
                Pmin = eval(eqnmin)
                Pmax = eval(eqnmax)
        if self.slopefile is not None:
            ftype = getFileType(self.slopefile)
            sampledict = self.shakemap.getGeoDict()
            if ftype == 'gmt':
                if GMTGrid.getFileGeoDict(self.slopefile)[0] == sampledict:
                    slope = GMTGrid.load(
                        self.slopefile).getData() / self.slopediv
                else:
                    slope = GMTGrid.load(
                        self.slopefile,
                        sampledict,
                        resample=True,
                        method='linear',
                        doPadding=True).getData() / self.slopediv
                # Apply slope min/max limits
                print('applying slope thresholds')
                P[slope > self.slopemax] = 0.
                P[slope < self.slopemin] = 0.
                if self.uncert is not None:
                    Pmin[slope > self.slopemax] = 0.
                    Pmin[slope < self.slopemin] = 0.
                    Pmax[slope > self.slopemax] = 0.
                    Pmax[slope < self.slopemin] = 0.
            elif ftype == 'esri':
                if GDALGrid.getFileGeoDict(self.slopefile)[0] == sampledict:
                    slope = GDALGrid.load(
                        self.slopefile).getData() / self.slopediv
                else:
                    slope = GDALGrid.load(
                        self.slopefile,
                        sampledict,
                        resample=True,
                        method='linear',
                        doPadding=True).getData() / self.slopediv
                # Apply slope min/max limits
                print('applying slope thresholds')
                P[slope > self.slopemax] = 0.
                P[slope < self.slopemin] = 0.
                if self.uncert is not None:
                    Pmin[slope > self.slopemax] = 0.
                    Pmin[slope < self.slopemin] = 0.
                    Pmax[slope > self.slopemax] = 0.
                    Pmax[slope < self.slopemin] = 0.
            else:
                print(
                    'Slope file %s does not appear to be a valid GMT or ESRI '
                    'file, not applying any slope thresholds.'
                    % (self.slopefile))
        else:
            print('No slope file provided, slope thresholds not applied')
        # Stuff into Grid2D object
        temp = self.shakemap.getShakeDict()
        shakedetail = '%s_ver%s' % (temp['shakemap_id'],
                                    temp['shakemap_version'])
        description = {
            'name': self.modelrefs['shortref'],
            'longref': self.modelrefs['longref'],
            'units': 'probability',
            'shakemap': shakedetail,
            'parameters': {
                'slopemin': self.slopemin,
                'slopemax': self.slopemax
            }
        }
        Pgrid = Grid2D(P, self.geodict)
        rdict = collections.OrderedDict()
        rdict['model'] = {
            'grid': Pgrid,
            'label': ('%s Probability') % (self.modeltype.capitalize()),
            'type': 'output',
            'description': description
        }
        if self.uncert is not None:
            rdict['modelmin'] = {
                'grid':
                Grid2D(Pmin, self.geodict),
                'label': ('%s Probability (-%0.1f std ground motion)') %
                (self.modeltype.capitalize(), self.numstd),
                'type':
                'output',
                'description':
                description
            }
            rdict['modelmax'] = {
                'grid':
                Grid2D(Pmax, self.geodict),
                'label': ('%s Probability (+%0.1f std ground motion)') %
                (self.modeltype.capitalize(), self.numstd),
                'type':
                'output',
                'description':
                description
            }

        if self.saveinputs is True:
            for layername, layergrid in list(self.layerdict.items()):
                units = self.units[layername]
                if units is None:
                    units = ''
                rdict[layername] = {
                    'grid': layergrid,
                    'label': '%s (%s)' % (layername, units),
                    'type': 'input',
                    'description': {
                        'units': units,
                        'shakemap': shakedetail
                    }
                }
            for gmused in self.gmused:
                if 'pga' in gmused:
                    units = '%g'
                    getkey = 'pga'
                elif 'pgv' in gmused:
                    units = 'cm/s'
                    getkey = 'pgv'
                elif 'mmi' in gmused:
                    units = 'intensity'
                    getkey = 'mmi'
                else:
                    # Layer is derived from several input layers, skip outputting this layer
                    continue
                if getkey in rdict:
                    continue
                layer = self.shakemap.getLayer(getkey)
                rdict[getkey] = {
                    'grid': layer,
                    'label': '%s (%s)' % (getkey.upper(), units),
                    'type': 'input',
                    'description': {
                        'units': units,
                        'shakemap': shakedetail
                    }
                }
                if self.uncert is not None:
                    layer1 = np.exp(
                        np.log(layer.getData()) -
                        self.uncert.getLayer('std' + getkey).getData())
                    rdict[getkey + 'modelmin'] = {
                        'grid':
                        Grid2D(layer1, self.geodict),
                        'label':
                        '%s - %0.1f std (%s)' %
                        (getkey.upper(), self.numstd, units),
                        'type':
                        'input',
                        'description': {
                            'units': units,
                            'shakemap': shakedetail
                        }
                    }
                    layer2 = np.exp(
                        np.log(layer.getData()) +
                        self.uncert.getLayer('std' + getkey).getData())
                    rdict[getkey + 'modelmax'] = {
                        'grid':
                        Grid2D(layer2, self.geodict),
                        'label':
                        '%s + %0.1f std (%s)' %
                        (getkey.upper(), self.numstd, units),
                        'type':
                        'input',
                        'description': {
                            'units': units,
                            'shakemap': shakedetail
                        }
                    }
        return rdict
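
# A minimal sketch (hypothetical values) of the probability transform used in
# calculate() above: the linear predictor X built from the logistic equation
# is mapped to a probability, and a config-supplied coverage equation may then
# rescale it (the exact coverage equation is model-specific).
import numpy as np

X = np.array([-2.0, 0.0, 2.0])   # hypothetical linear-predictor values
P = 1 / (1 + np.exp(-X))         # logistic transform -> [0.119, 0.5, 0.881]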
Exemple #35
    def drawIntensityMap(self,outfolder):
        if self.intensity_colormap is None:
            raise ShakeMapException('MapMaker.setGMTColormap() has not been called.')
        t0 = time.time()
        #resample shakemap to topogrid
        #get the geodict for the topo file
        topodict = GMTGrid.getFileGeoDict(self.topofile)
        #get the geodict for the ShakeMap
        smdict = self.shakemap.getGeoDict()
        #get a geodict that is aligned with topo, but inside shakemap
        sampledict = topodict.getBoundsWithin(smdict)

        self.shakemap = self.shakemap.interpolateToGrid(sampledict)

        gd = self.shakemap.getGeoDict()

        #establish the basemap object
        m = self._setMap(gd)

        #get topo layer and project it
        topogrid = GMTGrid.load(self.topofile,samplegeodict=sampledict,resample=False)
        topodata = topogrid.getData().copy()
        ptopo = self._projectGrid(topodata,m,gd)

        #get intensity layer and project it
        imtdata = self.shakemap.getLayer(self.imt_layer).getData().copy()
        pimt = self._projectGrid(imtdata,m,gd)

        #get the draped intensity data (intensity colors shaded by topography)
        draped_hsv = self._getDraped(pimt,ptopo)

        #draw the draped intensity data
        m.imshow(draped_hsv, interpolation='none',zorder=IMG_ZORDER)

        #draw country/state boundaries
        self._drawBoundaries(m)

        #draw whatever road data is available
        self._drawRoads(m)
        
        #draw lakes
        self._drawLakes(m,gd)

        #draw oceans (pre-processed with islands taken out)
        t1 = time.time()
        self._drawOceans(m,gd)
        t2 = time.time()
        print('%.1f seconds to render oceans.' % (t2-t1))

        #draw coastlines
        self._drawCoastlines(m,gd)

        #draw meridians, parallels, labels, ticks
        self._drawGraticules(m,gd)

        #draw map scale
        scalex = gd.xmin + (gd.xmax-gd.xmin)/5.0
        scaley = gd.ymin + (gd.ymax-gd.ymin)/10.0
        yoff = (0.007*(m.ymax-m.ymin))
        clon = (gd.xmin + gd.xmax)/2.0
        clat = (gd.ymin + gd.ymax)/2.0
        m.drawmapscale(scalex,scaley,clon,clat,length=100,barstyle='fancy',yoffset=yoff,zorder=SCALE_ZORDER)

        #draw fault polygon, if present
        self._drawFault(m) #get the fault loaded

        #draw epicenter
        hlon = self.shakemap.getEventDict()['lon']
        hlat = self.shakemap.getEventDict()['lat']
        m.plot(hlon,hlat,'k*',latlon=True,fillstyle='none',markersize=22,mew=1.2,zorder=EPICENTER_ZORDER)

        #draw cities
        #reduce the number of cities to those whose labels don't collide
        #set up cities
        if self.city_cols is not None:
            self.cities = self.cities.limitByBounds((gd.xmin,gd.xmax,gd.ymin,gd.ymax))
            self.cities = self.cities.limitByGrid(nx=self.city_cols,ny=self.city_rows,
                                                  cities_per_grid=self.cities_per_grid)
            self.cities = self.cities.limitByMapCollision(m)
        self.cities.renderToMap(m.ax,zorder=CITIES_ZORDER)

        #draw title and supertitle
        eventid = self._drawTitle()

        #draw station and macroseismic locations
        self._drawStations(m) #need stationlist object

        #save plot to file
        plt.draw()
        outfile = os.path.join(outfolder,'intensity_%s.pdf' % eventid)
        plt.savefig(outfile)
        tn = time.time()
        print('%.1f seconds to render entire map.' % (tn-t0))
        return outfile
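
# A minimal sketch of one way such topographic draping can be implemented (an
# assumption about the general approach; _getDraped's actual internals are not
# shown in this example). It assumes RGB values in [0, 1] and a positive
# hillshade array:
import numpy as np
from matplotlib.colors import rgb_to_hsv, hsv_to_rgb

def drape_hsv(rgb_intensity, hillshade):
    """Replace the value channel of colormapped intensity with a hillshade."""
    hsv = rgb_to_hsv(rgb_intensity[..., :3])      # drop any alpha channel
    hsv[..., 2] = hillshade / np.max(hillshade)   # shading scaled to [0, 1]
    return hsv_to_rgb(hsv)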
Exemple #36
    def drawContourMap(self,outfolder,cmin=None,cmax=None):
        if self.contour_colormap is None:
            raise ShakeMapException('MapMaker.setGMTColormap() has not been called.')
        t0 = time.time()
        #resample shakemap to topogrid
        #get the geodict for the topo file
        topodict = GMTGrid.getFileGeoDict(self.topofile)
        #get the geodict for the ShakeMap
        smdict = self.shakemap.getGeoDict()
        #get a geodict that is aligned with topo, but inside shakemap
        sampledict = topodict.getBoundsWithin(smdict)

        self.shakemap = self.shakemap.interpolateToGrid(sampledict)

        gd = self.shakemap.getGeoDict()

        #establish the basemap object
        m = self._setMap(gd)

        #get topo layer and project it
        topogrid = GMTGrid.load(self.topofile,samplegeodict=sampledict,resample=False)
        topodata = topogrid.getData().copy()
        ptopo = self._projectGrid(topodata,m,gd)

        #get contour layer and project it
        imtdata = self.shakemap.getLayer(self.contour_layer).getData().copy()
        pimt = self._projectGrid(imtdata,m,gd)

        #compute the hillshade from the projected topography
        hillshade = self._getShaded(ptopo)

        #draw the hillshade
        m.imshow(hillshade, interpolation='none',zorder=IMG_ZORDER)

        #draw the contours of imt data
        xmin = gd.xmin
        if gd.xmax < gd.xmin:
            xmin -= 360
        lons = np.linspace(xmin, gd.xmax, gd.nx)
        lats = np.linspace(gd.ymax, gd.ymin, gd.ny)  # backwards so it plots right side up
        x, y = m(*np.meshgrid(lons,lats))
        pimt = gaussian_filter(pimt,5.0)
        dmin = pimt.min()
        dmax = pimt.max()
        levels = self.getContourLevels(dmin,dmax,self.contour_layer)
        cs = m.contour(x,y,np.flipud(pimt),colors='w',cmap=None,levels=levels,zorder=CONTOUR_ZORDER)
        clabels = plt.clabel(cs,colors='k',fmt='%.1f',fontsize=8.0,zorder=CONTOUR_ZORDER)
        for cl in clabels:
            bbox = dict(boxstyle="round",facecolor='white',edgecolor='w')
            cl.set_bbox(bbox)
            cl.set_zorder(CONTOUR_ZORDER)

        #draw country/state boundaries
        self._drawBoundaries(m)

        #draw lakes
        self._drawLakes(m,gd)

        #draw oceans (pre-processed with islands taken out)
        t1 = time.time()
        self._drawOceans(m,gd)
        t2 = time.time()
        print('%.1f seconds to render oceans.' % (t2-t1))

        #draw coastlines
        self._drawCoastlines(m,gd)

        #draw meridians, parallels, labels, ticks
        self._drawGraticules(m,gd)

        #draw filled symbols for MMI and instrumented measures
        self._drawStations(m,fill=True,imt=self.contour_layer)
        
        #draw map scale
        scalex = gd.xmin + (gd.xmax-gd.xmin)/5.0
        scaley = gd.ymin + (gd.ymax-gd.ymin)/10.0
        yoff = (0.007*(m.ymax-m.ymin))
        clon = (gd.xmin + gd.xmax)/2.0
        clat = (gd.ymin + gd.ymax)/2.0
        m.drawmapscale(scalex,scaley,clon,clat,length=100,barstyle='fancy',yoffset=yoff,zorder=SCALE_ZORDER)

        #draw fault polygon, if present
        self._drawFault(m) #get the fault loaded

        #draw epicenter
        hlon = self.shakemap.getEventDict()['lon']
        hlat = self.shakemap.getEventDict()['lat']
        m.plot(hlon,hlat,'k*',latlon=True,fillstyle='none',markersize=22,mew=1.2,zorder=EPICENTER_ZORDER)

        #draw cities
        #reduce the number of cities to those whose labels don't collide
        #set up cities
        if self.city_cols is not None:
            self.cities = self.cities.limitByBounds((gd.xmin,gd.xmax,gd.ymin,gd.ymax))
            self.cities = self.cities.limitByGrid(nx=self.city_cols,ny=self.city_rows,
                                                  cities_per_grid=self.cities_per_grid)
            self.cities = self.cities.limitByMapCollision(m)
        self.cities.renderToMap(m.ax,zorder=CITIES_ZORDER)

        #draw title and supertitle
        eventid = self._drawTitle(isContour=True)

        #draw whatever road data is available
        #self._drawRoads(m)
        
        #save plot to file
        plt.draw()
        outfile = os.path.join(outfolder,'contour_%s_%s.pdf' % (self.contour_layer,eventid))
        plt.savefig(outfile)
        tn = time.time()
        print('%.1f seconds to render entire map.' % (tn-t0))
        return outfile
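
# A minimal usage sketch (assuming a fully configured MapMaker instance named
# `maker`, with colormaps, cities, topo file, and ShakeMap already set up):
#   intensity_pdf = maker.drawIntensityMap('/tmp/maps')
#   contour_pdf = maker.drawContourMap('/tmp/maps')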
Exemple #37
    def __init__(self,config,shakefile,model):
        if model not in getLogisticModelNames(config):
            raise Exception('Could not find a model called "%s" in config %s.' % (model,config))
        #do everything here short of calculations - parse config, assemble eqn strings, load data.
        self.model = model
        cmodel = config['logistic_models'][model]
        self.coeffs = validateCoefficients(cmodel)
        self.layers = validateLayers(cmodel)#key = layer name, value = file name
        self.terms,timeField = validateTerms(cmodel,self.coeffs,self.layers)
        self.interpolations = validateInterpolations(cmodel,self.layers)
        self.units = validateUnits(cmodel,self.layers)

        if 'baselayer' not in cmodel:
            raise Exception('You must specify a base layer file in config.')
        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception('You must specify a base layer corresponding to one of the files in the layer section.')

        #get the geodict for the shakemap
        geodict = ShakeGrid.getFileGeoDict(shakefile,adjust='res')
        griddict,eventdict,specdict,fields,uncertainties = getHeaderData(shakefile)
        YEAR = eventdict['event_timestamp'].year
        MONTH = MONTHS[(eventdict['event_timestamp'].month)-1]
        DAY = eventdict['event_timestamp'].day
        HOUR = eventdict['event_timestamp'].hour

        #now find the layer that is our base layer and get the largest bounds we can guaranteed not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict = GDALGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        elif ftype == 'gmt':
            basegeodict = GMTGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        else:
            raise Exception('All predictor variable grids must be a valid GMT or ESRI file type')

        #now load the shakemap, resampling and padding if necessary
        self.shakemap = ShakeGrid.load(shakefile,samplegeodict=sampledict,resample=True,doPadding=True,adjust='res')

        #load the predictor layers into a dictionary
        self.layerdict = {} #key = layer name, value = grid object
        for layername,layerfile in self.layers.items():
            if isinstance(layerfile,list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            if ftype == 'gmt':
                                lyr = GMTGrid.load(layerfile,sampledict,resample=True,method=interp,doPadding=True)
                            elif ftype == 'esri':
                                lyr = GDALGrid.load(layerfile,sampledict,resample=True,method=interp,doPadding=True)
                            else:
                                msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (layername,layerfile)
                                raise Exception(msg)
                            self.layerdict[layername] = lyr
            else:
                #first, figure out what kind of file we have (or is it a directory?)
                ftype = getFileType(layerfile)
                interp = self.interpolations[layername]
                if ftype == 'gmt':
                    lyr = GMTGrid.load(layerfile,sampledict,resample=True,method=interp,doPadding=True)
                elif ftype == 'esri':
                    lyr = GDALGrid.load(layerfile,sampledict,resample=True,method=interp,doPadding=True)
                else:
                    msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (layername,layerfile)
                    raise Exception(msg)
                self.layerdict[layername] = lyr

        shapes = {}
        for layername,layer in self.layerdict.items():
            shapes[layername] = layer.getData().shape

        self.nuggets = [str(self.coeffs['b0'])]
        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)
        self.geodict = self.shakemap.getGeoDict()
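
# A worked sketch (hypothetical coefficients and term) of the equation-string
# assembly above, which calculate() later evaluates with eval():
coeffs = {'b0': -3.6, 'b1': 0.07}
terms = {'b1': "self.layerdict['slope'].getData()"}
nuggets = [str(coeffs['b0'])]
for key in sorted(terms):
    nuggets.append('(%g * %s)' % (coeffs[key], terms[key]))
equation = ' + '.join(nuggets)
# equation == "-3.6 + (0.07 * self.layerdict['slope'].getData())"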
Exemple #38
    def __init__(self, config, shakefile, model, uncertfile=None):
        """Set up the logistic model

        :param config: configobj (config .ini file read in using configobj) defining the model and its inputs
        :type config: dictionary
        :param shakefile: Full file path to shakemap.xml file for the event of interest
        :type shakefile: string
        :param model: Name of model defined in config that should be run for the event of interest
        :type model: string
        :param uncertfile: Optional full file path to the uncertainty.xml file for the event of interest; if provided, the model is also run with ground motions shifted by +/- 1 standard deviation
        :type uncertfile: string

        """
        if model not in getLogisticModelNames(config):
            raise Exception('Could not find a model called "%s" in config %s.' % (model, config))
        #do everything here short of calculations - parse config, assemble eqn strings, load data.

        self.model = model
        cmodel = config['logistic_models'][model]
        self.modeltype = cmodel['gfetype']
        self.coeffs = validateCoefficients(cmodel)
        self.layers = validateLayers(cmodel)  # key = layer name, value = file name
        self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)
        self.gmused = [value for term, value in cmodel['terms'].items() if 'pga' in value.lower() or 'pgv' in
                       value.lower() or 'mmi' in value.lower()]
        self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
        if 'baselayer' not in cmodel:
            raise Exception('You must specify a base layer file in config.')
        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception('You must specify a base layer corresponding to one of the files in the layer section.')

        #get the geodict for the shakemap
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        griddict, eventdict, specdict, fields, uncertainties = getHeaderData(shakefile)
        #YEAR = eventdict['event_timestamp'].year
        MONTH = MONTHS[(eventdict['event_timestamp'].month)-1]
        #DAY = eventdict['event_timestamp'].day
        #HOUR = eventdict['event_timestamp'].hour

        #now find the layer that is our base layer and get the largest bounds we can guarantee not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        elif ftype == 'gmt':
            basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
            sampledict = basegeodict.getBoundsWithin(geodict)
        else:
            raise Exception('All predictor variable grids must be a valid GMT or ESRI file type')

        #now load the shakemap, resampling and padding if necessary
        self.shakemap = ShakeGrid.load(shakefile, samplegeodict=sampledict, resample=True, doPadding=True, adjust='res')

        # take uncertainties into account
        if uncertfile is not None:
            try:
                self.uncert = ShakeGrid.load(uncertfile, samplegeodict=sampledict, resample=True, doPadding=True,
                                             adjust='res')
            except Exception:
                print('Could not read uncertainty file, ignoring uncertainties')
                self.uncert = None
        else:
            self.uncert = None

        #load the predictor layers into a dictionary
        self.layerdict = {}  # key = layer name, value = grid object
        for layername, layerfile in self.layers.items():
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            if ftype == 'gmt':
                                lyr = GMTGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                            elif ftype == 'esri':
                                lyr = GDALGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                            else:
                                msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (layername, layerfile)
                                raise Exception(msg)
                            self.layerdict[layername] = lyr
            else:
                #first, figure out what kind of file we have (or is it a directory?)
                ftype = getFileType(layerfile)
                interp = self.interpolations[layername]
                if ftype == 'gmt':
                    lyr = GMTGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                elif ftype == 'esri':
                    lyr = GDALGrid.load(layerfile, sampledict, resample=True, method=interp, doPadding=True)
                else:
                    msg = 'Layer %s (file %s) does not appear to be a valid GMT or ESRI file.' % (layername, layerfile)
                    raise Exception(msg)
                self.layerdict[layername] = lyr

        shapes = {}
        for layername, layer in self.layerdict.items():
            shapes[layername] = layer.getData().shape

        self.nuggets = [str(self.coeffs['b0'])]

        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)

        if self.uncert is not None:
            self.nugmin = copy.copy(self.nuggets)
            self.nugmax = copy.copy(self.nuggets)
            # Find the term with the shakemap input and replace for these nuggets
            for k, nug in enumerate(self.nuggets):
                if "self.shakemap.getLayer('pga').getData()" in nug:
                    self.nugmin[k] = self.nugmin[k].replace("self.shakemap.getLayer('pga').getData()", "(np.exp(np.log(self.shakemap.getLayer('pga').getData()) - self.uncert.getLayer('stdpga').getData()))")
                    self.nugmax[k] = self.nugmax[k].replace("self.shakemap.getLayer('pga').getData()", "(np.exp(np.log(self.shakemap.getLayer('pga').getData()) + self.uncert.getLayer('stdpga').getData()))")
                elif "self.layerdict['pgv'].getData()" in nug:
                    self.nugmin[k] = self.nugmin[k].replace("self.shakemap.getLayer('pgv').getData()", "(np.exp(np.log(self.shakemap.getLayer('pgv').getData()) - self.uncert.getLayer('stdpgv').getData()))")
                    self.nugmax[k] = self.nugmax[k].replace("self.shakemap.getLayer('pgv').getData()", "(np.exp(np.log(self.shakemap.getLayer('pgv').getData()) + self.uncert.getLayer('stdpgv').getData()))")
                elif "self.layerdict['mmi'].getData()" in nug:
                    self.nugmin[k] = self.nugmin[k].replace("self.shakemap.getLayer('mmi').getData()", "(np.exp(np.log(self.shakemap.getLayer('mmi').getData()) - self.uncert.getLayer('stdmmi').getData()))")
                    self.nugmax[k] = self.nugmax[k].replace("self.shakemap.getLayer('mmi').getData()", "(np.exp(np.log(self.shakemap.getLayer('mmi').getData()) + self.uncert.getLayer('stdmmi').getData()))")
            self.equationmin = ' + '.join(self.nugmin)
            self.equationmax = ' + '.join(self.nugmax)
        else:
            self.equationmin = None
            self.equationmax = None

        self.geodict = self.shakemap.getGeoDict()

        try:
            self.slopemin = float(config['logistic_models'][model]['slopemin'])
            self.slopemax = float(config['logistic_models'][model]['slopemax'])
        except Exception:
            print('could not find slopemin and/or slopemax in config, no limits will be applied')
            self.slopemin = 0.
            self.slopemax = 90.
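
# The stdpga/stdpgv/stdmmi substitutions above are equivalent to shifting the
# ground motion by one standard deviation in natural-log space, e.g. for PGA:
#   pga_min = exp(ln(pga) - stdpga),  pga_max = exp(ln(pga) + stdpga)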
Exemple #39
def model_test_simple():
    A = 4  #ccode for afghanistan
    J = 392  #ccode for japan
    R = 1  #rural code
    U = 2  #urban code
    #create a 5x5 population data set with 1000 people in each cell
    popdata = np.ones((5, 5)) * 1000.0
    #create a mixed grid of afghanistan and japan (have very different inventory,collapse, and fatality rates.)
    isodata = np.array([[A, A, A, A, A], [A, A, A, A, A], [A, A, A, J, J],
                        [J, J, J, J, J], [J, J, J, J, J]],
                       dtype=np.int16)
    #make a mix of urban and rural cells
    urbdata = np.array([[R, R, R, R, R], [R, U, U, U, R], [R, U, U, U, U],
                        [U, U, U, R, R], [R, R, R, R, R]],
                       dtype=np.int16)
    mmidata = np.array([[6, 7, 8, 9, 6], [7, 8, 9, 6, 7], [8, 9, 6, 6, 7],
                        [8, 9, 6, 7, 8], [9, 6, 7, 8, 9]],
                       dtype=np.float32)
    homedir = os.path.dirname(
        os.path.abspath(__file__))  #where is this script?
    invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf')
    colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf')
    fatfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf')
    workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf')
    growthfile = os.path.join(homedir, '..', 'data',
                              'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls')
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 4.5,
        'ymin': 0.5,
        'ymax': 4.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 5,
        'ny': 5
    })

    popgrid = GMTGrid(popdata, geodict)
    isogrid = GMTGrid(isodata, geodict)
    urbgrid = GMTGrid(urbdata, geodict)
    popyear = 2016
    layers = {'mmi': mmidata}
    eventdict = {
        'event_id': '1234',
        'magnitude': 7.5,
        'lat': 34.2,
        'lon': 118.2,
        'depth': 10.0,
        'event_timestamp': datetime(2016, 1, 1, 0, 0, 0),
        'event_description': 'test data',
        'event_network': 'us'
    }
    shakedict = {
        'event_id': '1234',
        'shakemap_id': '1234',
        'shakemap_version': 1,
        'code_version': '1.0',
        'process_timestamp': datetime.utcnow(),
        'shakemap_originator': 'us',
        'map_status': 'RELEASED',
        'shakemap_event_type': 'SCENARIO'
    }
    uncdict = {'mmi': (1.0, 1)}
    mmigrid = ShakeGrid(layers, geodict, eventdict, shakedict, uncdict)

    popfile = isofile = urbfile = shakefile = ''
    try:
        #make some temporary files
        f, popfile = tempfile.mkstemp()
        os.close(f)
        f, isofile = tempfile.mkstemp()
        os.close(f)
        f, urbfile = tempfile.mkstemp()
        os.close(f)
        f, shakefile = tempfile.mkstemp()
        os.close(f)

        popgrid.save(popfile)
        isogrid.save(isofile)
        urbgrid.save(urbfile)
        mmigrid.save(shakefile)

        semi = SemiEmpiricalFatality.fromDefault()
        # register the temporary input grids before computing losses
        semi.setGlobalFiles(popfile, popyear, urbfile, isofile)
        losses, resfat, nonresfat = semi.getLosses(shakefile)
        assert losses == 85
        print('Semi-empirical model calculations appear to be done correctly.')
    except Exception as e:
        print('There is an error attempting to do semi-empirical loss '
              'calculations: %s' % str(e))
    finally:
        files = [popfile, isofile, urbfile, shakefile]
        for fname in files:
            if os.path.isfile(fname):
                os.remove(fname)
Exemple #40
def quickcut(filename,
             gdict,
             tempname=None,
             extrasamp=5.,
             method='bilinear',
             precise=True,
             cleanup=True,
             verbose=False,
             override=False):
    """
    Use gdal to trim a large global file down quickly so mapio can read it
    efficiently. (Cannot read ShakeMap .xml files; save as .bil first.)

    Args:
        filename (str): File path to original input file (raster).
        gdict (geodict): Geodictionary to cut around and align with.
        tempname (str): File path to desired location of clipped part of
            filename.
        extrasamp (int): Number of extra cells to cut around each edge of
            geodict to have resampling buffer for future steps.
        method (str): If resampling is necessary, method to use.
        precise (bool): If true, will resample to the gdict as closely as
            possible, if False it will just roughly cut around the area of
            interest without changing resolution
        cleanup (bool): if True, delete tempname after reading it back in
        verbose (bool): if True, prints more details
        override (bool): if True, if filename extent is not fully contained by
            gdict, read in the entire file (only used for ShakeMaps)

    Returns:
        Grid2D: New grid2D layer, cut (and optionally resampled) to gdict.

    Note: This function uses the subprocess approach because ``gdal.Translate``
        does not block until the output file is created, which causes
        problems in subsequent steps.
    """
    if gdict.xmax < gdict.xmin:
        raise Exception('quickcut: your geodict xmax is smaller than xmin')

    try:
        filegdict = GDALGrid.getFileGeoDict(filename)
    except Exception:
        try:
            filegdict = GMTGrid.getFileGeoDict(filename)
        except Exception:
            raise Exception('Cannot get geodict for %s' % filename)

    if tempname is None:
        tempdir = tempfile.mkdtemp()
        tempname = os.path.join(tempdir, 'junk.tif')
        deltemp = True
    else:
        tempdir = None
        deltemp = False

    # if os.path.exists(tempname):
    #     os.remove(tempname)
    #     print('Temporary file already there, removing file')

    filegdict = filegdict[0]

    # Get the right methods for mapio (method) and gdal (method2)
    if method == 'linear':
        method2 = 'bilinear'
    elif method == 'nearest':
        method2 = 'near'
    elif method == 'bilinear':
        method = 'linear'
        method2 = 'bilinear'
    elif method == 'near':
        method = 'nearest'
        method2 = 'near'
    else:
        method2 = method

    if filegdict != gdict:
        # First cut without resampling
        tempgdict = GeoDict.createDictFromBox(gdict.xmin,
                                              gdict.xmax,
                                              gdict.ymin,
                                              gdict.ymax,
                                              filegdict.dx,
                                              filegdict.dy,
                                              inside=True)

        try:
            egdict = filegdict.getBoundsWithin(tempgdict)

            ulx = egdict.xmin - extrasamp * egdict.dx
            uly = egdict.ymax + extrasamp * egdict.dy
            lrx = egdict.xmax + (extrasamp + 1) * egdict.dx
            lry = egdict.ymin - (extrasamp + 1) * egdict.dy

            cmd = ('gdal_translate -a_srs EPSG:4326 -of GTiff '
                   '-projwin %1.8f %1.8f %1.8f %1.8f -r %s %s %s'
                   % (ulx, uly, lrx, lry, method2, filename, tempname))
        except Exception as e:
            if override:
                # When ShakeMap is being loaded, sometimes they won't align
                # right because it's already cut to the area, so just load
                # the whole file
                cmd = 'gdal_translate -a_srs EPSG:4326 -of GTiff -r %s %s %s' \
                      % (method2, filename, tempname)
            else:
                raise Exception('Failed to cut layer: %s' % e)

        rc, so, se = get_command_output(cmd)
        if not rc:
            raise Exception(se.decode())
        else:
            if verbose:
                print(so.decode())

        newgrid2d = GDALGrid.load(tempname)
        if precise:
            # Resample to exact geodictionary
            newgrid2d = newgrid2d.interpolate2(gdict, method=method)
        if cleanup:
            os.remove(tempname)

        if deltemp:
            shutil.rmtree(tempdir)

    else:
        ftype = GMTGrid.getFileType(filename)
        if ftype != 'unknown':
            newgrid2d = GMTGrid.load(filename)
        elif filename.endswith('.xml'):
            newgrid2d = ShakeGrid.load(filename)
        else:
            newgrid2d = GDALGrid.load(filename)

    return newgrid2d
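
# A minimal usage sketch for quickcut(), with a hypothetical input raster path
# (GeoDict.createDictFromBox is the same helper used inside quickcut above):
from mapio.geodict import GeoDict

gdict = GeoDict.createDictFromBox(-122.5, -121.0, 36.0, 37.5, 0.01, 0.01)
# cut_grid = quickcut('/data/global_slope.tif', gdict, method='bilinear')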
Exemple #41
    def calcMetrics(self, eventid, stations=None, labels=None, config=None,
                    streams=None, stream_label=None, rupture_file=None,
                    calc_station_metrics=True, calc_waveform_metrics=True):
        """
        Calculate waveform and/or station metrics for a set of waveforms.

        Args:
            eventid (str):
                ID of event to search for in ASDF file.
            stations (list):
                List of stations to create metrics for.
            labels (list):
                List of processing labels to create metrics for.
            config (dict):
                Configuration dictionary.
            streams (StreamCollection):
                Optional StreamCollection object to create metrics for.
            stream_label (str):
                Label to be used in the metrics path when providing a
                StreamCollection.
            rupture_file (str):
                Path pointing to the rupture file.
            calc_station_metrics (bool):
                Whether to calculate station metrics. Default is True.
            calc_waveform_metrics (bool):
                Whether to calculate waveform metrics. Default is True.
        """
        if not self.hasEvent(eventid):
            fmt = 'No event matching %s found in workspace.'
            raise KeyError(fmt % eventid)

        if streams is None:
            streams = self.getStreams(
                eventid, stations=stations, labels=labels)

        event = self.getEvent(eventid)

        # Load the rupture file
        origin = Origin({
            'id': event.id,
            'netid': '',
            'network': '',
            'lat': event.latitude,
            'lon': event.longitude,
            'depth': event.depth_km,
            'locstring': '',
            'mag': event.magnitude,
            'time': event.time
        })
        rupture = get_rupture(origin, rupture_file)

        vs30_grids = None
        if config is not None:
            if 'vs30' in config['metrics']:
                vs30_grids = config['metrics']['vs30']
                for vs30_name in vs30_grids:
                    vs30_grids[vs30_name]['grid_object'] = GMTGrid.load(
                        vs30_grids[vs30_name]['file'])

        for stream in streams:
            instrument = stream.get_id()
            logging.info('Calculating stream metrics for %s...' % instrument)

            try:
                summary = StationSummary.from_config(
                    stream, event=event, config=config,
                    calc_waveform_metrics=calc_waveform_metrics,
                    calc_station_metrics=calc_station_metrics,
                    rupture=rupture, vs30_grids=vs30_grids)
            except BaseException as pgme:
                fmt = ('Could not create stream metrics for event %s, '
                       'instrument %s: "%s"')
                logging.warning(fmt % (eventid, instrument, str(pgme)))
                continue

            if calc_waveform_metrics and stream.passed:
                xmlstr = summary.get_metric_xml()
                if stream_label is not None:
                    tag = '%s_%s' % (eventid, stream_label)
                else:
                    tag = stream.tag
                metricpath = '/'.join([
                    format_netsta(stream[0].stats),
                    format_nslit(stream[0].stats, stream.get_inst(), tag),
                ])
                self.insert_aux(xmlstr, 'WaveFormMetrics', metricpath)

            if calc_station_metrics:
                xmlstr = summary.get_station_xml()
                metricpath = '/'.join([
                    format_netsta(stream[0].stats),
                    format_nslit(stream[0].stats, stream.get_inst(), eventid)
                ])
                self.insert_aux(xmlstr, 'StationMetrics', metricpath)
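
# Inferred from the vs30 handling above, a minimal matching config block could
# look like the following (the 'global' key and file path are hypothetical;
# only 'file' is read directly, and 'grid_object' is attached by the code):
config = {'metrics': {'vs30': {'global': {'file': '/data/global_vs30.grd'}}}}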
Exemple #42
        gridtype = 'gmt'
    except Exception as error:
        try:
            fdict = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except Exception:
            pass
    if gridtype is None:
        raise Exception('File "%s" does not appear to be either a GMT grid or an ESRI grid.' % gridfile)
    xmin = xmin - fdict.dx*3
    xmax = xmax + fdict.dx*3
    ymin = ymin - fdict.dy*3
    ymax = ymax + fdict.dy*3
    bounds = (xmin,xmax,ymin,ymax)
    if gridtype == 'gmt':
        fgeodict = GMTGrid.getFileGeoDict(gridfile)
    else:
        fgeodict = GDALGrid.getFileGeoDict(gridfile)
    dx,dy = (fgeodict.dx,fgeodict.dy)
    sdict = GeoDict.createDictFromBox(xmin,xmax,ymin,ymax,dx,dy)
    if gridtype == 'gmt':
        grid = GMTGrid.load(gridfile,samplegeodict=sdict,resample=False,method=method,doPadding=True)
    else:
        grid = GDALGrid.load(gridfile,samplegeodict=sdict,resample=False,method=method,doPadding=True)

    return sampleFromGrid(grid,xypoints)

def sampleFromGrid(grid,xypoints,method='nearest'):
    """
    Sample 2D grid object at each of a set of XY (decimal degrees) points.
    :param grid:
Exemple #43
    def calculate(self, saveinputs=False, slopefile=None, slopediv=1.):
        """Calculate the model

        :param saveinputs: if True, saves all the input layers as Grid2D objects in addition to the model;
          if False, it will just output the model
        :type saveinputs: boolean
        :param slopefile: optional file path to slopefile that will be resampled to the other input files for applying thresholds
        :type slopefile: string
        :param slopediv: number to divide slope by to get to degrees (usually will be default
          of 1.)
        :type slopediv: float

        :returns:
            a dictionary containing the model results and model inputs if saveinputs was set to
            True, see <https://github.com/usgs/groundfailure#api-for-model-output> for a
            description of the structure of this output

        """
        X = eval(self.equation)
        P = 1/(1 + np.exp(-X))
        if self.uncert is not None:
            Xmin = eval(self.equationmin)
            Xmax = eval(self.equationmax)
            Pmin = 1/(1 + np.exp(-Xmin))
            Pmax = 1/(1 + np.exp(-Xmax))
        if slopefile is not None:
            ftype = getFileType(slopefile)
            sampledict = self.shakemap.getGeoDict()
            if ftype == 'gmt':
                slope = GMTGrid.load(slopefile, sampledict, resample=True, method='linear', doPadding=True).getData()/slopediv
                # Apply slope min/max limits
                print('applying slope thresholds')
                P[slope > self.slopemax] = 0.
                P[slope < self.slopemin] = 0.
                if self.uncert is not None:
                    Pmin[slope > self.slopemax] = 0.
                    Pmin[slope < self.slopemin] = 0.
                    Pmax[slope > self.slopemax] = 0.
                    Pmax[slope < self.slopemin] = 0.
            elif ftype == 'esri':
                slope = GDALGrid.load(slopefile, sampledict, resample=True, method='linear', doPadding=True).getData()/slopediv
                # Apply slope min/max limits
                print('applying slope thresholds')
                P[slope > self.slopemax] = 0.
                P[slope < self.slopemin] = 0.
                if self.uncert is not None:
                    Pmin[slope > self.slopemax] = 0.
                    Pmin[slope < self.slopemin] = 0.
                    Pmax[slope > self.slopemax] = 0.
                    Pmax[slope < self.slopemin] = 0.
            else:
                print('Slope file %s does not appear to be a valid GMT or ESRI file, not applying any slope thresholds.' % (slopefile))
        else:
            print('No slope file provided, slope thresholds not applied')
        # Stuff into Grid2D object
        temp = self.shakemap.getShakeDict()
        shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])
        description = {'name': self.modelrefs['shortref'], 'longref': self.modelrefs['longref'], 'units': 'probability',
                       'shakemap': shakedetail, 'parameters': {'slopemin': self.slopemin, 'slopemax': self.slopemax}}
        Pgrid = Grid2D(P, self.geodict)
        rdict = collections.OrderedDict()
        rdict['model'] = {'grid': Pgrid,
                          'label': ('%s Probability') % (self.modeltype.capitalize()),
                          'type': 'output',
                          'description': description}
        if self.uncert is not None:
            rdict['modelmin'] = {'grid': Grid2D(Pmin, self.geodict),
                                 'label': ('%s Probability (-1 std ground motion)') % (self.modeltype.capitalize()),
                                 'type': 'output',
                                 'description': description}
            rdict['modelmax'] = {'grid': Grid2D(Pmax, self.geodict),
                                 'label': ('%s Probability (+1 std ground motion)') % (self.modeltype.capitalize()),
                                 'type': 'output',
                                 'description': description}

        if saveinputs is True:
            for layername, layergrid in list(self.layerdict.items()):
                units = self.units[layername]
                rdict[layername] = {'grid': layergrid,
                                    'label': '%s (%s)' % (layername, units),
                                    'type': 'input',
                                    'description': {'units': units, 'shakemap': shakedetail}}
            for gmused in self.gmused:
                if 'pga' in gmused:
                    units = '%g'
                    getkey = 'pga'
                elif 'pgv' in gmused:
                    units = 'cm/s'
                    getkey = 'pgv'
                elif 'mmi' in gmused:
                    units = 'intensity'
                    getkey = 'mmi'
                else:
                    # layer is derived from several input layers; skip it
                    continue
                layer = self.shakemap.getLayer(getkey)
                rdict[gmused] = {'grid': layer,
                                 'label': '%s (%s)' % (getkey.upper(), units),
                                 'type': 'input',
                                 'description': {'units': units, 'shakemap': shakedetail}}
                if self.uncert is not None:
                    layer1 = np.exp(np.log(layer.getData()) - self.uncert.getLayer('std'+getkey).getData())
                    rdict[gmused + '-1std'] = {'grid': Grid2D(layer1, self.geodict),
                                               'label': '%s (%s)' % (getkey.upper()+' -1 std', units),
                                               'type': 'input',
                                               'description': {'units': units, 'shakemap': shakedetail}}
                    layer2 = np.exp(np.log(layer.getData()) + self.uncert.getLayer('std'+getkey).getData())
                    rdict[gmused + '+1std'] = {'grid': Grid2D(layer2, self.geodict),
                                               'label': '%s (%s)' % (getkey.upper()+' +1 std', units),
                                               'type': 'input',
                                               'description': {'units': units, 'shakemap': shakedetail}}

        return rdict
Exemple #44
def hazus_liq(shakefile,
              config,
              uncertfile=None,
              saveinputs=False,
              modeltype=None,
              displmodel=None,
              probtype=None,
              bounds=None):
    """
    Method for computing the probability of liquefaction using the Hazus method
    using the Wills et al. (2015) Vs30 map of California to define the
    susceptibility classes and the Fan et al. global water table model. 
    """
    layers = config['hazus_liq_cal']['layers']
    vs30_file = layers['vs30']['file']
    wtd_file = layers['watertable']['file']
    shkgdict = ShakeGrid.getFileGeoDict(shakefile)
    fgeodict = GMTGrid.getFileGeoDict(vs30_file)[0]

    #---------------------------------------------------------------------------
    # Loading
    #---------------------------------------------------------------------------
    shakemap = ShakeGrid.load(shakefile,
                              fgeodict,
                              resample=True,
                              method='linear',
                              doPadding=True)
    PGA = shakemap.getLayer('pga').getData() / 100  # convert to g
    griddict, eventdict, specdict, fields, uncertainties = getHeaderData(
        shakefile)
    mag = eventdict['magnitude']

    # Correction factor for moment magnitudes other than M=7.5
    k_m = 0.0027 * mag**3 - 0.0267 * mag**2 - 0.2055 * mag + 2.9188

    #---------------------------------------------------------------------------
    # Susceptibility from Vs30
    #---------------------------------------------------------------------------
    vs30_grid = GMTGrid.load(vs30_file)

    vs30 = vs30_grid.getData()
    p_ml = np.zeros_like(vs30)
    a = np.zeros_like(vs30)
    b = np.zeros_like(vs30)
    for k, v in config['hazus_liq_cal']['parameters'].items():
        ind = np.where(vs30 == float(v[0]))
        if v[1] == "VH":
            p_ml[ind] = 0.25
            a[ind] = 9.09
            b[ind] = -0.82
        if v[1] == "H":
            p_ml[ind] = 0.2
            a[ind] = 7.67
            b[ind] = -0.92
        if v[1] == "M":
            p_ml[ind] = 0.1
            a[ind] = 6.67
            b[ind] = -1.0
        if v[1] == "L":
            p_ml[ind] = 0.05
            a[ind] = 5.57
            b[ind] = -1.18
        if v[1] == "VL":
            p_ml[ind] = 0.02
            a[ind] = 4.16
            b[ind] = -1.08

    # Conditional liquefaction probability for a given susceptibility category
    # at a specified PGA
    p_liq_pga = a * PGA + b
    p_liq_pga = p_liq_pga.clip(min=0, max=1)

    #---------------------------------------------------------------------------
    # Water table
    #---------------------------------------------------------------------------
    wtd_grid = GMTGrid.load(wtd_file,
                            fgeodict,
                            resample=True,
                            method=layers['watertable']['interpolation'],
                            doPadding=True)
    tmp = wtd_grid.getData()
    tmp = np.nan_to_num(tmp)

    # Convert to ft
    wt_ft = tmp * 3.28084

    # Correction factor for groundwater depths other than five feet
    k_w = 0.022 * wt_ft + 0.93

    #---------------------------------------------------------------------------
    # Combine to get conditional liquefaction probability
    #---------------------------------------------------------------------------
    p_liq_sc = p_liq_pga * p_ml / k_m / k_w

    #---------------------------------------------------------------------------
    # Turn output and inputs into into grids and put in maplayers dictionary
    #---------------------------------------------------------------------------
    maplayers = collections.OrderedDict()

    temp = shakemap.getShakeDict()
    shakedetail = '%s_ver%s' % (temp['shakemap_id'], temp['shakemap_version'])
    modelsref = config['hazus_liq_cal']['shortref']
    modellref = config['hazus_liq_cal']['longref']
    modeltype = 'Hazus/Wills'
    maplayers['model'] = {
        'grid': GDALGrid(p_liq_sc, fgeodict),
        'label': 'Probability',
        'type': 'output',
        'description': {
            'name': modelsref,
            'longref': modellref,
            'units': 'coverage',
            'shakemap': shakedetail,
            'parameters': {
                'modeltype': modeltype
            }
        }
    }

    if saveinputs is True:
        maplayers['pga'] = {
            'grid': GDALGrid(PGA, fgeodict),
            'label': 'PGA (g)',
            'type': 'input',
            'description': {
                'units': 'g',
                'shakemap': shakedetail
            }
        }
        maplayers['vs30'] = {
            'grid': GDALGrid(vs30, fgeodict),
            'label': 'Vs30 (m/s)',
            'type': 'input',
            'description': {
                'units': 'm/s'
            }
        }
        maplayers['wtd'] = {
            'grid': GDALGrid(wtd_grid._data, fgeodict),
            'label': 'wtd (m)',
            'type': 'input',
            'description': {
                'units': 'm'
            }
        }
    return maplayers
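
# Worked example with hypothetical inputs: magnitude 6.5, PGA = 0.3 g in a
# High ('H') susceptibility cell, and a water table depth of 5 ft:
#   k_m = 0.0027*6.5**3 - 0.0267*6.5**2 - 0.2055*6.5 + 2.9188 ~= 1.20
#   p_liq_pga = 7.67*0.3 - 0.92 = 1.381 -> clipped to 1.0
#   k_w = 0.022*5 + 0.93 = 1.04
#   p_liq = 1.0 * 0.2 / 1.20 / 1.04 ~= 0.16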
Exemple #45
def make_test_semi_model(ccode, timeofday, density, popvalue, mmi):
    """Run the semi-empirical model for a single value of input.  Intended for testing purposes.

    :param ccode:
      Two letter ISO country code ('US', 'JP', etc.) to be used to extract inventory, collapse rates, etc.
    :param timeofday:
      One of 'day', 'transit', or 'night' - used to determine residential/non-residential population distribution and casualty rates.
    :param density:
      One of semimodel.URBAN (2) or semimodel.RURAL (1).
    :param popvalue:
      Scalar population value to multiply by inventory, collapse, and fatality rates.
    :param mmi:
      MMI value used to extract collapse rates in given country code.
    :returns:
      Tuple of:
        1) Total number of fatalities
        2) Dictionary of residential fatalities per building type, per country.
        3) Dictionary of non-residential fatalities per building type, per country.
    """
    country = Country()
    cdict = country.getCountry(ccode)
    ucode = cdict['ISON']
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 4.5,
        'ymin': 0.5,
        'ymax': 4.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 5,
        'ny': 5
    })
    if timeofday == 'day':
        etime = datetime(2016, 1, 1, 12, 0, 0)  #noon
    elif timeofday == 'transit':
        etime = datetime(2016, 1, 1, 18, 0, 0)  #6 pm
    else:
        etime = datetime(2016, 1, 1, 0, 0, 0)  #midnight
    eventdict = {
        'event_id': '1234',
        'magnitude': 7.5,
        'lat': 0.0,
        'lon': 0.0,
        'depth': 10.0,
        'event_timestamp': etime,
        'event_description': 'test data',
        'event_network': 'us'
    }
    shakedict = {
        'event_id': '1234',
        'shakemap_id': '1234',
        'shakemap_version': 1,
        'code_version': '1.0',
        'process_timestamp': datetime.utcnow(),
        'shakemap_originator': 'us',
        'map_status': 'RELEASED',
        'shakemap_event_type': 'SCENARIO'
    }
    uncdict = {'mmi': (1.0, 1)}
    popdata = np.ones((2, 2), dtype=np.float32) * (popvalue) / 4
    isodata = np.ones((2, 2), dtype=np.int16) * ucode
    urbdata = np.ones((2, 2), dtype=np.int16) * density
    mmidata = np.ones((2, 2), dtype=np.float32) * mmi
    geodict = GeoDict({
        'xmin': 0.5,
        'xmax': 1.5,
        'ymin': 0.5,
        'ymax': 1.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 2,
        'ny': 2
    })
    popgrid = GMTGrid(popdata, geodict)
    isogrid = GMTGrid(isodata, geodict)
    urbgrid = GMTGrid(urbdata, geodict)
    popyear = 2016
    layers = {'mmi': mmidata}
    mmigrid = ShakeGrid(layers, geodict, eventdict, shakedict, uncdict)
    popfile = isofile = urbfile = shakefile = ''
    popsum = None
    newresfat = None
    newnonresfat = None
    try:
        #make some temporary files
        f, popfile = tempfile.mkstemp()
        os.close(f)
        f, isofile = tempfile.mkstemp()
        os.close(f)
        f, urbfile = tempfile.mkstemp()
        os.close(f)
        f, shakefile = tempfile.mkstemp()
        os.close(f)

        popgrid.save(popfile)
        isogrid.save(isofile)
        urbgrid.save(urbfile)
        mmigrid.save(shakefile)

        semi = SemiEmpiricalFatality.fromDefault()
        semi.setGlobalFiles(popfile, popyear, urbfile, isofile)
        t, resfat, nonresfat = semi.getLosses(shakefile)
        popsum = 0
        newresfat = {ccode: {}}
        newnonresfat = {ccode: {}}
        for key, value in resfat[ccode].items():
            if value < 1:
                value = np.floor(value)
            newresfat[ccode][key] = value / 4.0
            popsum += value / 4.0
        for key, value in nonresfat[ccode].items():
            newnonresfat[ccode][key] = value / 4.0
            if value < 1:
                value = np.floor(value)
            popsum += value / 4.0
        popsum = int(popsum)
    finally:
        files = [popfile, isofile, urbfile, shakefile]
        for fname in files:
            if os.path.isfile(fname):
                os.remove(fname)
    return (popsum, newresfat, newnonresfat)