Example #1
def test_grid_hdf_container():
    f,fname = tempfile.mkstemp()
    os.close(f)
    try:
        #test grid container
        container = GridHDFContainer.create(fname)

        # before we put anything in here, let's make sure we get empty lists from
        # all of the methods that are supposed to return lists of stuff.
        assert container.getGrids() == []
        
        #test grid2d
        geodict = GeoDict.createDictFromBox(-118.5,-114.5,32.1,36.7,0.01,0.02)
        nrows,ncols = geodict.ny,geodict.nx
        data = np.random.rand(nrows,ncols)
        metadata = {'name':'Gandalf',
                    'color':'white',
                    'powers':'magic'}
        grid = Grid2D(data,geodict)
        container.setGrid('testgrid',grid,metadata=metadata)
        outgrid,outmetadata = container.getGrid('testgrid')
        np.testing.assert_array_equal(outgrid.getData(),data)
        assert outgrid.getGeoDict() == geodict
        assert outmetadata == metadata

        #set another grid without compression
        geodict = GeoDict.createDictFromBox(-119.5,-115.5,32.3,37.7,0.01,0.02)
        nrows,ncols = geodict.ny,geodict.nx
        data = np.random.rand(nrows,ncols)
        metadata = {'name':'Legolas',
                    'color':'green',
                    'powers':'stealth'}
        grid2 = Grid2D(data,geodict)
        container.setGrid('testgrid2',grid2,metadata=metadata,compression=False)
        outgrid2,outmetadata2 = container.getGrid('testgrid2')
        np.testing.assert_array_equal(outgrid2.getData(),data)
        assert outgrid2.getGeoDict() == geodict
        assert outmetadata2 == metadata
        
        #test getGrids()
        names = container.getGrids()
        assert sorted(names) == ['testgrid','testgrid2']

        #test looking for a grid that does not exist
        try:
            container.getGrid('foo')
            assert 1 == 2  # should never get here
        except LookupError:
            pass

        #test dropping a grid
        container.dropGrid('testgrid2')

        container.close()
        container2 = GridHDFContainer.load(fname)
        names = container2.getGrids()
        assert sorted(names) == ['testgrid']
    except Exception:
        assert 1 == 2  # fail the test on any unexpected error
    finally:
        os.remove(fname)
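A note on the LookupError check above: a bare try/except would pass even when getGrid raises nothing, hence the added assert. A pytest-flavored sketch of the same check, assuming pytest is available (check_missing_grid is a hypothetical helper, not part of the container API):

import pytest

def check_missing_grid(container):
    # pytest.raises fails the test if the block does not raise LookupError
    with pytest.raises(LookupError):
        container.getGrid('foo')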
Example #2
def getNoDataGrid(predictors,xmin,xmax,ymin,ymax):
    txmin = xmin
    txmax = xmax
    tymin = ymin
    tymax = ymax
    mindx = 9999999999
    mindy = 9999999999
    #figure out bounds enclosing all files
    for predname,predfile in predictors.items():
        if not os.path.isfile(predfile):
            continue
        ftype = getFileType(predfile)
        if ftype == 'shapefile':
            f = fiona.open(predfile,'r')
            bxmin,bymin,bxmax,bymax = f.bounds
            f.close()
            if bxmin < txmin:
                txmin = bxmin
            if bxmax > txmax:
                txmax = bxmax
            if bymin < tymin:
                tymin = bymin
            if bymax > tymax:
                tymax = bymax
        elif ftype == 'grid':
            gridtype = getGridType(predfile)
            if gridtype is None:
                raise Exception('File "%s" does not appear to be either a GMT grid or an ESRI grid.' % predfile)
            fdict = getFileGeoDict(predfile,gridtype)
            if fdict.dx < mindx:
                mindx = fdict.dx
            if fdict.dy < mindy:
                mindy = fdict.dy
            if fdict.xmin < txmin:
                txmin = fdict.xmin
            if fdict.xmax > txmax:
                txmax = fdict.xmax
            if fdict.ymin < tymin:
                tymin = fdict.ymin
            if fdict.ymax > tymax:
                tymax = fdict.ymax
    sdict = GeoDict.createDictFromBox(txmin,txmax,tymin,tymax,mindx,mindy)
    nanarray = np.zeros((sdict.ny,sdict.nx),dtype=np.int8)
    for predname,predfile in predictors.items():
        if not os.path.isfile(predfile):
            continue
        ftype = getFileType(predfile)
        if ftype == 'shapefile':
            shapes = list(fiona.open(predfile,'r'))
            grid = Grid2D.rasterizeFromGeometry(shapes,sdict)
        else:
            gridtype = getGridType(predfile)
            if gridtype == 'gmt':
                grid = GMTGrid.load(predfile,samplegeodict=sdict,resample=True,method='nearest',doPadding=True)
            else:
                grid = GDALGrid.load(predfile,samplegeodict=sdict,resample=True,method='nearest',doPadding=True)
        nangrid = np.isnan(grid.getData())
        nanarray = nanarray | nangrid
    nangrid = Grid2D(data=nanarray,geodict=sdict)
    return nangrid
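A hypothetical call to getNoDataGrid; the predictor paths below are placeholders, not files shipped with any library:

# hypothetical usage; 'slope.grd' and 'geology.shp' are placeholder paths
predictors = {'slope': 'slope.grd',
              'geology': 'geology.shp'}
nodata = getNoDataGrid(predictors, -119.0, -117.0, 33.0, 35.0)
mask = nodata.getData()  # 1 where any predictor is NaN, 0 elsewhere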
Example #3
def sampleGridFile(gridfile, xypoints, method='nearest'):
    """Sample grid file (ESRI or GMT format) at each of a set of XY (decimal degrees) points.

    :param gridfile:
      Name of ESRI or GMT grid format file from which to sample values.
    :param xypoints:
      2D numpy array of XY points, decimal degrees.
    :param method:
      Interpolation method, either 'nearest' or 'linear'.
    :returns:
      1D numpy array of grid values at each of input XY points.
    """
    xmin = np.min(xypoints[:, 0])
    xmax = np.max(xypoints[:, 0])
    ymin = np.min(xypoints[:, 1])
    ymax = np.max(xypoints[:, 1])
    gridtype = None
    try:
        fdict = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception:
        try:
            fdict = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except Exception:
            pass
    if gridtype is None:
        raise Exception(
            'File "%s" does not appear to be either a GMT grid or an ESRI grid.'
            % gridfile)
    xmin = xmin - fdict.dx * 3
    xmax = xmax + fdict.dx * 3
    ymin = ymin - fdict.dy * 3
    ymax = ymax + fdict.dy * 3
    # reuse the geodict read above rather than re-reading the file
    dx, dy = (fdict.dx, fdict.dy)
    sdict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    if gridtype == 'gmt':
        grid = GMTGrid.load(gridfile,
                            samplegeodict=sdict,
                            resample=False,
                            method=method,
                            doPadding=True)
    else:
        grid = GDALGrid.load(gridfile,
                             samplegeodict=sdict,
                             resample=False,
                             method=method,
                             doPadding=True)

    return sampleFromGrid(grid, xypoints)
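A minimal usage sketch for sampleGridFile; 'example.grd' is a placeholder file name:

import numpy as np

# hypothetical usage; 'example.grd' stands in for a real GMT/ESRI grid file
xypoints = np.array([[-118.5, 34.1],
                     [-117.2, 33.8]])  # lon, lat in decimal degrees
values = sampleGridFile('example.grd', xypoints, method='nearest')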
Example #4
    def createFromBounds(cls,
                         xmin,
                         xmax,
                         ymin,
                         ymax,
                         dx,
                         dy,
                         defaultVs30=686.0,
                         vs30File=None,
                         vs30measured_grid=None,
                         backarc=False,
                         padding=False,
                         resample=False):
        """Create a Sites object by defining a center point, resolution, extent, and Vs30 values.

        :param xmin:
          X coordinate of left edge of bounds.
        :param xmax:
          X coordinate of right edge of bounds.
        :param ymin:
          Y coordinate of bottom edge of bounds.
        :param ymax:
          Y coordinate of top edge of bounds.
        :param dx:
          Resolution of desired grid in X direction.
        :param dy:
          Resolution of desired grid in Y direction.
        :param defaultVs30:
          Default Vs30 value to use if vs30File not specified.
        :param vs30File:
          Name of GMT or GDAL format grid file containing Vs30 values.
        :param vs30measured_grid:
          Boolean grid indicating whether Vs30 values were measured or derived (i.e., from slope).
        :param backarc:
          Boolean indicating whether event is on the backarc as defined here: 
          http://earthquake.usgs.gov/learn/glossary/?term=backarc
        :param padding:
          Boolean indicating whether or not to pad resulting Vs30 grid out to edges of input
          bounds.  If False, grid will be clipped to the extent of the input file.
        :param resample:
          Boolean indicating whether or not the grid should be resampled.
        """
        geodict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
        if vs30File is not None:
            vs30grid = cls._create(geodict, defaultVs30, vs30File, padding,
                                   resample)
        else:
            griddata = np.ones(
                (geodict.ny, geodict.nx), dtype=np.float64) * defaultVs30
            vs30grid = Grid2D(griddata, geodict)
        return cls(vs30grid,
                   vs30measured_grid=vs30measured_grid,
                   backarc=backarc,
                   defaultVs30=defaultVs30)
Example #5
def test_interpolate():
    geodict = GeoDict({'xmin': 0.5, 'xmax': 6.5, 'ymin': 1.5,
                       'ymax': 6.5, 'dx': 1.0, 'dy': 1.0, 'ny': 6, 'nx': 7})
    data = np.arange(14, 56).reshape(6, 7)

    for method in ['nearest', 'linear', 'cubic']:
        print('Testing interpolate with method "%s"...' % method)
        grid = Grid2D(data, geodict)
        sampledict = GeoDict({'xmin': 3.0, 'xmax': 4.0,
                              'ymin': 3.0, 'ymax': 4.0,
                              'dx': 1.0, 'dy': 1.0,
                              'ny': 2, 'nx': 2})
        grid = grid.interpolateToGrid(sampledict, method=method)
        tgrid = grid.interpolate2(sampledict, method=method)
        if method == 'nearest':
            output = np.array([[30.0, 31.0], [37.0, 38.0]])
        else:  # 'linear' and 'cubic' agree on this sample grid
            output = np.array([[34., 35.], [41., 42.]])
        np.testing.assert_almost_equal(grid.getData(), output)
        print('Passed interpolate with method "%s".' % method)
        np.testing.assert_almost_equal(tgrid.getData(), output)
        print('Passed interpolate2 with method "%s".' % method)

    # speed test of interpolateToGrid and interpolate2
    geodict = GeoDict.createDictFromBox(0, 10, 0, 10, 0.01, 0.01)
    data = np.random.rand(geodict.ny, geodict.nx)
    grid = Grid2D(data, geodict)
    sampledict = GeoDict.createDictFromBox(2, 8, 2, 8, 0.098, 0.098)
    t1 = time.time()
    grid2 = grid.interpolateToGrid(sampledict, method='linear')
    t2 = time.time()
    grid3 = grid.interpolate2(sampledict, method='linear')
    t3 = time.time()
    # np.testing.assert_almost_equal(grid2._data.sum(),grid3._data.sum())
    print('scipy method: %.3f seconds' % (t2-t1))
    print('gdal  method: %.3f seconds' % (t3-t2))
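The single time.time() deltas above are noisy; a sketch of the same comparison averaged with timeit, reusing a grid and sampledict built as above:

import timeit

def time_interp(grid, sampledict, n=5):
    # average each interpolation method over n runs
    t_scipy = timeit.timeit(
        lambda: grid.interpolateToGrid(sampledict, method='linear'),
        number=n) / n
    t_gdal = timeit.timeit(
        lambda: grid.interpolate2(sampledict, method='linear'),
        number=n) / n
    print('scipy method: %.3f seconds' % t_scipy)
    print('gdal  method: %.3f seconds' % t_gdal)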
Example #6
def big_test():
    xmin = -180
    xmax = -170
    ymin = 30
    ymax = 40
    dx = 0.0083
    dy = 0.0083
    gd = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    data = np.random.rand(gd.ny, gd.nx)
    grid = Grid2D(data, gd)
    fname = os.path.join(os.path.expanduser('~'), 'tempfile.grd')
    write(grid, fname, 'hdf')
    print(fname)
    src = rasterio.open(fname, 'r')
    src.close()
    os.remove(fname)  # clean up the temporary grid file
Example #7
def test_cut():
    geodict = GeoDict({'xmin': 0.5, 'xmax': 4.5, 'ymin': 0.5,
                       'ymax': 4.5, 'dx': 1.0, 'dy': 1.0, 'ny': 5, 'nx': 5})
    data = np.arange(0, 25).reshape(5, 5)

    print('Testing data extraction...')
    grid = Grid2D(data, geodict)
    xmin, xmax, ymin, ymax = (2.5, 3.5, 2.5, 3.5)
    newgrid = grid.cut(xmin, xmax, ymin, ymax)
    output = np.array([[7, 8], [12, 13]])
    np.testing.assert_almost_equal(newgrid.getData(), output)
    print('Passed data extraction...')

    print('Testing data trimming with resampling...')
    # make a more complicated test using getboundswithin
    data = np.arange(0, 84).reshape(7, 12)
    geodict = GeoDict({'xmin': -180, 'xmax': 150,
                       'ymin': -90, 'ymax': 90,
                       'dx': 30, 'dy': 30,
                       'nx': 12, 'ny': 7})
    grid = Grid2D(data, geodict)
    sampledict = GeoDict.createDictFromBox(-75,
                                           45, -45, 75, geodict.dx, geodict.dy)
    cutdict = geodict.getBoundsWithin(sampledict)
    newgrid = grid.cut(cutdict.xmin, cutdict.xmax, cutdict.ymin, cutdict.ymax)
    output = np.array([[16, 17, 18, 19],
                       [28, 29, 30, 31],
                       [40, 41, 42, 43],
                       [52, 53, 54, 55]])
    np.testing.assert_almost_equal(newgrid.getData(), output)
    print('Passed data trimming with resampling...')

    print('Test cut with self-alignment...')
    geodict = GeoDict({'xmin': 0.5, 'xmax': 4.5,
                       'ymin': 0.5, 'ymax': 6.5,
                       'dx': 1.0, 'dy': 1.0,
                       'nx': 5, 'ny': 7})
    data = np.arange(0, 35).astype(np.float32).reshape(7, 5)
    grid = Grid2D(data, geodict)
    cutxmin = 1.7
    cutxmax = 3.7
    cutymin = 1.7
    cutymax = 5.7
    cutgrid = grid.cut(cutxmin, cutxmax, cutymin, cutymax, align=True)
    output = np.array([[7, 8],
                       [12, 13],
                       [17, 18],
                       [22, 23]])
    np.testing.assert_almost_equal(cutgrid.getData(), output)
    print('Passed cut with self-alignment.')
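For the first extraction above, the expected output is plain numpy slicing once the cut window is mapped to rows and columns; a sketch assuming cell centers at x = 0.5 + col and y = 4.5 - row (row 0 at the top):

import numpy as np

data = np.arange(0, 25).reshape(5, 5)
# x in [2.5, 3.5] -> cols 2:4; y in [2.5, 3.5] -> rows 1:3
subset = data[1:3, 2:4]
assert (subset == np.array([[7, 8], [12, 13]])).all()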
Example #8
def sampleGridFile(gridfile,xypoints,method='nearest'):
    """
    Sample grid file (ESRI or GMT format) at each of a set of XY (decimal degrees) points.
    :param gridfile:
      Name of ESRI or GMT grid format file from which to sample values.
    :param xypoints:
      2D numpy array of XY points, decimal degrees.
    :param method:
      Interpolation method, either 'nearest' or 'linear'.
    :returns:
      1D numpy array of grid values at each of input XY points.
    """
    if not len(xypoints):
        return np.array([])
    xmin = np.min(xypoints[:,0])
    xmax = np.max(xypoints[:,0])
    ymin = np.min(xypoints[:,1])
    ymax = np.max(xypoints[:,1])
    gridtype = None
    try:
        fdict,tmp = GMTGrid.getFileGeoDict(gridfile)
        gridtype = 'gmt'
    except Exception:
        try:
            fdict,tmp = GDALGrid.getFileGeoDict(gridfile)
            gridtype = 'esri'
        except Exception:
            pass
    if gridtype is None:
        raise Exception('File "%s" does not appear to be either a GMT grid or an ESRI grid.' % gridfile)
    xmin = xmin - fdict.dx*3
    xmax = xmax + fdict.dx*3
    ymin = ymin - fdict.dy*3
    ymax = ymax + fdict.dy*3
    # reuse the geodict read above rather than re-reading the file
    dx,dy = (fdict.dx,fdict.dy)
    sdict = GeoDict.createDictFromBox(xmin,xmax,ymin,ymax,dx,dy)
    if gridtype == 'gmt':
        grid = GMTGrid.load(gridfile,samplegeodict=sdict,resample=True,method=method,doPadding=True)
    else:
        grid = GDALGrid.load(gridfile,samplegeodict=sdict,resample=True,method=method,doPadding=True)

    return sampleFromGrid(grid,xypoints)
Example #9
    def fromBounds(cls, xmin, xmax, ymin, ymax, dx, dy, defaultVs30=686.0,
                         vs30File=None, vs30measured_grid=None,
                         backarc=None, padding=False, resample=False):
        """
        Create a Sites object by defining bounds, resolution, and Vs30 values.

        :param xmin:
            X coordinate of left edge of bounds.
        :param xmax:
            X coordinate of right edge of bounds.
        :param ymin:
            Y coordinate of bottom edge of bounds.
        :param ymax:
            Y coordinate of top edge of bounds.
        :param dx:
            Resolution of desired grid in X direction.
        :param dy:
            Resolution of desired grid in Y direction.
        :param defaultVs30:
            Default Vs30 value to use if vs30File not specified.
        :param vs30File:
            Name of GMT or GDAL format grid file containing Vs30 values.
        :param vs30measured_grid:
            Boolean grid indicating whether Vs30 values were measured or
            derived (i.e., from slope).
        :param backarc:
            Boolean array indicating whether site is in the subduction 
            `backarc <http://earthquake.usgs.gov/learn/glossary/?term=backarc>`__.
        :param padding:
            Boolean indicating whether or not to pad resulting Vs30 grid out to
            edges of input bounds. If False, grid will be clipped to the extent
            of the input file.
        :param resample:
            Boolean indicating whether or not the grid should be resampled.
        """
        geodict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
        if vs30File is not None:
            vs30grid = cls._create(geodict, defaultVs30,
                                   vs30File, padding, resample)
        else:
            griddata = np.ones((geodict.ny, geodict.nx),
                               dtype=np.float64) * defaultVs30
            vs30grid = Grid2D(griddata, geodict)
        return cls(vs30grid, vs30measured_grid=vs30measured_grid,
                   backarc=backarc, defaultVs30=defaultVs30)
Example #10
def test_mapmaker_contour():
    homedir = os.path.dirname(os.path.abspath(
        __file__))  # where is this script?
    shakedir = os.path.abspath(os.path.join(homedir, '..', '..', '..'))
    out_file = os.path.join(shakedir, 'tests', 'data',
                            'containers', 'northridge',
                            'shake_result.hdf')
    container = ShakeMapOutputContainer.load(out_file)
    topofile = os.path.join(homedir, '..', '..', 'data', 'install', 'data',
                            'mapping', 'CA_topo.grd')

    info = container.getMetadata()
    xmin = info['output']['map_information']['min']['longitude']
    xmax = info['output']['map_information']['max']['longitude']
    ymin = info['output']['map_information']['min']['latitude']
    ymax = info['output']['map_information']['max']['latitude']
    xmin = float(xmin) - 0.1
    xmax = float(xmax) + 0.1
    ymin = float(ymin) - 0.1
    ymax = float(ymax) + 0.1
    dy = float(info['output']['map_information']
               ['grid_spacing']['latitude'])
    dx = float(info['output']['map_information']
               ['grid_spacing']['longitude'])
    sampledict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    topogrid = GMTGrid.load(topofile,
                            samplegeodict=sampledict,
                            resample=False)

    oceanfile = os.path.join(homedir, '..', '..', 'data', 'install', 'data',
                             'mapping', 'northridge_ocean.json')
    outpath = mkdtemp()
    filter_size = 10
    try:
        pdf, png = draw_contour(container, 'PGA', topogrid, oceanfile,
                                outpath, 'NEIC', filter_size)
        print(pdf)
    except Exception:
        assert 1 == 2
    finally:
        shutil.rmtree(outpath)
Example #11
def read_user_file_test(fname, xmin, xmax, ymin, ymax):
    gd = get_file_geodict(fname)
    sample = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, gd.dx, gd.dy)
    t1 = time.time()
    grid = read(fname, samplegeodict=sample)
    t2 = time.time()
    nrows, ncols = grid._data.shape
    npixels = nrows*ncols
    print('%.2f seconds to read %i pixels using h5py' % (t2-t1, npixels))

    west, east, south, north = (-105.00416666665,
                                -102.98750000804999,
                                34.98750000805,
                                37.00416666665)
    src = rasterio.open(fname, 'r')
    window = src.window(west, south, east, north)
    t1 = time.time()
    data = src.read(window=window)
    t2 = time.time()
    print('%.2f seconds to read %i pixels using rasterio' % (t2-t1, npixels))
    ratio = grid._data.sum()/data.sum()
    print('Ratio of h5py data to rasterio data is %.4f' % ratio)
    src.close()
Example #12
def test_shapes():
    gd = GeoDict.createDictFromBox(100.0, 102.0, 32.0, 34.0, 0.08, 0.08)

    # pass in scalar values
    inrow, incol = (10, 10)
    lat, lon = gd.getLatLon(inrow, incol)  # should get scalar results
    assert np.isscalar(lat) and np.isscalar(lon)

    # pass in array values
    inrow = np.array([10, 11, 12])
    incol = np.array([10, 11, 12])
    lat, lon = gd.getLatLon(inrow, incol)  # should get array results
    c1 = isinstance(lat, np.ndarray) and lat.shape == inrow.shape
    c2 = isinstance(lon, np.ndarray) and lon.shape == incol.shape
    assert c1 and c2

    # this should fail, because inputs are zero-dimensional numpy arrays
    inrow = np.array(10)
    incol = np.array(10)
    try:
        lat, lon = gd.getLatLon(inrow, incol)
        assert 1 == 0  # this should never happen
    except DataSetException:
        pass
Example #13
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('mapping module can only operate on '
                                      'gridded data, not sets of points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        check_extra_values(config, self.logger)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        # create contour files
        self.logger.debug('Mapping...')

        # get the filter size from the products.conf
        filter_size = config['products']['contour']['filter_size']

        # get the operator setting from config
        operator = config['products']['mapping']['operator']

        # get all of the pieces needed for the mapping functions
        layers = config['products']['mapping']['layers']
        if 'topography' in layers and layers['topography'] != '':
            topofile = layers['topography']
        else:
            topofile = None
        if 'roads' in layers and layers['roads'] != '':
            roadfile = layers['roads']
        else:
            roadfile = None
        if 'faults' in layers and layers['faults'] != '':
            faultfile = layers['faults']
        else:
            faultfile = None

        # Get the number of parallel workers
        max_workers = config['products']['mapping']['max_workers']

        # Reading HDF5 files currently takes a long time, due to poor
        # programming in MapIO.  To save us some time until that issue is
        # resolved, we'll coarsely subset the topo grid once here and pass
        # it into both mapping functions
        # get the bounds of the map
        info = container.getMetadata()
        xmin = info['output']['map_information']['min']['longitude']
        xmax = info['output']['map_information']['max']['longitude']
        ymin = info['output']['map_information']['min']['latitude']
        ymax = info['output']['map_information']['max']['latitude']
        dy = float(
            info['output']['map_information']['grid_spacing']['latitude'])
        dx = float(
            info['output']['map_information']['grid_spacing']['longitude'])
        padx = 5 * dx
        pady = 5 * dy
        sxmin = float(xmin) - padx
        sxmax = float(xmax) + padx
        symin = float(ymin) - pady
        symax = float(ymax) + pady

        sampledict = GeoDict.createDictFromBox(sxmin, sxmax, symin, symax, dx,
                                               dy)
        if topofile:
            topogrid = read(topofile, samplegeodict=sampledict, resample=False)
        else:
            tdata = np.full([sampledict.ny, sampledict.nx], 0.0)
            topogrid = Grid2D(data=tdata, geodict=sampledict)

        model_config = container.getConfig()

        imtlist = container.getIMTs()

        textfile = os.path.join(
            get_data_path(), 'mapping',
            'map_strings.' + config['products']['mapping']['language'])
        text_dict = get_text_strings(textfile)
        if config['products']['mapping']['fontfamily'] != '':
            matplotlib.rcParams['font.family'] = \
                config['products']['mapping']['fontfamily']
            matplotlib.rcParams['axes.unicode_minus'] = False

        allcities = Cities.fromDefault()
        states_provs = None
        countries = None
        oceans = None
        lakes = None
        extent = (float(xmin), float(ymin), float(xmax), float(ymax))
        if 'CALLED_FROM_PYTEST' not in os.environ:
            states_provs = cfeature.NaturalEarthFeature(
                category='cultural',
                name='admin_1_states_provinces_lines',
                scale='10m',
                facecolor='none')
            states_provs = list(states_provs.intersecting_geometries(extent))
            if len(states_provs) > 300:
                states_provs = None
            else:
                states_provs = cfeature.NaturalEarthFeature(
                    category='cultural',
                    name='admin_1_states_provinces_lines',
                    scale='10m',
                    facecolor='none')

            countries = cfeature.NaturalEarthFeature(category='cultural',
                                                     name='admin_0_countries',
                                                     scale='10m',
                                                     facecolor='none')

            oceans = cfeature.NaturalEarthFeature(category='physical',
                                                  name='ocean',
                                                  scale='10m',
                                                  facecolor=WATERCOLOR)

            lakes = cfeature.NaturalEarthFeature(category='physical',
                                                 name='lakes',
                                                 scale='10m',
                                                 facecolor=WATERCOLOR)

        if faultfile is not None:
            faults = ShapelyFeature(Reader(faultfile).geometries(),
                                    ccrs.PlateCarree(),
                                    facecolor='none')
        else:
            faults = None

        if roadfile is not None:
            roads = ShapelyFeature(Reader(roadfile).geometries(),
                                   ccrs.PlateCarree(),
                                   facecolor='none')
            if len(list(roads.intersecting_geometries(extent))) > 200:
                roads = None
            else:
                roads = ShapelyFeature(Reader(roadfile).geometries(),
                                       ccrs.PlateCarree(),
                                       facecolor='none')
        else:
            roads = None

        alist = []
        for imtype in imtlist:
            component, imtype = imtype.split('/')
            comp = container.getComponents(imtype)[0]
            d = {
                'imtype': imtype,
                'topogrid': topogrid,
                'allcities': allcities,
                'states_provinces': states_provs,
                'countries': countries,
                'oceans': oceans,
                'lakes': lakes,
                'roads': roads,
                'faults': faults,
                'datadir': datadir,
                'operator': operator,
                'filter_size': filter_size,
                'info': info,
                'component': comp,
                'imtdict': container.getIMTGrids(imtype, comp),
                'ruptdict': copy.deepcopy(container.getRuptureDict()),
                'stationdict': container.getStationDict(),
                'config': model_config,
                'tdict': text_dict
            }
            alist.append(d)
            if imtype == 'MMI':
                g = copy.deepcopy(d)
                g['imtype'] = 'thumbnail'
                alist.append(g)
                h = copy.deepcopy(d)
                h['imtype'] = 'overlay'
                alist.append(h)
                self.contents.addFile('intensityMap', 'Intensity Map',
                                      'Map of macroseismic intensity.',
                                      'intensity.jpg', 'image/jpeg')
                self.contents.addFile('intensityMap', 'Intensity Map',
                                      'Map of macroseismic intensity.',
                                      'intensity.pdf', 'application/pdf')
                self.contents.addFile('intensityThumbnail',
                                      'Intensity Thumbnail',
                                      'Thumbnail of intensity map.',
                                      'pin-thumbnail.png', 'image/png')
                self.contents.addFile(
                    'intensityOverlay', 'Intensity Overlay and World File',
                    'Macroseismic intensity rendered as a '
                    'PNG overlay and associated world file',
                    'intensity_overlay.png', 'image/png')
                self.contents.addFile(
                    'intensityOverlay', 'Intensity Overlay and World File',
                    'Macroseismic intensity rendered as a '
                    'PNG overlay and associated world file',
                    'intensity_overlay.pngw', 'text/plain')
            else:
                fileimt = oq_to_file(imtype)
                self.contents.addFile(fileimt + 'Map',
                                      fileimt.upper() + ' Map',
                                      'Map of ' + imtype + '.',
                                      fileimt + '.jpg', 'image/jpeg')
                self.contents.addFile(fileimt + 'Map',
                                      fileimt.upper() + ' Map',
                                      'Map of ' + imtype + '.',
                                      fileimt + '.pdf', 'application/pdf')

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(make_map, alist)
                list(results)
        else:
            for adict in alist:
                make_map(adict)

        container.close()
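The worker fan-out at the end of execute() is a standard concurrent.futures pattern; a self-contained sketch of the same idea (make_map_stub is a stand-in, not the real make_map):

import concurrent.futures as cf

def make_map_stub(adict):
    # stand-in worker; the real make_map renders a map from adict
    return adict['imtype']

def run_maps(alist, max_workers):
    if max_workers > 0:
        with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
            # list() forces evaluation so worker exceptions surface here
            return list(ex.map(make_map_stub, alist))
    return [make_map_stub(d) for d in alist]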
Example #14
def test_rupture_depth(interactive=False):
    DIP = 17.0
    WIDTH = 20.0
    GRIDRES = 0.1

    names = ['single', 'double', 'triple',
             'concave', 'concave_simple', 'ANrvSA']
    means = [3.1554422780092461, 2.9224454569459781,
             3.0381968625073563, 2.0522694624400271,
             2.4805390352818755, 2.8740121776209673]
    stds = [2.1895293825074575, 2.0506459673526174,
            2.0244588429154402, 2.0112565876976416,
            2.1599789955270019, 1.6156220309120068]
    xp0list = [np.array([118.3]),
               np.array([10.1, 10.1]),
               np.array([10.1, 10.1, 10.3]),
               np.array([10.9, 10.5, 10.9]),
               np.array([10.9, 10.6]),
               np.array([-76.483, -76.626, -76.757, -76.99, -77.024, -76.925,
                         -76.65, -76.321, -75.997, -75.958])]
    xp1list = [np.array([118.3]),
               np.array([10.1, 10.3]),
               np.array([10.1, 10.3, 10.1]),
               np.array([10.5, 10.9, 11.3]),
               np.array([10.6, 10.9]),
               np.array([-76.626, -76.757, -76.99, -77.024, -76.925, -76.65,
                         -76.321, -75.997, -75.958, -76.006])]
    yp0list = [np.array([34.2]),
               np.array([34.2, 34.5]),
               np.array([34.2, 34.5, 34.8]),
               np.array([34.2, 34.5, 34.8]),
               np.array([35.1, 35.2]),
               np.array([-52.068, -51.377, -50.729, -49.845, -49.192, -48.507,
                         -47.875, -47.478, -47.08, -46.422])]
    yp1list = [np.array([34.5]),
               np.array([34.5, 34.8]),
               np.array([34.5, 34.8, 35.1]),
               np.array([34.5, 34.8, 34.6]),
               np.array([35.2, 35.4]),
               np.array([-51.377, -50.729, -49.845, -49.192, -48.507, -47.875,
                         -47.478, -47.08, -46.422, -45.659])]

    for i in range(0, len(xp0list)):
        xp0 = xp0list[i]
        xp1 = xp1list[i]
        yp0 = yp0list[i]
        yp1 = yp1list[i]
        name = names[i]
        mean_value = means[i]
        std_value = stds[i]

        zp = np.zeros(xp0.shape)
        strike = azimuth(xp0[0], yp0[0], xp1[-1], yp1[-1])
        widths = np.ones(xp0.shape) * WIDTH
        dips = np.ones(xp0.shape) * DIP
        strike = [strike]

        origin = Origin({'id': 'test',
                         'lon': 0, 'lat': 0,
                         'depth': 5.0, 'mag': 7.0, 'netid': 'us',
                         'network': '', 'locstring': '',
                         'time': HistoricTime.utcfromtimestamp(time.time())})

        rupture = QuadRupture.fromTrace(
            xp0, yp0, xp1, yp1, zp, widths, dips, origin, strike=strike)

        # make a grid of points over both quads, ask for depths
        ymin = np.nanmin(rupture.lats)
        ymax = np.nanmax(rupture.lats)
        xmin = np.nanmin(rupture.lons)
        xmax = np.nanmax(rupture.lons)

        xmin = np.floor(xmin * (1 / GRIDRES)) / (1 / GRIDRES)
        xmax = np.ceil(xmax * (1 / GRIDRES)) / (1 / GRIDRES)
        ymin = np.floor(ymin * (1 / GRIDRES)) / (1 / GRIDRES)
        ymax = np.ceil(ymax * (1 / GRIDRES)) / (1 / GRIDRES)
        geodict = GeoDict.createDictFromBox(
            xmin, xmax, ymin, ymax, GRIDRES, GRIDRES)
        nx = geodict.nx
        ny = geodict.ny
        depths = np.zeros((ny, nx))
        for row in range(0, ny):
            for col in range(0, nx):
                lat, lon = geodict.getLatLon(row, col)
                depth = rupture.getDepthAtPoint(lat, lon)
                depths[row, col] = depth

        np.testing.assert_almost_equal(np.nanmean(depths), mean_value)
        np.testing.assert_almost_equal(np.nanstd(depths), std_value)

        if interactive:
            fig, axes = plt.subplots(nrows=2, ncols=1)
            ax1, ax2 = axes
            xdata = np.append(xp0, xp1[-1])
            ydata = np.append(yp0, yp1[-1])
            plt.sca(ax1)
            plt.plot(xdata, ydata, 'b')
            plt.sca(ax2)
            im = plt.imshow(depths, cmap='viridis_r')  # noqa
            ch = plt.colorbar()  # noqa
            fname = os.path.join(os.path.expanduser('~'),
                                 'quad_%s_test.png' % name)
            print('Saving image for %s quad test... %s' % (name, fname))
            plt.savefig(fname)
            plt.close()
Example #15
def test_project():
    # test projecting a grid that wraps the 180 meridian
    gd = GeoDict.createDictFromBox(175, -175, -5, 5, 1.0, 1.0)
    ncells = gd.ny * gd.nx
    data = np.arange(0.0, ncells).reshape(gd.ny, gd.nx)
    grid = GDALGrid(data, gd)
    projstr = "+proj=merc +lat_ts=55 +lon_0=180 +ellps=WGS84"
    newgrid = grid.project(projstr, method='nearest')
    proj = pyproj.Proj(projstr)
    # what would the ul/lr corners be?
    ulx, uly = proj(grid._geodict.xmin, grid._geodict.ymax)
    lrx, lry = proj(grid._geodict.xmax, grid._geodict.ymin)
    # what if we back-project?
    newxmin, newymax = proj(newgrid._geodict.xmin,
                            newgrid._geodict.ymax, inverse=True)
    newxmax, newymin = proj(newgrid._geodict.xmax,
                            newgrid._geodict.ymin, inverse=True)
    x = 1

    # test simple projection
    data = np.array([[0, 0, 1, 0, 0],
                     [0, 0, 1, 0, 0],
                     [1, 1, 1, 1, 1],
                     [0, 0, 1, 0, 0],
                     [0, 0, 1, 0, 0]], dtype=np.int32)
    geodict = {'xmin': 50, 'xmax': 50.4, 'ymin': 50,
               'ymax': 50.4, 'dx': 0.1, 'dy': 0.1, 'nx': 5, 'ny': 5}
    gd = GeoDict(geodict)
    grid = GDALGrid(data, gd)
    projstr = "+proj=utm +zone=40 +north +ellps=WGS84 +datum=WGS84 +units=m +no_defs "
    newgrid = grid.project(projstr, method='nearest')

    tdir = tempfile.mkdtemp()
    try:
        outfile = os.path.join(tdir, 'output.bil')
        grid.save(outfile)
        with rasterio.open(outfile) as src:
            aff = get_affine(src)
            data = src.read(1)
            src_crs = CRS().from_string(GeoDict.DEFAULT_PROJ4).to_dict()
            dst_crs = CRS().from_string(projstr).to_dict()
            nrows, ncols = data.shape
            left = aff.xoff
            top = aff.yoff
            right, bottom = aff * (ncols-1, nrows-1)
            dst_transform, width, height = calculate_default_transform(src_crs, dst_crs,
                                                                       ncols, nrows,
                                                                       left, bottom,
                                                                       right, top)
            destination = np.zeros((height, width))
            reproject(data,
                      destination,
                      src_transform=aff,
                      src_crs=src_crs,
                      dst_transform=dst_transform,
                      dst_crs=dst_crs,
                      src_nodata=src.nodata,
                      dst_nodata=np.nan,
                      resampling=Resampling.nearest)
            x = 1
    except Exception:
        assert 1 == 2  # fail the test on any unexpected error
    finally:
        shutil.rmtree(tdir)
Example #16
def test():
    # these values taken from the shakemap header of:
    # http://earthquake.usgs.gov/realtime/product/shakemap/ak12496371/ak/1453829475592/download/grid.xml

    print('Testing various dictionaries for consistency...')

    print('Testing consistent dictionary...')
    # this should pass, and will serve as the comparison from now on
    gdict = {'xmin': -160.340600, 'xmax': -146.340600,
             'ymin': 54.104700, 'ymax': 65.104700,
             'dx': 0.025000, 'dy': 0.025000,
             'ny': 441, 'nx': 561}
    gd = GeoDict(gdict)
    print('Consistent dictionary passed.')

    print('Testing dictionary with inconsistent resolution...')
    # this should pass
    gdict = {'xmin': -160.340600, 'xmax': -146.340600,
             'ymin': 54.104700, 'ymax': 65.104700,
             'dx': 0.026000, 'dy': 0.026000,
             'ny': 441, 'nx': 561}
    gd3 = GeoDict(gdict, adjust='res')
    assert gd3 == gd
    print('Dimensions modification passed.')

    print('Testing dictionary with inconsistent lower right corner...')
    # this should pass
    gdict = {'xmin': -160.340600, 'xmax': -146.350600,
             'ymin': 54.103700, 'ymax': 65.104700,
             'dx': 0.025000, 'dy': 0.025000,
             'ny': 441, 'nx': 561}
    gd4 = GeoDict(gdict, adjust='bounds')
    assert gd4 == gd
    print('Corner modification passed.')

    print('Testing to make sure lat/lon and row/col calculations are correct...')
    # make sure the lat/lon row/col calculations are correct
    ndec = int(np.abs(np.log10(GeoDict.EPS)))
    lat, lon = gd.getLatLon(0, 0)
    dlat = np.abs(lat-gd.ymax)
    dlon = np.abs(lon-gd.xmin)
    assert dlat < GeoDict.EPS and dlon < GeoDict.EPS
    row, col = gd.getRowCol(lat, lon)
    assert row == 0 and col == 0

    lat, lon = gd.getLatLon(gd.ny-1, gd.nx-1)
    dlat = np.abs(lat-gd.ymin)
    dlon = np.abs(lon-gd.xmax)
    assert dlat < GeoDict.EPS and dlon < GeoDict.EPS
    row, col = gd.getRowCol(lat, lon)
    assert row == (gd.ny-1) and col == (gd.nx-1)
    print('lat/lon and row/col calculations are correct.')

    print('Testing a dictionary for a global grid...')
    # this is the file geodict for Landscan - should pass muster
    globaldict = {'nx': 43200,
                  'ny': 20880,
                  'dx': 0.00833333333333,
                  'xmax': 179.99583333318935,
                  'xmin': -179.99583333333334,
                  'dy': 0.00833333333333,
                  'ymax': 83.99583333326376,
                  'ymin': -89.99583333333334}
    gd5 = GeoDict(globaldict)
    lat, lon = gd5.getLatLon(gd5.ny-1, gd5.nx-1)
    dlat = np.abs(lat-gd5.ymin)
    dlon = np.abs(lon-gd5.xmax)
    assert dlat < GeoDict.EPS and dlon < GeoDict.EPS
    print('Global grid is internally consistent.')

    # Test class methods for creating a GeoDict
    print('Testing whether GeoDict creator class methods work...')
    xmin = -121.05333277776235
    xmax = -116.03833388890432
    ymin = 32.138334444506171
    ymax = 36.286665555493826
    dx = 0.0083333333333333332
    dy = 0.0083333333333333332
    gd6 = GeoDict.createDictFromBox(
        xmin, xmax, ymin, ymax, dx, dy, inside=False)
    assert gd6.xmax > xmax
    assert gd6.ymin < ymin
    print('Created dictionary (outside) is correct.')
    gd7 = GeoDict.createDictFromBox(
        xmin, xmax, ymin, ymax, dx, dy, inside=True)
    assert gd7.xmax < xmax
    assert gd7.ymin > ymin
    print('Created dictionary (inside) is correct.')
    xspan = 2.5
    yspan = 2.5
    gd8 = GeoDict.createDictFromCenter(xmin, ymin, dx, dy, xspan, yspan)
    print('Created dictionary (from center point) is valid.')

    print('Testing a geodict with dx/dy values that are NOT the same...')
    xmin, xmax, ymin, ymax = (-121.06166611109568, -116.03000055557099,
                              32.130001111172838, 36.294998888827159)
    dx, dy = (0.009999722214505959, 0.009999444413578534)
    td = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    print('Passed testing a geodict with dx/dy values that are NOT the same...')

    # test getBoundsWithin
    # use global grid, and then a shakemap grid that we can get
    print('Testing getBoundsWithin...')
    grussia = {'xmin': 155.506400, 'xmax': 161.506400,
               'ymin': 52.243000, 'ymax': 55.771000,
               'dx': 0.016667, 'dy': 0.016642,
               'nx': 361, 'ny': 213}
    gdrussia = GeoDict(grussia, adjust='res')
    sampledict = gd5.getBoundsWithin(gdrussia)
    xSmaller = sampledict.xmin > grussia['xmin'] and sampledict.xmax < grussia['xmax']
    ySmaller = sampledict.ymin > grussia['ymin'] and sampledict.ymax < grussia['ymax']
    assert xSmaller and ySmaller
    assert gd5.isAligned(sampledict)
    print('getBoundsWithin returned correct result.')

    print('Testing isAligned() method...')
    gd = GeoDict({'xmin': 0.5, 'xmax': 3.5,
                  'ymin': 0.5, 'ymax': 3.5,
                  'dx': 1.0, 'dy': 1.0,
                  'nx': 4, 'ny': 4})

    inside_aligned = GeoDict({'xmin': 1.5, 'xmax': 2.5,
                              'ymin': 1.5, 'ymax': 2.5,
                              'dx': 1.0, 'dy': 1.0,
                              'nx': 2, 'ny': 2})
    inside_not_aligned = GeoDict({'xmin': 2.0, 'xmax': 3.0,
                                  'ymin': 2.0, 'ymax': 3.0,
                                  'dx': 1.0, 'dy': 1.0,
                                  'nx': 2, 'ny': 2})
    assert gd.isAligned(inside_aligned)
    assert not gd.isAligned(inside_not_aligned)
    print('Passed isAligned() method...')

    print('Testing getAligned method...')
    popdict = GeoDict({'dx': 0.00833333333333,
                       'dy': 0.00833333333333,
                       'nx': 43200,
                       'ny': 20880,
                       'xmax': 179.99583333318935,
                       'xmin': -179.99583333333334,
                       'ymax': 83.99583333326376,
                       'ymin': -89.99583333333334})
    sampledict = GeoDict({'dx': 0.008333333333333333,
                          'dy': 0.008336693548387094,
                          'nx': 601,
                          'ny': 497,
                          'xmax': -116.046,
                          'xmin': -121.046,
                          'ymax': 36.2785,
                          'ymin': 32.1435})
    aligndict = popdict.getAligned(sampledict)
    assert popdict.isAligned(aligndict)

    print('Testing geodict intersects method...')
    gd1 = GeoDict({'xmin': 0.5, 'xmax': 3.5,
                   'ymin': 0.5, 'ymax': 3.5,
                   'dx': 1.0, 'dy': 1.0,
                   'nx': 4, 'ny': 4})

    gd2 = GeoDict({'xmin': 2.5, 'xmax': 5.5,
                   'ymin': 2.5, 'ymax': 5.5,
                   'dx': 1.0, 'dy': 1.0,
                   'nx': 4, 'ny': 4})
    gd3 = GeoDict({'xmin': 4.5, 'xmax': 7.5,
                   'ymin': 4.5, 'ymax': 7.5,
                   'dx': 1.0, 'dy': 1.0,
                   'nx': 4, 'ny': 4})
    gd4 = GeoDict({'xmin': 1.5, 'xmax': 2.5,
                   'ymin': 1.5, 'ymax': 2.5,
                   'dx': 1.0, 'dy': 1.0,
                   'nx': 2, 'ny': 2})
    assert gd1.intersects(gd2)
    assert not gd1.intersects(gd3)
    print('Passed intersects method...')

    print('Testing geodict intersects method with real geographic data...')
    gda = GeoDict({'ymax': 83.62083333333263, 'nx': 43201,
                   'ny': 20835, 'dx': 0.00833333333333,
                   'dy': 0.00833333333333, 'xmin': -179.99583333333334,
                   'ymin': -89.99583333326461, 'xmax': -179.99583333347732})
    gdb = GeoDict({'ymax': 28.729166666619193, 'nx': 300,
                   'ny': 264, 'dx': 0.00833333333333,
                   'dy': 0.00833333333333, 'xmin': 84.08749999989436,
                   'ymin': 26.537499999953404, 'xmax': 86.57916666656007})
    assert gda.intersects(gdb)
    print('Passed geodict intersects method with real geographic data.')

    print('Testing geodict doesNotContain method...')
    assert gd1.doesNotContain(gd3)
    assert not gd1.doesNotContain(gd4)

    print('Passed doesNotContain method...')

    print('Testing geodict contains method...')

    assert gd1.contains(gd4)
    assert not gd1.contains(gd3)
    print('Passed contains method...')
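A back-of-the-envelope check of the consistent dictionary above, assuming the convention nx = round((xmax - xmin)/dx) + 1 for cell-registered grids (an assumption about GeoDict's arithmetic, not taken from its source):

xmin, xmax, dx = -160.3406, -146.3406, 0.025
ymin, ymax, dy = 54.1047, 65.1047, 0.025
nx = int(round((xmax - xmin) / dx)) + 1  # 14.0 / 0.025 + 1 = 561
ny = int(round((ymax - ymin) / dy)) + 1  # 11.0 / 0.025 + 1 = 441
assert (nx, ny) == (561, 441)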
Example #17
            gridtype = 'esri'
        except Exception:
            pass
    if gridtype is None:
        raise Exception('File "%s" does not appear to be either a GMT grid or an ESRI grid.' % gridfile)
    xmin = xmin - fdict.dx*3
    xmax = xmax + fdict.dx*3
    ymin = ymin - fdict.dy*3
    ymax = ymax + fdict.dy*3
    # reuse the geodict read above rather than re-reading the file
    dx,dy = (fdict.dx,fdict.dy)
    sdict = GeoDict.createDictFromBox(xmin,xmax,ymin,ymax,dx,dy)
    if gridtype == 'gmt':
        grid = GMTGrid.load(gridfile,samplegeodict=sdict,resample=False,method=method,doPadding=True)
    else:
        grid = GDALGrid.load(gridfile,samplegeodict=sdict,resample=False,method=method,doPadding=True)

    return sampleFromGrid(grid,xypoints)

def sampleFromGrid(grid,xypoints,method='nearest'):
    """
    Sample 2D grid object at each of a set of XY (decimal degrees) points.
    :param grid:
      Grid2D object at which to sample data.
    :param xypoints:
      2D numpy array of XY points, decimal degrees.
    :param method:
Example #18
def get_exposures(grid,
                  pop_file,
                  shakefile=None,
                  shakethreshtype=None,
                  shakethresh=None,
                  probthresh=None):
    """
    Get exposure-based statistics.

    Args:
        grid: Model grid.
        pop_file (str):  Path to the landscan population grid.
        shakefile (str): Optional, path to shakemap file to use for ground
            motion threshold.
        shakethreshtype (str): Optional, type of ground motion to use for
            shakethresh, 'pga', 'pgv', or 'mmi'.
        shakethresh: Optional, float or list of shaking thresholds, in %g for
            pga, cm/s for pgv, float for mmi.
        probthresh: Optional, None or float; exclude any cells with
            probabilities less than or equal to this value.

    Returns:
        dict: Dictionary with keys named exp_pop_# where # is the shakethresh.
    """

    # If probthresh defined, zero out any areas less than or equal to probthresh
    # before proceeding

    if probthresh is not None:
        origdata = grid.getData()
        moddat = origdata.copy()
        moddat[moddat <= probthresh] = 0.0
        moddat[np.isnan(origdata)] = float('nan')
    else:
        moddat = grid.getData()

    mdict = grid.getGeoDict()

    # Cut out area from population file
    popcut = quickcut(pop_file,
                      mdict,
                      precise=False,
                      extrasamp=2.,
                      method='nearest')
    popdat = popcut.getData()
    pdict = popcut.getGeoDict()

    # Pad grid with nans to beyond extent of pdict
    pad_dict = {}
    pad_dict['padleft'] = int(
        np.abs(np.ceil((mdict.xmin - pdict.xmin) / mdict.dx)))
    pad_dict['padright'] = int(
        np.abs(np.ceil((pdict.xmax - mdict.xmax) / mdict.dx)))
    pad_dict['padbottom'] = int(
        np.abs(np.ceil((mdict.ymin - pdict.ymin) / mdict.dy)))
    pad_dict['padtop'] = int(
        np.abs(np.ceil((pdict.ymax - mdict.ymax) / mdict.dy)))
    padgrid, mdict2 = Grid2D.padGrid(moddat, mdict, pad_dict)  # pads with inf
    padgrid[np.isinf(padgrid)] = float('nan')  # change to pad with nan
    padgrid = Grid2D(data=padgrid, geodict=mdict2)  # Turn into grid2d object

    # Resample model grid so as to be the nearest integer multiple of popdict
    factor = np.round(pdict.dx / mdict2.dx)

    # Create geodictionary that is a factor of X higher res but otherwise
    # identical
    ndict = GeoDict.createDictFromBox(pdict.xmin, pdict.xmax, pdict.ymin,
                                      pdict.ymax, pdict.dx / factor,
                                      pdict.dy / factor)

    # Resample
    grid2 = padgrid.interpolate2(ndict, method='linear')

    # Get proportion of each cell that has values (to account properly
    # for any nans)
    prop = block_reduce(~np.isnan(grid2.getData().copy()),
                        block_size=(int(factor), int(factor)),
                        cval=float('nan'),
                        func=np.sum) / (factor**2.)

    # Now block reduce to same geodict as popfile
    modresamp = block_reduce(grid2.getData().copy(),
                             block_size=(int(factor), int(factor)),
                             cval=float('nan'),
                             func=np.nanmean)

    exp_pop = {}
    if shakefile is not None:
        # Resample shakefile to population grid
        # , doPadding=True, padValue=0.)
        shakemap = ShakeGrid.load(shakefile, resample=False)
        shakemap = shakemap.getLayer(shakethreshtype)
        shakemap = shakemap.interpolate2(pdict)
        shkdat = shakemap.getData()
        for shaket in shakethresh:
            threshmult = shkdat > shaket
            threshmult = threshmult.astype(float)
            exp_pop['exp_pop_%1.2fg' % (shaket / 100., )] = np.nansum(
                popdat * prop * modresamp * threshmult)

    else:
        exp_pop['exp_pop_0.00g'] = np.nansum(popdat * prop * modresamp)

    return exp_pop
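A hypothetical call to get_exposures; the population and shakemap paths are placeholders, and the model grid is synthetic:

import numpy as np

# hypothetical usage; 'landscan.flt' and 'shake_grid.xml' are placeholders
gdict = GeoDict.createDictFromBox(-118.0, -117.0, 33.0, 34.0, 0.01, 0.01)
model_grid = Grid2D(np.random.rand(gdict.ny, gdict.nx), gdict)
exp = get_exposures(model_grid, 'landscan.flt',
                    shakefile='shake_grid.xml',
                    shakethreshtype='pga',
                    shakethresh=[10.0, 20.0])
# keys look like 'exp_pop_0.10g' and 'exp_pop_0.20g'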
Example #19
    def execute(self):
        """
        Raises:
            NotADirectoryError: When the event data directory does not exist.
            FileNotFoundError: When the shake_result HDF file does not
                exist.
        """
        install_path, data_path = get_config_paths()
        datadir = os.path.join(data_path, self._eventid, 'current', 'products')
        if not os.path.isdir(datadir):
            raise NotADirectoryError('%s is not a valid directory.' % datadir)
        datafile = os.path.join(datadir, 'shake_result.hdf')
        if not os.path.isfile(datafile):
            raise FileNotFoundError('%s does not exist.' % datafile)

        # Open the ShakeMapOutputContainer and extract the data
        container = ShakeMapOutputContainer.load(datafile)
        if container.getDataType() != 'grid':
            raise NotImplementedError('uncertaintymaps module can only '
                                      'operate on gridded data, not sets of '
                                      'points')

        # get the path to the products.conf file, load the config
        config_file = os.path.join(install_path, 'config', 'products.conf')
        spec_file = get_configspec('products')
        validator = get_custom_validator()
        config = ConfigObj(config_file, configspec=spec_file)
        results = config.validate(validator)
        check_extra_values(config, self.logger)
        if not isinstance(results, bool) or not results:
            config_error(config, results)

        # create contour files
        self.logger.debug('Uncertainty mapping...')

        # get the operator setting from config
        operator = config['products']['mapping']['operator']

        # get all of the pieces needed for the uncertainty mapping functions
        layers = config['products']['mapping']['layers']
        if 'countries' in layers and layers['countries'] != '':
            countries_file = layers['countries']
        else:
            countries_file = None
        if 'states_provs' in layers and layers['states_provs'] != '':
            states_provs_file = layers['states_provs']
        else:
            states_provs_file = None
        if 'oceans' in layers and layers['oceans'] != '':
            oceans_file = layers['oceans']
        else:
            oceans_file = None
        if 'lakes' in layers and layers['lakes'] != '':
            lakes_file = layers['lakes']
        else:
            lakes_file = None

        # Get the number of parallel workers
        max_workers = config['products']['mapping']['max_workers']

        # Reading HDF5 files currently takes a long time, due to poor
        # programming in MapIO.  To save us some time until that issue is
        # resolved, we'll coarsely subset the topo grid once here and pass
        # it into both mapping functions
        # get the bounds of the map
        info = container.getMetadata()
        xmin = info['output']['map_information']['min']['longitude']
        xmax = info['output']['map_information']['max']['longitude']
        ymin = info['output']['map_information']['min']['latitude']
        ymax = info['output']['map_information']['max']['latitude']
        dy = float(
            info['output']['map_information']['grid_spacing']['latitude'])
        dx = float(
            info['output']['map_information']['grid_spacing']['longitude'])
        padx = 5 * dx
        pady = 5 * dy
        sxmin = float(xmin) - padx
        sxmax = float(xmax) + padx
        symin = float(ymin) - pady
        symax = float(ymax) + pady

        sampledict = GeoDict.createDictFromBox(sxmin, sxmax, symin, symax, dx,
                                               dy)
        tdata = np.full([sampledict.ny, sampledict.nx], 0.0)
        topogrid = Grid2D(data=tdata, geodict=sampledict)

        model_config = container.getConfig()

        imtlist = container.getIMTs()

        textfile = os.path.join(
            get_data_path(), 'mapping',
            'map_strings.' + config['products']['mapping']['language'])
        text_dict = get_text_strings(textfile)
        if config['products']['mapping']['fontfamily'] != '':
            matplotlib.rcParams['font.family'] = \
                config['products']['mapping']['fontfamily']
            matplotlib.rcParams['axes.unicode_minus'] = False

        allcities = Cities.fromDefault()
        states_provs = None
        countries = None
        oceans = None
        lakes = None
        faults = None
        roads = None
        if states_provs_file is not None:
            states_provs = ShapelyFeature(
                Reader(states_provs_file).geometries(),
                ccrs.PlateCarree(),
                facecolor='none')
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            states_provs = cfeature.NaturalEarthFeature(
                category='cultural',
                name='admin_1_states_provinces_lines',
                scale='10m',
                facecolor='none')
            # The feature constructor doesn't necessarily download the
            # data, but we want it to so that multiple threads don't
            # try to do it at once when they actually access the data.
            # So below we just call the geometries() method to trigger
            # the download if necessary.
            _ = states_provs.geometries()

        if countries_file is not None:
            countries = ShapelyFeature(Reader(countries_file).geometries(),
                                       ccrs.PlateCarree(),
                                       facecolor='none')
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            countries = cfeature.NaturalEarthFeature(category='cultural',
                                                     name='admin_0_countries',
                                                     scale='10m',
                                                     facecolor='none')
            _ = countries.geometries()

        if oceans_file is not None:
            oceans = ShapelyFeature(Reader(oceans_file).geometries(),
                                    ccrs.PlateCarree(),
                                    facecolor=WATERCOLOR)
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            oceans = cfeature.NaturalEarthFeature(category='physical',
                                                  name='ocean',
                                                  scale='10m',
                                                  facecolor=WATERCOLOR)
            _ = oceans.geometries()

        if lakes_file is not None:
            lakes = ShapelyFeature(Reader(lakes_file).geometries(),
                                   ccrs.PlateCarree(),
                                   facecolor=WATERCOLOR)
        elif 'CALLED_FROM_PYTEST' not in os.environ:
            lakes = cfeature.NaturalEarthFeature(category='physical',
                                                 name='lakes',
                                                 scale='10m',
                                                 facecolor=WATERCOLOR)
            _ = lakes.geometries()

        alist = []
        llogo = config['products']['mapping'].get('license_logo') or None
        ltext = config['products']['mapping'].get('license_text') or None
        for imtype in imtlist:
            component, imtype = imtype.split('/')
            comp = container.getComponents(imtype)[0]
            d = {
                'imtype': imtype,
                'topogrid': topogrid,
                'allcities': allcities,
                'states_provinces': states_provs,
                'countries': countries,
                'oceans': oceans,
                'lakes': lakes,
                'roads': roads,
                'roadcolor': layers['roadcolor'],
                'roadwidth': layers['roadwidth'],
                'faults': faults,
                'faultcolor': layers['faultcolor'],
                'faultwidth': layers['faultwidth'],
                'datadir': datadir,
                'operator': operator,
                'filter_size': 0,
                'info': info,
                'component': comp,
                'imtdict': container.getIMTGrids(imtype, comp),
                'ruptdict': copy.deepcopy(container.getRuptureDict()),
                'stationdict': container.getStationDict(),
                'config': model_config,
                'tdict': text_dict,
                'display_magnitude': self.display_magnitude,
                'pdf_dpi': config['products']['mapping']['pdf_dpi'],
                'img_dpi': config['products']['mapping']['img_dpi'],
                'license_logo': llogo,
                'license_text': ltext,
            }
            alist.append(d)

            #
            # Populate the contents.xml
            #
            for key in ('std', 'phi', 'tau'):
                if key not in d['imtdict'] or d['imtdict'][key] is None:
                    continue

                if key == 'std':
                    ext = '_sigma'
                    utype = ' Total'
                elif key == 'phi':
                    ext = '_phi'
                    utype = ' Within-event'
                else:
                    ext = '_tau'
                    utype = ' Between-event'

                if imtype == 'MMI':
                    fileimt = 'intensity'
                else:
                    fileimt = oq_to_file(imtype)

                self.contents.addFile(
                    fileimt + ext + 'UncertaintyMap',
                    fileimt.upper() + utype + ' Uncertainty Map',
                    'Map of ' + imtype + utype + ' uncertainty.',
                    fileimt + ext + '.jpg', 'image/jpeg')
                self.contents.addFile(
                    fileimt + ext + 'UncertaintyMap',
                    fileimt.upper() + utype + ' Uncertainty Map',
                    'Map of ' + imtype + utype + ' uncertainty.',
                    fileimt + ext + '.pdf', 'application/pdf')

        if max_workers > 0:
            with cf.ProcessPoolExecutor(max_workers=max_workers) as ex:
                results = ex.map(make_map, alist)
                list(results)
        else:
            for adict in alist:
                make_map(adict)

        container.close()
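
        # Added note (hedged): list(ex.map(make_map, alist)) above forces
        # the lazy map iterator to completion, so any exception raised in a
        # worker process is re-raised here rather than silently dropped;
        # the max_workers <= 0 branch is a serial fallback for debugging.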
Example #22
def getYesPoints(pshapes, proj, dx, nmax, touch_center=True):
    """
    Collect x/y coordinates of all points within hazard coverage polygons at
    desired resolution.

    Args:
        pshapes: Sequence of orthographically projected shapes.
        proj: PyProj projection object used to transform input shapes.
        dx: Float resolution of grid at which to sample points, must be a
            round number.
        nmax: Threshold maximum number of points in total data mesh.
        touch_center: Boolean indicating whether presence of polygon in each
            grid cell is enough to turn that into a yes pixel (False) or
            if the center of the grid cell must intersect a polygon (True).

    Returns:
        tuple: (yespoints, nrows, ncols, xvar, yvar, idx) where:
          - yespoints: numpy 2-D array of X/Y coordinates inside hazard polygons.
          - nrows: number of rows of resulting mesh
          - ncols: number of columns of resulting mesh
          - xvar: numpy array of x coordinate centers of columns
          - yvar: numpy array of y coordinate centers of rows
          - idx: 1D array of indices where yes pixels are located
            (use np.unravel_index to unpack to 2D array)

    """

    mxmin = 9e10
    mxmax = -9e10
    mymin = 9e10
    mymax = -9e10
    for pshape in pshapes:
        pxmin, pymin, pxmax, pymax = pshape.bounds
        if pxmin < mxmin:
            mxmin = pxmin
        if pxmax > mxmax:
            mxmax = pxmax
        if pymin < mymin:
            mymin = pymin
        if pymax > mymax:
            mymax = pymax

    if not touch_center:
        geodict = GeoDict.createDictFromBox(mxmin, mxmax, mymin, mymax, dx, dx)
        img = rasterizeShapes(pshapes, geodict)
        # now get the x/y coordinates of the cells where covgrid == 1
        idx = np.where(img.flatten() == 1)[0]
        rows, cols = np.unravel_index(idx, (geodict.ny, geodict.nx))
        nrows = geodict.ny
        ncols = geodict.nx
        xvar = np.arange(geodict.xmin, geodict.xmax + geodict.dx, geodict.dx)
        yvar = np.arange(geodict.ymin, geodict.ymax + geodict.dy, geodict.dy)
        # convert cell indices to map coordinates; note: if the rasterized
        # grid is north-up (row 0 at ymax), the row index must be flipped
        yespoints = list(zip(xvar[cols], yvar[rows]))
    else:
        xvar = np.arange(mxmin, mxmax + dx, dx)
        yvar = np.arange(mymin, mymax + dx, dx)
        ncols = len(xvar)
        nrows = len(yvar)
        if nmax is not None:
            if ncols * nrows > nmax:
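                # shrink the mesh to roughly nmax cells while keeping its
                # aspect ratio: ncols = sqrt(nmax * aspect) and
                # nrows = nmax / ncols, so ncols * nrows ~= nmax and
                # ncols / nrows ~= aspect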
                aspect = ncols / nrows
                ncols = np.sqrt(nmax * aspect)
                nrows = nmax / ncols
                ncols = int(ncols)
                nrows = int(nrows)
                # re-calculate dx here...
                tdx = (mxmax - mxmin) / ncols
                tdy = (mymax - mymin) / nrows
                dx = max(tdx, tdy)
                xvar = np.arange(mxmin, mxmax + dx, dx)
                yvar = np.arange(mymin, mymax + dx, dx)

        # Get the "yes" points to sample from
        yespoints = []
        idx = []
        shapeidx = 0
        if pshapes[0].geom_type == 'Polygon':
            # loop over shapes, projecting each one, then get the sample points
            for pshape in pshapes:
                if not shapeidx % 1000:
                    print('Searching polygon %i of %i' %
                          (shapeidx, len(pshapes)))
                shapeidx += 1
                pxmin, pymin, pxmax, pymax = pshape.bounds
                leftcol = np.where((pxmin - xvar) >= 0)[0].max()
                rightcol = np.where((xvar - pxmax) >= 0)[0][0]
                bottomrow = np.where((pymin - yvar) >= 0)[0].max()
                toprow = np.where((yvar - pymax) >= 0)[0][0]
                xp = np.arange(xvar[leftcol], xvar[rightcol] + dx, dx)
                yp = np.arange(yvar[bottomrow], yvar[toprow] + dx, dx)
                xmesh, ymesh = np.meshgrid(xp, yp)
                xy = list(zip(xmesh.flatten(), ymesh.flatten()))
                for point in xy:
                    ix = np.where(xvar == point[0])[0][0]
                    iy = np.where(yvar == point[1])[0][0]
                    if pshape.contains(Point(point)):
                        yespoints.append(point)
                        idx.append(
                            np.ravel_multi_index((iy, ix), (nrows, ncols),
                                                 mode='raise',
                                                 order='C'))
        else:
            yespoints = []
            for pshape in pshapes:
                yespoints.append(pshape.coords[0])

    return (np.array(yespoints), nrows, ncols, xvar, yvar, idx)
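

# Hedged usage sketch (added for illustration, not part of the original
# example): exercise getYesPoints on a single projected unit square with
# touch_center=True. Assumes this module's own imports (numpy, shapely's
# Point) are present, as the function above already requires them.
if __name__ == '__main__':
    from shapely.geometry import Polygon

    square = Polygon([(0., 0.), (0., 1.), (1., 1.), (1., 0.)])
    yespoints, nrows, ncols, xvar, yvar, idx = getYesPoints(
        [square], proj=None, dx=0.25, nmax=None, touch_center=True)
    # the 3x3 block of interior cell centers falls inside the square
    print(len(yespoints), nrows, ncols)  # -> 9 5 5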
Example #23
def getYesPoints(pshapes,proj,dx,nmax,touch_center=True):
    """
    Collect x/y coordinates of all points within hazard coverage polygons at desired resolution.
    :param pshapes:
      Sequence of orthographically projected shapes.
    :param proj:
      PyProj projection object used to transform input shapes
    :param dx:
      Float resolution of grid at which to sample points, must be a round number
    :param nmax:
      Threshold maximum number of points in total data mesh.
    :param touch_center:
      Boolean indicating whether presence of polygon in each grid cell is enough to turn that
      into a yes pixel.  Setting this to false presumes that the dx is relatively large, such
      that creating a grid at that resolution will not tax the resources of the system.
    :returns:
      - numpy 2-D array of X/Y coordinates inside hazard polygons.
      - number of rows of resulting mesh
      - number of columns of resulting mesh
      - numpy array of x coordinate centers of columns
      - numpy array of y coordinate centers of rows
      - 1D array of indices where yes pixels are located (use np.unravel_index to unpack to 2D array)
    """
    mxmin = 9e10
    mxmax = -9e10
    mymin = 9e10
    mymax = -9e10
    for pshape in pshapes:
        pxmin,pymin,pxmax,pymax = pshape.bounds
        if pxmin < mxmin:
            mxmin = pxmin
        if pxmax > mxmax:
            mxmax = pxmax
        if pymin < mymin:
            mymin = pymin
        if pymax > mymax:
            mymax = pymax

    if not touch_center:
        geodict = GeoDict.createDictFromBox(mxmin,mxmax,mymin,mymax,dx,dx)
        img = rasterizeShapes(pshapes,geodict)
        #now get the x/y coordinates of the cells where covgrid == 1
        idx = np.where(img.flatten() == 1)[0]
        rows,cols = np.unravel_index(idx,(geodict.ny,geodict.nx))
        nrows = geodict.ny
        ncols = geodict.nx
        #Create the sequence of column and row centers
        xvar = np.arange(geodict.xmin,geodict.xmax+geodict.dx,geodict.dx)
        yvar = np.arange(geodict.ymin,geodict.ymax+geodict.dy,geodict.dy)
        #convert cell indices to coordinates (flip rows if grid is north-up)
        yespoints = list(zip(xvar[cols],yvar[rows]))
    else:
        xvar = np.arange(mxmin,mxmax+dx,dx)
        yvar = np.arange(mymin,mymax+dx,dx)
        ncols = len(xvar)
        nrows = len(yvar)
        if nmax is not None:
            if ncols*nrows > nmax:
                aspect = ncols/nrows
                ncols = np.sqrt(nmax*aspect)
                nrows = nmax/ncols
                ncols = int(ncols)
                nrows = int(nrows)
                #re-calculate dx here...
                tdx = (mxmax-mxmin)/ncols
                tdy = (mymax-mymin)/nrows
                dx = np.max([tdx,tdy])
                xvar = np.arange(mxmin,mxmax+dx,dx)
                yvar = np.arange(mymin,mymax+dx,dx)

        #Get the "yes" points to sample from
        #here we must check each cell center for containment in a polygon
        yespoints = []
        idx = []
        shapeidx = 0
        if pshapes[0].geom_type == 'Polygon':
            #loop over shapes, projecting each one, then get the sample points
            for pshape in pshapes:
                if not shapeidx % 1000:
                    print('Searching polygon %i of %i' % (shapeidx,len(pshapes)))
                shapeidx += 1
                pxmin,pymin,pxmax,pymax = pshape.bounds
                leftcol = np.where((pxmin - xvar) >= 0)
                rightcol = np.where((xvar - pxmax) >= 0)
                if len(leftcol[0]) and len(rightcol[0]):
                    leftcol = leftcol[0].max()
                    rightcol = rightcol[0][0]

                bottomrow = np.where((pymin - yvar) >= 0)
                toprow = np.where((yvar - pymax) >= 0)
                if len(bottomrow[0]) and len(toprow[0]):
                    bottomrow = bottomrow[0].max()
                    toprow = toprow[0][0]

                xp = np.arange(xvar[leftcol],xvar[rightcol]+dx,dx)
                yp = np.arange(yvar[bottomrow],yvar[toprow]+dx,dx)
                xmesh,ymesh = np.meshgrid(xp,yp)
                xy = list(zip(xmesh.flatten(),ymesh.flatten()))
                for point in xy:
                    ix = np.where(xvar == point[0])
                    iy = np.where(yvar == point[1])
                    if len(ix[0]) and len(iy[0]):
                        ix = ix[0][0]
                        iy = iy[0][0]
                        if pshape.contains(Point(point)):
                            yespoints.append(point)
                            idx.append(np.ravel_multi_index((iy,ix),(nrows,ncols),mode='raise',order='C'))

        else:
            yespoints = []
            for pshape in pshapes:
                yespoints.append(pshape.coords[0])
            
    return (np.array(yespoints),nrows,ncols,xvar,yvar,idx)
Example #24
def godt2008(shakefile,
             config,
             uncertfile=None,
             saveinputs=False,
             displmodel=None,
             bounds=None,
             slopediv=100.,
             codiv=10.,
             numstd=None,
             trimfile=None):
    """
    This function runs the Godt and others (2008) global method for a given
    ShakeMap. The Factor of Safety is calculated using infinite slope analysis
    assuming dry conditions. The method uses threshold Newmark displacement
    and estimates areal coverage by doing the calculations for each slope
    quantile.

    Args:
        shakefile (str): Path to shakemap xml file.
        config (ConfigObj): ConfigObj of config file containing inputs required
            for running the model
        uncertfile (str): Path to shakemap uncertainty xml file (optional).
        saveinputs (bool): Whether or not to return the model input layers,
            False (default) returns only the model output (one layer).
        displmodel (str): Newmark displacement regression model to use

            * ``'J_PGA'`` (default) -- PGA-based model, equation 6 from
              Jibson (2007).
            * ``'J_PGA_M'`` -- PGA and M-based model, equation 7 from
              Jibson (2007).
            * ``'RS_PGA_M'`` -- PGA and M-based model from Rathje and
              Saygili (2009).
            * ``'RS_PGA_PGV'`` -- PGA and PGV-based model, equation 6
              from Saygili and Rathje (2008).

        bounds (dict): Optional dictionary with keys 'xmin', 'xmax', 'ymin',
            'ymax' that defines a subset of the shakemap area to compute.
        slopediv (float): Divide slope by this number to get slope in degrees
            (Verdin datasets need to be divided by 100).
        codiv (float): Divide cohesion input layer by this number
            (For Godt method, need to divide by 10 because that is how it was
            calibrated).
        numstd (float): Number of (+/-) standard deviations to use if
            uncertainty is computed (uncertfile must be supplied).
        trimfile (str): shapefile of earth's land masses to trim offshore areas
            of model

    Returns:
        dict: Dictionary containing output and input layers (if
        saveinputs=True):

        .. code-block:: python

            {
                'grid': mapio grid2D object,
                'label': 'label for colorbar and top line of subtitle',
                'type': 'output or input to model',
                'description': {'name': 'short reference of model',
                                'longref': 'full model reference',
                                'units': 'units of output',
                                'shakemap': 'information about shakemap used',
                                'event_id': 'shakemap event id',
                                'parameters': 'dictionary of model parameters
                                               used'

                }
            }

    Raises:
         NameError: when unable to parse the config correctly (probably a
             formatting issue in the configfile) or when unable to find the
             shakefile (Shakemap filepath) -- these cause program to end.

    """
    # TODO:
    #    - Add 'all' -- averages Dn from all four equations, add term to
    #      convert PGA and PGV to Ia and use other equations, add Ambraseys and
    #      Menu (1988) option.

    # Empty refs
    slopesref = 'unknown'
    slopelref = 'unknown'
    cohesionlref = 'unknown'
    cohesionsref = 'unknown'
    frictionsref = 'unknown'
    frictionlref = 'unknown'
    modellref = 'unknown'
    modelsref = 'unknown'

    # See if trimfile exists
    if trimfile is not None:
        if not os.path.exists(trimfile):
            print('trimfile defined does not exist: %s\n'
                  'Ocean will not be trimmed' % trimfile)
            trimfile = None
        elif os.path.splitext(trimfile)[1] != '.shp':
            print('trimfile must be a shapefile, ocean will not be trimmed')
            trimfile = None

    # Parse config
    try:  # May want to add error handling so if refs aren't given, just
        # includes unknown
        slopefilepath = config['godt_2008']['layers']['slope']['filepath']
        slopeunits = config['godt_2008']['layers']['slope']['units']
        cohesionfile = config['godt_2008']['layers']['cohesion']['file']
        cohesionunits = config['godt_2008']['layers']['cohesion']['units']
        frictionfile = config['godt_2008']['layers']['friction']['file']
        frictionunits = config['godt_2008']['layers']['friction']['units']

        thick = float(config['godt_2008']['parameters']['thick'])
        uwt = float(config['godt_2008']['parameters']['uwt'])
        nodata_cohesion = \
            float(config['godt_2008']['parameters']['nodata_cohesion'])
        nodata_friction = \
            float(config['godt_2008']['parameters']['nodata_friction'])
        dnthresh = float(config['godt_2008']['parameters']['dnthresh'])
        fsthresh = float(config['godt_2008']['parameters']['fsthresh'])
        acthresh = float(config['godt_2008']['parameters']['acthresh'])
        try:
            slopemin = float(config['godt_2008']['parameters']['slopemin'])
        except:
            slopemin = 0.01
            print('No slopemin found in config file, using 0.01 deg '
                  'for slope minimum')
    except Exception as e:
        raise NameError('Could not parse configfile, %s' % e)

    if displmodel is None:
        try:
            displmodel = config['godt_2008']['parameters']['displmodel']
        except:
            print('No regression model specified, using default of J_PGA_M')
            displmodel = 'J_PGA_M'

    # TO DO: ADD ERROR CATCHING ON UNITS, MAKE SURE THEY ARE WHAT THEY SHOULD
    #        BE FOR THIS MODEL

    try:  # Try to fetch source information from config
        modelsref = config['godt_2008']['shortref']
        modellref = config['godt_2008']['longref']
        slopesref = config['godt_2008']['layers']['slope']['shortref']
        slopelref = config['godt_2008']['layers']['slope']['longref']
        cohesionsref = config['godt_2008']['layers']['cohesion']['shortref']
        cohesionlref = config['godt_2008']['layers']['cohesion']['longref']
        frictionsref = config['godt_2008']['layers']['friction']['shortref']
        frictionlref = config['godt_2008']['layers']['friction']['longref']
    except:
        print('Was not able to retrieve all references from config file. '
              'Continuing')

    # Figure out how/if need to cut anything
    geodict = ShakeGrid.getFileGeoDict(shakefile)  # , adjust='res')
    if bounds is not None:  # Make sure bounds are within ShakeMap Grid
        if geodict.xmin < geodict.xmax:  # only if signs are not opposite
            if (geodict.xmin > bounds['xmin'] or geodict.xmax < bounds['xmax']
                    or geodict.ymin > bounds['ymin']
                    or geodict.ymax < bounds['ymax']):
                print('Specified bounds are outside shakemap area, using '
                      'ShakeMap bounds instead.')
                bounds = None

    if bounds is not None:
        tempgdict = GeoDict.createDictFromBox(bounds['xmin'],
                                              bounds['xmax'],
                                              bounds['ymin'],
                                              bounds['ymax'],
                                              geodict.dx,
                                              geodict.dy,
                                              inside=False)
        # If Shakemap geodict crosses 180/-180 line, fix geodict so things don't break
        if geodict.xmin > geodict.xmax:
            if tempgdict.xmin < 0:
                geodict._xmin -= 360.
            else:
                geodict._xmax += 360.
        geodict = geodict.getBoundsWithin(tempgdict)

    basegeodict, firstcol = GDALGrid.getFileGeoDict(
        os.path.join(slopefilepath, 'slope_min.bil'))
    if basegeodict == geodict:
        sampledict = geodict
    else:
        sampledict = basegeodict.getBoundsWithin(geodict)

    # Do we need to subdivide baselayer?
    if 'divfactor' in config['godt_2008'].keys():
        divfactor = float(config['godt_2008']['divfactor'])
        if divfactor != 1.:
            # adjust sampledict so everything will be resampled (cut one cell
            # of each edge so will be inside bounds)
            newxmin = sampledict.xmin - sampledict.dx/2. + \
                sampledict.dx/(2.*divfactor) + sampledict.dx
            newymin = sampledict.ymin - sampledict.dy/2. + \
                sampledict.dy/(2.*divfactor) + sampledict.dy
            newxmax = sampledict.xmax + sampledict.dx/2. - \
                sampledict.dx/(2.*divfactor) - sampledict.dx
            newymax = sampledict.ymax + sampledict.dy/2. - \
                sampledict.dy/(2.*divfactor) - sampledict.dy
            newdx = sampledict.dx / divfactor
            newdy = sampledict.dy / divfactor

            sampledict = GeoDict.createDictFromBox(newxmin,
                                                   newxmax,
                                                   newymin,
                                                   newymax,
                                                   newdx,
                                                   newdy,
                                                   inside=True)

    tmpdir = tempfile.mkdtemp()

    # Load in ShakeMap and get new geodictionary
    temp = ShakeGrid.load(shakefile)  # , adjust='res')
    junkfile = os.path.join(tmpdir, 'temp.bil')
    GDALGrid.copyFromGrid(temp.getLayer('pga')).save(junkfile)
    pga = quickcut(junkfile, sampledict, precise=True, method='bilinear')
    os.remove(junkfile)
    GDALGrid.copyFromGrid(temp.getLayer('pgv')).save(junkfile)
    pgv = quickcut(junkfile, sampledict, precise=True, method='bilinear')
    os.remove(junkfile)
    # Update geodictionary
    sampledict = pga.getGeoDict()

    t2 = temp.getEventDict()
    M = t2['magnitude']
    event_id = t2['event_id']
    shakedict = temp.getShakeDict()
    del (temp)

    # read in uncertainty if present
    if uncertfile is not None:
        try:
            temp = ShakeGrid.load(uncertfile)  # , adjust='res')
            GDALGrid.copyFromGrid(temp.getLayer('stdpga')).save(junkfile)
            uncertpga = quickcut(junkfile,
                                 sampledict,
                                 precise=True,
                                 method='bilinear',
                                 override=True)
            os.remove(junkfile)
            GDALGrid.copyFromGrid(temp.getLayer('stdpgv')).save(junkfile)
            uncertpgv = quickcut(junkfile,
                                 sampledict,
                                 precise=True,
                                 method='bilinear',
                                 override=True)
            os.remove(junkfile)
        except:
            print('Could not read uncertainty file, ignoring uncertainties')
            uncertfile = None
        if numstd is None:
            numstd = 1.

    # Read in all the slope files, dividing each by slopediv (default 100)
    # to convert to slope in degrees (Verdin input files store slope * 100)
    slopes = []
    quantiles = [
        'slope_min.bil', 'slope10.bil', 'slope30.bil', 'slope50.bil',
        'slope70.bil', 'slope90.bil', 'slope_max.bil'
    ]
    for quant in quantiles:
        tmpslp = quickcut(os.path.join(slopefilepath, quant), sampledict)
        tgd = tmpslp.getGeoDict()
        if tgd != sampledict:
            raise Exception('Input layers are not aligned to same geodict')
        else:
            slopes.append(tmpslp.getData() / slopediv)

    slopestack = np.dstack(slopes)

    # Change any zero slopes to a very small number to avoid dividing by
    # zero later
    slopestack[slopestack == 0] = 1e-8

    # Read in the cohesion and friction files and duplicate layers so they
    # are same shape as slope structure

    tempco = quickcut(cohesionfile, sampledict, method='near')
    tempco = tempco.getData()[:, :, np.newaxis] / codiv
    cohesion = np.repeat(tempco, 7, axis=2)
    cohesion[cohesion == -999.9] = nodata_cohesion
    cohesion = np.nan_to_num(cohesion)
    cohesion[cohesion == 0] = nodata_cohesion

    tempfric = quickcut(frictionfile, sampledict, method='near')
    tempfric = tempfric.getData().astype(float)[:, :, np.newaxis]
    friction = np.repeat(tempfric, 7, axis=2)
    friction[friction == -9999] = nodata_friction
    friction = np.nan_to_num(friction)
    friction[friction == 0] = nodata_friction

    # Compute the static factor of safety from infinite-slope analysis
    # (dry conditions):
    # FS = c/(uwt*thick*sin(slope)) + tan(friction)/tan(slope)
    FS = (cohesion / (uwt * thick * np.sin(slopestack * (np.pi / 180.))) +
          np.tan(friction * (np.pi / 180.)) / np.tan(slopestack *
                                                     (np.pi / 180.)))
    FS[FS < fsthresh] = fsthresh

    # Compute critical acceleration, in g
    # This gives ac in g, equations that multiply by g give ac in m/s2
    Ac = (FS - 1) * np.sin(slopestack * (np.pi / 180.)).astype(float)
    Ac[Ac < acthresh] = acthresh

    # Get PGA in g (PGA is %g in ShakeMap, convert to g)
    PGA = np.repeat(pga.getData()[:, :, np.newaxis] / 100., 7,
                    axis=2).astype(float)
    if 'PGV' in displmodel:  # Load in PGV also, in cm/sec
        PGV = np.repeat(pgv.getData()[:, :, np.newaxis], 7,
                        axis=2).astype(float)
    else:
        PGV = None

    if uncertfile is not None:
        stdpga = np.repeat(uncertpga.getData()[:, :, np.newaxis], 7,
                           axis=2).astype(float)
        stdpgv = np.repeat(uncertpgv.getData()[:, :, np.newaxis], 7,
                           axis=2).astype(float)
        # estimate PGA +- 1std
        PGAmin = np.exp(np.log(PGA * 100) - numstd * stdpga) / 100
        PGAmax = np.exp(np.log(PGA * 100) + numstd * stdpga) / 100
        if 'PGV' in displmodel:
            PGVmin = np.exp(np.log(PGV) - numstd * stdpgv)
            PGVmax = np.exp(np.log(PGV) + numstd * stdpgv)
        else:
            PGVmin = None
            PGVmax = None

    # Ignore errors so still runs when Ac > PGA, just leaves nan instead
    # of crashing.
    np.seterr(invalid='ignore')

    Dn, logDnstd, logtype = NMdisp(Ac, PGA, model=displmodel, M=M, PGV=PGV)
    if uncertfile is not None:
        Dnmin, logDnstdmin, logtype = NMdisp(Ac,
                                             PGAmin,
                                             model=displmodel,
                                             M=M,
                                             PGV=PGVmin)
        Dnmax, logDnstdmax, logtype = NMdisp(Ac,
                                             PGAmax,
                                             model=displmodel,
                                             M=M,
                                             PGV=PGVmax)

    PROB = Dn.copy()
    PROB[PROB < dnthresh] = 0.
    PROB[PROB >= dnthresh] = 1.
    PROB = np.sum(PROB, axis=2)
    if uncertfile is not None:
        PROBmin = Dnmin.copy()
        PROBmin[PROBmin <= dnthresh] = 0.
        PROBmin[PROBmin > dnthresh] = 1.
        PROBmin = np.sum(PROBmin, axis=2)
        PROBmax = Dnmax.copy()
        PROBmax[PROBmax <= dnthresh] = 0.
        PROBmax[PROBmax > dnthresh] = 1.
        PROBmax = np.sum(PROBmax, axis=2)

    PROB[PROB == 1.] = 0.01
    PROB[PROB == 2.] = 0.10
    PROB[PROB == 3.] = 0.30
    PROB[PROB == 4.] = 0.50
    PROB[PROB == 5.] = 0.70
    PROB[PROB == 6.] = 0.90
    PROB[PROB == 7.] = 0.99

    if uncertfile is not None:
        PROBmin[PROBmin == 1.] = 0.01
        PROBmin[PROBmin == 2.] = 0.10
        PROBmin[PROBmin == 3.] = 0.30
        PROBmin[PROBmin == 4.] = 0.50
        PROBmin[PROBmin == 5.] = 0.70
        PROBmin[PROBmin == 6.] = 0.90
        PROBmin[PROBmin == 7.] = 0.99
        PROBmax[PROBmax == 1.] = 0.01
        PROBmax[PROBmax == 2.] = 0.10
        PROBmax[PROBmax == 3.] = 0.30
        PROBmax[PROBmax == 4.] = 0.50
        PROBmax[PROBmax == 5.] = 0.70
        PROBmax[PROBmax == 6.] = 0.90
        PROBmax[PROBmax == 7.] = 0.99

    if slopemin is not None:
        PROB[slopestack[:, :, 6] <= slopemin] = 0.
        # uncert too
        if uncertfile is not None:
            PROBmin[slopestack[:, :, 6] <= slopemin] = 0.
            PROBmax[slopestack[:, :, 6] <= slopemin] = 0.

    # Turn output and inputs into into grids and put in mapLayers dictionary
    maplayers = collections.OrderedDict()

    shakedetail = '%s_ver%s' % (shakedict['shakemap_id'],
                                shakedict['shakemap_version'])

    description = {
        'name': modelsref,
        'longref': modellref,
        'units': 'Proportion of Area Affected',
        'shakemap': shakedetail,
        'event_id': event_id,
        'parameters': {
            'displmodel': displmodel,
            'thickness_m': thick,
            'unitwt_kNm3': uwt,
            'dnthresh_cm': dnthresh,
            'acthresh_g': acthresh,
            'fsthresh': fsthresh,
            'modeltype': 'Landslide'
        }
    }
    PROBgrid = GDALGrid(PROB, sampledict)
    if trimfile is not None:
        PROBgrid = trim_ocean(PROBgrid, trimfile)

    maplayers['model'] = {
        'grid': PROBgrid,
        'label': 'Landslide - Proportion of Area Affected',
        'type': 'output',
        'description': description
    }

    if uncertfile is not None:
        PROBmingrid = GDALGrid(PROBmin, sampledict)
        PROBmaxgrid = GDALGrid(PROBmax, sampledict)
        if trimfile is not None:
            PROBmingrid = trim_ocean(PROBmingrid, trimfile)
            PROBmaxgrid = trim_ocean(PROBmaxgrid, trimfile)
        maplayers['modelmin'] = {
            'grid': PROBmingrid,
            'label': 'Landslide Probability-%1.2fstd' % numstd,
            'type': 'output',
            'description': description
        }
        maplayers['modelmax'] = {
            'grid': PROBmaxgrid,
            'label': 'Landslide Probability+%1.2fstd' % numstd,
            'type': 'output',
            'description': description
        }

    if saveinputs is True:
        maplayers['pga'] = {
            'grid': GDALGrid(PGA[:, :, 0], sampledict),
            'label': 'PGA (g)',
            'type': 'input',
            'description': {
                'units': 'g',
                'shakemap': shakedetail
            }
        }
        if 'PGV' in displmodel:
            maplayers['pgv'] = {
                'grid': GDALGrid(PGV[:, :, 0], sampledict),
                'label': 'PGV (cm/s)',
                'type': 'input',
                'description': {
                    'units': 'cm/s',
                    'shakemap': shakedetail
                }
            }
        maplayers['minFS'] = {
            'grid': GDALGrid(np.min(FS, axis=2), sampledict),
            'label': 'Min Factor of Safety',
            'type': 'input',
            'description': {
                'units': 'unitless'
            }
        }
        maplayers['max slope'] = {
            'grid': GDALGrid(slopestack[:, :, -1], sampledict),
            'label': r'Maximum slope ($^\circ$)',
            'type': 'input',
            'description': {
                'units': 'degrees',
                'name': slopesref,
                'longref': slopelref
            }
        }
        maplayers['cohesion'] = {
            'grid': GDALGrid(cohesion[:, :, 0], sampledict),
            'label': 'Cohesion (kPa)',
            'type': 'input',
            'description': {
                'units': 'kPa (adjusted)',
                'name': cohesionsref,
                'longref': cohesionlref
            }
        }
        maplayers['friction angle'] = {
            'grid': GDALGrid(friction[:, :, 0], sampledict),
            'label': r'Friction angle ($^\circ$)',
            'type': 'input',
            'description': {
                'units': 'degrees',
                'name': frictionsref,
                'longref': frictionlref
            }
        }
        if uncertfile is not None:
            maplayers['pgamin'] = {
                'grid': GDALGrid(PGAmin[:, :, 0], sampledict),
                'label': 'PGA - %1.2fstd (g)' % numstd,
                'type': 'input',
                'description': {
                    'units': 'g',
                    'shakemap': shakedetail
                }
            }
            maplayers['pgamax'] = {
                'grid': GDALGrid(PGAmax[:, :, 0], sampledict),
                'label': 'PGA + %1.2fstd (g)' % numstd,
                'type': 'input',
                'description': {
                    'units': 'g',
                    'shakemap': shakedetail
                }
            }
        if 'PGV' in displmodel:
            if uncertfile is not None:
                maplayers['pgvmin'] = {
                    'grid': GDALGrid(PGVmin[:, :, 0], sampledict),
                    'label': 'PGV - %1.2fstd (cm/s)' % numstd,
                    'type': 'input',
                    'description': {
                        'units': 'cm/s',
                        'shakemap': shakedetail
                    }
                }
                maplayers['pgvmax'] = {
                    'grid': GDALGrid(PGVmax[:, :, 0], sampledict),
                    'label': 'PGV + %1.2fstd (cm/s)' % numstd,
                    'type': 'input',
                    'description': {
                        'units': 'cm/s',
                        'shakemap': shakedetail
                    }
                }

    shutil.rmtree(tmpdir)

    return maplayers
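

# Hedged usage sketch (added; the filenames and config contents are
# hypothetical): run the Godt (2008) model for one ShakeMap and inspect
# the output layer described in the docstring above.
if __name__ == '__main__':
    from configobj import ConfigObj

    config = ConfigObj('godt_2008.ini')  # must contain a [godt_2008] section
    maplayers = godt2008('grid.xml', config, saveinputs=False)
    model = maplayers['model']
    print(model['label'])                 # colorbar label
    print(model['description']['units'])  # 'Proportion of Area Affected'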
Example #25
def main(args):
    events = search(starttime=args.start,
                    endtime=args.end,
                    minlatitude=args.latmin,
                    maxlatitude=args.latmax,
                    minlongitude=args.lonmin,
                    maxlongitude=args.lonmax,
                    producttype='shakemap',
                    maxmagnitude=args.magRange[1],
                    minmagnitude=args.magRange[0])
    print('%i events found containing ShakeMaps.' % len(events))

    # Create the GeoDict to which the ShakeMaps will be resampled
    stack_dict = GeoDict.createDictFromBox(args.lonmin, args.lonmax,
                                           args.latmin, args.latmax,
                                           args.resolution, args.resolution)
    nrows, ncols = stack_dict.ny, stack_dict.nx
    imts = {}
    layer_names = {}
    event_info = {}
    layer_count = {}
    ic = 0
    for event in events:
        tnow = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
        if ic % 10 == 0:
            print('%s: Attempting to fetch ShakeMap for %s (%i of %i)' %
                  (tnow, event.id, ic, len(events)))
        ic += 1
        event_info[event.id] = event.toDict()
        try:
            detail = event.getDetailEvent()
        except Exception as e:
            fmt = 'Could not retrieve detail data for event %s, error "%s". Skipping.'
            print(fmt % (event.id, str(e)))
            continue
        if not detail.hasProduct('shakemap'):
            print(
                'Event %s appears not to have a ShakeMap after all... skipping.'
                % detail.id)
            continue
        shakemap = detail.getProducts('shakemap')[0]
        f, gridfile = tempfile.mkstemp()
        os.close(f)
        try:
            shakemap.getContent('grid.xml', gridfile)
            shakegrid = ShakeGrid.load(gridfile,
                                       samplegeodict=stack_dict,
                                       resample=True,
                                       doPadding=True)
            imtlist = list(shakegrid.getLayerNames())

            # remove the things that are not ground motions
            kill_list = ['stdpga', 'urat', 'svel']
            for layer in kill_list:
                if layer in imtlist:
                    imtlist.remove(layer)

            for imt in imtlist:
                imtdata = shakegrid.getLayer(imt).getData()
                if imt not in imts:
                    imts[imt] = np.zeros((nrows, ncols, len(events)))
                    layer_count[imt] = 0
                    idx = 0
                    layer_names[imt] = [event.id]
                else:
                    idx = layer_count[imt] + 1
                    layer_names[imt].append(event.id)
                    layer_count[imt] = layer_count[imt] + 1
                imts[imt][:, :, idx] = imtdata
        except Exception as e:
            print('Error fetching ShakeMap grid from %s -  "%s".  Skipping.' %
                  (event.id, str(e)))
        finally:
            os.remove(gridfile)

    # make sure all imts have valid grids in each vertical layer
    # trim off any layers that don't have any data in them.
    for imtname, imtcube in imts.items():
        height_diff = len(events) - (layer_count[imtname] + 1)
        if height_diff:
            top_layer = layer_count[imtname] + 1
            imts[imtname] = imtcube[:, :, 0:top_layer]

    # now create an HDF file, and stuff our data and metadata into it
    stack_file = GridHDFContainer.create(args.outputfile)
    stack_file.setDictionary('layer_names', layer_names)
    stack_file.setDictionary('event', event_info)
    metadata = stack_dict.asDict()
    for imtname, imtcube in imts.items():
        stack_file.setArray(imtname,
                            imtcube,
                            metadata=metadata,
                            compression=True)

    stack_file.close()
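

# Hedged usage sketch (added): read the stacked arrays back out of the
# container written by main(). The getDictionary/getArrays/getArray
# getters are assumed to mirror the setDictionary/setArray calls above,
# as they do in mapio's GridHDFContainer.
if __name__ == '__main__':
    stack = GridHDFContainer.load('stack.hdf')  # hypothetical output file
    layer_names = stack.getDictionary('layer_names')
    for imtname in stack.getArrays():
        cube, meta = stack.getArray(imtname)
        print(imtname, cube.shape, len(layer_names[imtname]))
    stack.close()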
Example #26
def test_rupture_depth(interactive=False):
    DIP = 17.0
    WIDTH = 20.0
    GRIDRES = 0.1

    names = ['single', 'double', 'triple',
             'concave', 'concave_simple', 'ANrvSA']
    means = [3.1554422780092461, 2.9224454569459781,
             3.0381968625073563, 2.0522694624400271,
             2.4805390352818755, 2.8740121776209673]
    stds = [2.1895293825074575, 2.0506459673526174,
            2.0244588429154402, 2.0112565876976416,
            2.1599789955270019, 1.6156220309120068]
    xp0list = [np.array([118.3]),
               np.array([10.1, 10.1]),
               np.array([10.1, 10.1, 10.3]),
               np.array([10.9, 10.5, 10.9]),
               np.array([10.9, 10.6]),
               np.array([-76.483, -76.626, -76.757, -76.99, -77.024, -76.925,
                         -76.65, -76.321, -75.997, -75.958])]
    xp1list = [np.array([118.3]),
               np.array([10.1, 10.3]),
               np.array([10.1, 10.3, 10.1]),
               np.array([10.5, 10.9, 11.3]),
               np.array([10.6, 10.9]),
               np.array([-76.626, -76.757, -76.99, -77.024, -76.925, -76.65,
                         -76.321, -75.997, -75.958, -76.006])]
    yp0list = [np.array([34.2]),
               np.array([34.2, 34.5]),
               np.array([34.2, 34.5, 34.8]),
               np.array([34.2, 34.5, 34.8]),
               np.array([35.1, 35.2]),
               np.array([-52.068, -51.377, -50.729, -49.845, -49.192, -48.507,
                         -47.875, -47.478, -47.08, -46.422])]
    yp1list = [np.array([34.5]),
               np.array([34.5, 34.8]),
               np.array([34.5, 34.8, 35.1]),
               np.array([34.5, 34.8, 34.6]),
               np.array([35.2, 35.4]),
               np.array([-51.377, -50.729, -49.845, -49.192, -48.507, -47.875,
                         -47.478, -47.08, -46.422, -45.659])]

    for i in range(0, len(xp0list)):
        xp0 = xp0list[i]
        xp1 = xp1list[i]
        yp0 = yp0list[i]
        yp1 = yp1list[i]
        name = names[i]
        mean_value = means[i]
        std_value = stds[i]

        zp = np.zeros(xp0.shape)
        strike = azimuth(xp0[0], yp0[0], xp1[-1], yp1[-1])
        widths = np.ones(xp0.shape) * WIDTH
        dips = np.ones(xp0.shape) * DIP
        strike = [strike]
        origin = Origin({'eventsourcecode': 'test', 'lat': 0, 'lon': 0,
                         'depth': 5.0, 'mag': 7.0})
        rupture = QuadRupture.fromTrace(
            xp0, yp0, xp1, yp1, zp, widths, dips, origin, strike=strike)

        # make a grid of points over both quads, ask for depths
        ymin = np.nanmin(rupture.lats)
        ymax = np.nanmax(rupture.lats)
        xmin = np.nanmin(rupture.lons)
        xmax = np.nanmax(rupture.lons)

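        # Added note: snap the rupture bounding box outward to the nearest
        # GRIDRES increment so the grid edges land on round coordinates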
        xmin = np.floor(xmin * (1 / GRIDRES)) / (1 / GRIDRES)
        xmax = np.ceil(xmax * (1 / GRIDRES)) / (1 / GRIDRES)
        ymin = np.floor(ymin * (1 / GRIDRES)) / (1 / GRIDRES)
        ymax = np.ceil(ymax * (1 / GRIDRES)) / (1 / GRIDRES)
        geodict = GeoDict.createDictFromBox(
            xmin, xmax, ymin, ymax, GRIDRES, GRIDRES)
        nx = geodict.nx
        ny = geodict.ny
        depths = np.zeros((ny, nx))
        for row in range(0, ny):
            for col in range(0, nx):
                lat, lon = geodict.getLatLon(row, col)
                depth = rupture.getDepthAtPoint(lat, lon)
                depths[row, col] = depth

        np.testing.assert_almost_equal(np.nanmean(depths), mean_value)
        np.testing.assert_almost_equal(np.nanstd(depths), std_value)

        if interactive:
            fig, axes = plt.subplots(nrows=2, ncols=1)
            ax1, ax2 = axes
            xdata = np.append(xp0, xp1[-1])
            ydata = np.append(yp0, yp1[-1])
            plt.sca(ax1)
            plt.plot(xdata, ydata, 'b')
            plt.sca(ax2)
            im = plt.imshow(depths, cmap='viridis_r')  # noqa
            ch = plt.colorbar()  # noqa
            fname = os.path.join(os.path.expanduser('~'),
                                 'quad_%s_test.png' % name)
            print('Saving image for %s quad test... %s' % (name, fname))
            plt.savefig(fname)
            plt.close()
Example #27
def check_input_extents(config, shakefile=None, bounds=None):
    """Make sure all input files exist and cover the extent desired

    Args:
        config: configObj of a single model
        shakefile: path to ShakeMap grid.xml file (used for bounds). If not
            provided, bounds must be provided
        bounds: dictionary of bounds with keys: 'xmin', 'xmax', 'ymin', 'ymax'

    Returns:
        tuple containing:
            notcovered: list of files that do not cover the entire area
                defined by bounds or shakefile
            newbounds: new dictionary of bounds of subarea of original
                bounds or shakefile extent that is covered by all input files
    """
    if shakefile is None and bounds is None:
        raise Exception('Must define either a shakemap file or bounds')
    modelname = list(config.keys())[0]
    # Make dummy geodict to use
    if bounds is None:
        evdict = ShakeGrid.getFileGeoDict(shakefile)
    else:
        evdict = GeoDict.createDictFromBox(
            bounds['xmin'], bounds['xmax'],
            bounds['ymin'], bounds['ymax'],
            0.00001, 0.00001, inside=False)

    # Check extents of all input layers
    notcovered = []
    notcovgdicts = []
    newbounds = None
    for item, value in config[modelname]['layers'].items():
        if 'file' in value.keys():
            filelook = value['file']
            if getFileType(filelook) == 'gmt':
                tmpgd, _ = GMTGrid.getFileGeoDict(filelook)
            else:
                tmpgd, _ = GDALGrid.getFileGeoDict(filelook)
            # See if tempgd contains evdict
            contains = tmpgd.contains(evdict)
            if not contains:
                notcovered.append(filelook)
                notcovgdicts.append(tmpgd)
                # print(filelook)
    if len(notcovered) > 0:
        # Figure out what bounds COULD be run
        xmins = [gd.xmin for gd in notcovgdicts]
        xmaxs = [gd.xmax for gd in notcovgdicts]
        ymins = [gd.ymin for gd in notcovgdicts]
        ymaxs = [gd.ymax for gd in notcovgdicts]

        # Buffer the bounds inward by 0.05 degrees because mapio's
        # getBoundsWithin does not like bounds that are exactly identical
        newbounds = dict(xmin=evdict.xmin + 0.05,
                         xmax=evdict.xmax - 0.05,
                         ymin=evdict.ymin + 0.05,
                         ymax=evdict.ymax - 0.05)
        # Which one is the problem?
        if evdict.xmin < np.max(xmins):
            newbounds['xmin'] = np.max(xmins) + 0.05
        if evdict.xmax > np.min(xmaxs):
            newbounds['xmax'] = np.min(xmaxs) - 0.05
        if evdict.ymin < np.max(ymins):
            newbounds['ymin'] = np.max(ymins) + 0.05
        if evdict.ymax > np.min(ymaxs):
            newbounds['ymax'] = np.min(ymaxs) - 0.05

        # See if this is a possible extent
        try:
            GeoDict.createDictFromBox(
                newbounds['xmin'], newbounds['xmax'],
                newbounds['ymin'], newbounds['ymax'],
                0.00001, 0.00001, inside=False)
        except BaseException:
            print('Cannot make new bounds that will work')
            newbounds = None

    return notcovered, newbounds
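

# Hedged usage sketch (added; paths are hypothetical): report any input
# layers that do not fully cover a ShakeMap, plus the largest bounds that
# all layers could support.
if __name__ == '__main__':
    from configobj import ConfigObj

    config = ConfigObj('mymodel.ini')  # config describing a single model
    notcovered, newbounds = check_input_extents(config, shakefile='grid.xml')
    if notcovered:
        print('Layers with incomplete coverage:', notcovered)
        print('Largest commonly covered bounds:', newbounds)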
Example #28
    def __init__(self,
                 shakefile,
                 config,
                 uncertfile=None,
                 saveinputs=False,
                 slopefile=None,
                 bounds=None,
                 slopemod=None,
                 trimfile=None):
        """
        Sets up the logistic model

        Args:
            shakefile (str): Path to shakemap grid.xml file for the event.
            config: configobj object defining the model and its inputs. Only
                one model should be described in each config file.
            uncertfile (str): Path to uncertainty.xml file.
            saveinputs (bool): Save input layers as Grid2D objects in addition
                to the model? If false (the default), it will just output the
                model.
            slopefile (str): Optional path to slopefile that will be resampled
                to the other input files for applying thresholds. OVERWRITES
                VALUE IN CONFIG.
            bounds (dict): Default of None uses ShakeMap boundaries, otherwise
                a dictionary of boundaries to cut to like

                .. code-block:: python

                    bounds = {
                        'xmin': lonmin, 'xmax': lonmax,
                        'ymin': latmin, 'ymax': latmax
                    }
            slopemod (str): How slope input should be modified to be in
                degrees: e.g., ``np.arctan(slope) * 180. / np.pi`` or
                ``slope/100.`` (note that this may be in the config file
                already).
            trimfile (str): shapefile of earth's landmasses to use to cut
                offshore areas.
        """
        mnames = getLogisticModelNames(config)
        if len(mnames) == 0:
            raise Exception('No config file found or problem with config '
                            'file format')
        if len(mnames) > 1:
            raise Exception('Config file contains more than one model which '
                            'is no longer allowed, update your config file '
                            'to the newer format')

        self.model = mnames[0]
        self.config = config
        cmodel = config[self.model]
        self.modeltype = cmodel['gfetype']
        self.coeffs = validateCoefficients(cmodel)
        # key = layer name, value = file name
        self.layers = validateLayers(cmodel)
        self.terms, timeField = validateTerms(cmodel, self.coeffs, self.layers)
        self.interpolations = validateInterpolations(cmodel, self.layers)
        self.units = validateUnits(cmodel, self.layers)
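        # Added note: collect the ground-motion layer names (pga/pgv/mmi)
        # referenced by the model terms; these determine which uncertainty
        # layers get read in later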
        self.gmused = [
            value for term, value in cmodel['terms'].items()
            if 'pga' in value.lower() or 'pgv' in value.lower()
            or 'mmi' in value.lower()
        ]
        self.modelrefs, self.longrefs, self.shortrefs = validateRefs(cmodel)
        self.clips = validateClips(cmodel, self.layers, self.gmused)
        self.notes = ''

        if cmodel['baselayer'] not in list(self.layers.keys()):
            raise Exception('You must specify a base layer corresponding to '
                            'one of the files in the layer section.')
        self.saveinputs = saveinputs
        if slopefile is None:
            try:
                self.slopefile = cmodel['slopefile']
            except:
                # print('Slopefile not specified in config, no slope '
                #      'thresholds will be applied\n')
                self.slopefile = None
        else:
            self.slopefile = slopefile
        if slopemod is None:
            try:
                self.slopemod = cmodel['slopemod']
            except:
                self.slopemod = None
        else:
            self.slopemod = slopemod

        # See if trimfile exists
        if trimfile is not None:
            if not os.path.exists(trimfile):
                print('trimfile defined does not exist: %s\nOcean will not be '
                      'trimmed' % trimfile)
                self.trimfile = None
            elif os.path.splitext(trimfile)[1] != '.shp':
                print('trimfile must be a shapefile, ocean will not be '
                      'trimmed')
                self.trimfile = None
            else:
                self.trimfile = trimfile
        else:
            self.trimfile = None

        # Get month of event
        griddict, eventdict, specdict, fields, uncertainties = \
            getHeaderData(shakefile)
        MONTH = MONTHS[(eventdict['event_timestamp'].month) - 1]

        # Figure out how/if need to cut anything
        geodict = ShakeGrid.getFileGeoDict(shakefile, adjust='res')
        if bounds is not None:  # Make sure bounds are within ShakeMap Grid
            if geodict.xmin < geodict.xmax:  # only if signs are not opposite
                if (geodict.xmin > bounds['xmin']
                        or geodict.xmax < bounds['xmax']
                        or geodict.ymin > bounds['ymin']
                        or geodict.ymax < bounds['ymax']):
                    print('Specified bounds are outside shakemap area, using '
                          'ShakeMap bounds instead.')
                    bounds = None

        if bounds is not None:
            tempgdict = GeoDict.createDictFromBox(bounds['xmin'],
                                                  bounds['xmax'],
                                                  bounds['ymin'],
                                                  bounds['ymax'],
                                                  geodict.dx,
                                                  geodict.dy,
                                                  inside=False)
            # If Shakemap geodict crosses 180/-180 line, fix geodict so things don't break
            if geodict.xmin > geodict.xmax:
                if tempgdict.xmin < 0:
                    geodict._xmin -= 360.
                else:
                    geodict._xmax += 360.
            gdict = geodict.getBoundsWithin(tempgdict)
        else:
            gdict = geodict

        # Now find the layer that is our base layer and get the largest bounds
        # we can guarantee not to exceed shakemap bounds
        basefile = self.layers[cmodel['baselayer']]
        ftype = getFileType(basefile)
        if ftype == 'esri':
            basegeodict, firstcol = GDALGrid.getFileGeoDict(basefile)
            if basegeodict == gdict:
                sampledict = gdict
            else:
                sampledict = basegeodict.getBoundsWithin(gdict)
        elif ftype == 'gmt':
            basegeodict, firstcol = GMTGrid.getFileGeoDict(basefile)
            if basegeodict == gdict:
                sampledict = gdict
            else:
                sampledict = basegeodict.getBoundsWithin(gdict)
        else:
            raise Exception('All predictor variable grids must be a valid '
                            'GMT or ESRI file type.')

        # Do we need to subdivide baselayer?
        if 'divfactor' in self.config[self.model].keys():
            divfactor = float(self.config[self.model]['divfactor'])
            if divfactor != 1.:
                # adjust sampledict so everything will be resampled
                newxmin = sampledict.xmin - sampledict.dx / \
                    2. + sampledict.dx/(2.*divfactor)
                newymin = sampledict.ymin - sampledict.dy / \
                    2. + sampledict.dy/(2.*divfactor)
                newxmax = sampledict.xmax + sampledict.dx / \
                    2. - sampledict.dx/(2.*divfactor)
                newymax = sampledict.ymax + sampledict.dy / \
                    2. - sampledict.dy/(2.*divfactor)
                newdx = sampledict.dx / divfactor
                newdy = sampledict.dy / divfactor

                sampledict = GeoDict.createDictFromBox(newxmin,
                                                       newxmax,
                                                       newymin,
                                                       newymax,
                                                       newdx,
                                                       newdy,
                                                       inside=True)

        # Find slope thresholds, if applicable
        self.slopemin = 'none'
        self.slopemax = 'none'
        if self.slopefile is not None:
            try:
                self.slopemin = float(config[self.model]['slopemin'])
                self.slopemax = float(config[self.model]['slopemax'])
            except (KeyError, ValueError):
                print('Could not find valid slopemin and/or slopemax in '
                      'config. No slope thresholds will be applied.')
                self.slopemin = 'none'
                self.slopemax = 'none'

        # Make temporary directory for hdf5 pytables file storage
        self.tempdir = tempfile.mkdtemp()

        # now load the shakemap, resampling and padding if necessary
        temp = ShakeGrid.load(shakefile)  # , adjust='res')
        self.shakedict = temp.getShakeDict()
        self.eventdict = temp.getEventDict()
        self.shakemap = {}

        # Read in both PGA and PGV; we may need them for thresholds
        for gm in ['pga', 'pgv']:
            junkfile = os.path.join(self.tempdir, 'temp.bil')
            GDALGrid.copyFromGrid(temp.getLayer(gm)).save(junkfile)
            if gm in self.interpolations.keys():
                intermeth = self.interpolations[gm]
            else:
                intermeth = 'bilinear'
            junkgrid = quickcut(junkfile,
                                sampledict,
                                precise=True,
                                method=intermeth)
            if gm in self.clips:
                junkgrid.setData(
                    np.clip(junkgrid.getData(), self.clips[gm][0],
                            self.clips[gm][1]))
            self.shakemap[gm] = TempHdf(
                junkgrid, os.path.join(self.tempdir, '%s.hdf5' % gm))
            os.remove(junkfile)
        del temp

        # get updated geodict
        sampledict = junkgrid.getGeoDict()

        # take uncertainties into account, if available
        if uncertfile is not None:
            self.uncert = {}
            try:
                # Only read in the ones that will be needed
                temp = ShakeGrid.load(uncertfile)
                already = []
                for gm in self.gmused:
                    if 'pgv' in gm:
                        gmsimp = 'pgv'
                    elif 'pga' in gm:
                        gmsimp = 'pga'
                    elif 'mmi' in gm:
                        gmsimp = 'mmi'
                    if gmsimp in already:
                        continue
                    junkfile = os.path.join(self.tempdir, 'temp.bil')
                    GDALGrid.copyFromGrid(temp.getLayer('std%s' %
                                                        gmsimp)).save(junkfile)
                    if gmsimp in self.interpolations.keys():
                        intermeth = self.interpolations[gmsimp]
                    else:
                        intermeth = 'bilinear'
                    junkgrid = quickcut(junkfile,
                                        sampledict,
                                        precise=True,
                                        method=intermeth)
                    if gmsimp in self.clips:
                        junkgrid.setData(
                            np.clip(junkgrid.getData(), self.clips[gmsimp][0],
                                    self.clips[gmsimp][1]))
                    self.uncert['std' + gmsimp] = TempHdf(
                        junkgrid,
                        os.path.join(self.tempdir, 'std%s.hdf5' % gmsimp))
                    already.append(gmsimp)
                    os.remove(junkfile)
                del temp
            except Exception:
                print('Could not read uncertainty file, ignoring '
                      'uncertainties')
                self.uncert = None
        else:
            self.uncert = None

        # Load the predictor layers, save as hdf5 temporary files, put file
        # locations into a dictionary.

        # Will be replaced in the next section if a slopefile was defined
        self.nonzero = None

        # key = layer name, value = grid object
        self.layerdict = {}

        didslope = False
        for layername, layerfile in self.layers.items():
            start = timer()
            if isinstance(layerfile, list):
                for lfile in layerfile:
                    if timeField == 'MONTH':
                        if lfile.find(MONTH) > -1:
                            layerfile = lfile
                            ftype = getFileType(layerfile)
                            interp = self.interpolations[layername]
                            temp = quickcut(layerfile,
                                            sampledict,
                                            precise=True,
                                            method=interp)
                            if layername in self.clips:
                                temp.setData(
                                    np.clip(temp.getData(),
                                            self.clips[layername][0],
                                            self.clips[layername][1]))
                            self.layerdict[layername] = TempHdf(
                                temp,
                                os.path.join(self.tempdir,
                                             '%s.hdf5' % layername))
                            del temp
            else:
                interp = self.interpolations[layername]
                temp = quickcut(layerfile,
                                sampledict,
                                precise=True,
                                method=interp)
                if layername in self.clips:
                    temp.setData(
                        np.clip(temp.getData(), self.clips[layername][0],
                                self.clips[layername][1]))
                # Convert unconsolidated sediments to a more reasonable
                # coefficient
                if layername == 'rock':
                    sub1 = temp.getData()
                    # Change to mixed sed rock coeff
                    sub1[sub1 <= -3.21] = -1.36
                    temp.setData(sub1)
                    self.notes += ('unconsolidated sediment coefficient '
                                   'changed to -1.36 (weaker) from -3.22 to '
                                   'better reflect that this unit is not '
                                   'actually strong\n')

                self.layerdict[layername] = TempHdf(
                    temp, os.path.join(self.tempdir, '%s.hdf5' % layername))
                td = temp.getGeoDict()
                if td != sampledict:
                    raise Exception(
                        'Geodictionaries of resampled files do not match')

                if layerfile == self.slopefile:
                    flag = 0
                    if self.slopemin == 'none' and self.slopemax == 'none':
                        flag = 1
                    if self.slopemod is None:
                        slope1 = temp.getData().astype(float)
                        slope = 0
                    else:
                        try:
                            slope = temp.getData().astype(float)
                            slope1 = eval(self.slopemod)
                        except Exception:
                            print('slopemod provided is not valid, continuing '
                                  'without slope thresholds.')
                            flag = 1
                    if flag == 0:
                        nonzero = np.array([(slope1 > self.slopemin) &
                                            (slope1 <= self.slopemax)])
                        self.nonzero = nonzero[0, :, :]
                        del slope1
                        del slope
                    else:
                        # Still remove areas where the slope equals exactly
                        # 0.0 to remove offshore liq areas.
                        nonzero = np.array([slope1 != 0.0])
                        self.nonzero = nonzero[0, :, :]
                        del slope1
                    didslope = True
                del temp

            print('Loading %s layer: %1.1f sec' % (layername, timer() - start))

        if didslope is False and self.slopefile is not None:
            # Slope didn't get read in yet
            temp = quickcut(self.slopefile,
                            sampledict,
                            precise=True,
                            method='bilinear')
            flag = 0
            if self.slopemin == 'none' and self.slopemax == 'none':
                flag = 1
            if self.slopemod is None:
                slope1 = temp.getData().astype(float)
                slope = 0
            else:
                try:
                    slope = temp.getData().astype(float)
                    slope1 = eval(self.slopemod)
                except Exception:
                    print('slopemod provided is not valid, continuing '
                          'without slope thresholds.')
                    flag = 1
            if flag == 0:
                nonzero = np.array([
                    (slope1 > self.slopemin) & (slope1 <= self.slopemax)
                ])
                self.nonzero = nonzero[0, :, :]
                del slope1
                del slope
            else:
                # Still remove areas where the slope equals exactly
                # 0.0 to remove offshore liq areas.
                nonzero = np.array([slope1 != 0.0])
                self.nonzero = nonzero[0, :, :]
                del slope1

        self.nuggets = [str(self.coeffs['b0'])]

        ckeys = list(self.terms.keys())
        ckeys.sort()
        for key in ckeys:
            term = self.terms[key]
            coeff = self.coeffs[key]
            self.nuggets.append('(%g * %s)' % (coeff, term))

        self.equation = ' + '.join(self.nuggets)
        self.geodict = sampledict
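
The nugget strings assembled above form a plain-text linear predictor that
is evaluated later against the resampled layer arrays. A minimal sketch of
the same assembly, using made-up coefficients and terms (the real names come
from the model config, and the logistic transform at the end is only the
usual way such an equation is consumed, not code from this snippet):

import numpy as np

# Hypothetical coefficients/terms mirroring the nugget assembly above.
coeffs = {'b0': -3.6, 'b1': 0.33, 'b2': 1.5}
terms = {'b1': 'np.log(pgv)', 'b2': 'slope / 90.'}

nuggets = [str(coeffs['b0'])]
for key in sorted(terms.keys()):
    nuggets.append('(%g * %s)' % (coeffs[key], terms[key]))
equation = ' + '.join(nuggets)
# equation == '-3.6 + (0.33 * np.log(pgv)) + (1.5 * slope / 90.)'

# Evaluating against arrays named in the term strings:
pgv = np.array([10., 20.])
slope = np.array([30., 45.])
x = eval(equation)              # linear predictor
prob = 1. / (1. + np.exp(-x))   # logistic transform (illustrative)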
Example #29
def test_output_container():
    geodict = GeoDict.createDictFromBox(-118.5,-114.5,32.1,36.7,0.01,0.02)
    nrows,ncols = geodict.ny,geodict.nx

    #create MMI mean data for maximum component
    mean_mmi_maximum_data = np.random.rand(nrows,ncols)
    mean_mmi_maximum_metadata = {'name':'Gandalf',
                                 'color':'white',
                                 'powers':'magic'}
    mean_mmi_maximum_grid = Grid2D(mean_mmi_maximum_data,geodict)

    #create MMI std data for maximum component
    std_mmi_maximum_data = mean_mmi_maximum_data/10
    std_mmi_maximum_metadata = {'name':'Legolas',
                                'color':'green',
                                'powers':'good hair'}
    std_mmi_maximum_grid = Grid2D(std_mmi_maximum_data,geodict)

    #create MMI mean data for rotd50 component
    mean_mmi_rotd50_data = np.random.rand(nrows,ncols)
    mean_mmi_rotd50_metadata = {'name':'Gimli',
                                 'color':'brown',
                                 'powers':'axing'}
    mean_mmi_rotd50_grid = Grid2D(mean_mmi_rotd50_data,geodict)

    #create MMI std data for rotd50 component
    std_mmi_rotd50_data = mean_mmi_rotd50_data/10
    std_mmi_rotd50_metadata = {'name':'Aragorn',
                                'color':'white',
                                'powers':'scruffiness'}
    std_mmi_rotd50_grid = Grid2D(std_mmi_rotd50_data,geodict)

    #create PGA mean data for maximum component
    mean_pga_maximum_data = np.random.rand(nrows,ncols)
    mean_pga_maximum_metadata = {'name':'Pippin',
                                 'color':'purple',
                                 'powers':'rashness'}
    mean_pga_maximum_grid = Grid2D(mean_pga_maximum_data,geodict)

    #create PGA std data for maximum component
    std_pga_maximum_data = mean_pga_maximum_data/10
    std_pga_maximum_metadata = {'name':'Merry',
                                'color':'grey',
                                'powers':'hunger'}
    std_pga_maximum_grid = Grid2D(std_pga_maximum_data,geodict)

    f,datafile = tempfile.mkstemp()
    os.close(f)
    try:
        container = ShakeMapOutputContainer.create(datafile)
        container.setIMTGrids('mmi',
                         mean_mmi_maximum_grid,mean_mmi_maximum_metadata,
                         std_mmi_maximum_grid,std_mmi_maximum_metadata,
                         component='maximum')
        container.setIMTGrids('mmi',
                         mean_mmi_rotd50_grid,mean_mmi_rotd50_metadata,
                         std_mmi_rotd50_grid,std_mmi_rotd50_metadata,
                         component='rotd50')
        container.setIMTGrids('pga',
                         mean_pga_maximum_grid,mean_pga_maximum_metadata,
                         std_pga_maximum_grid,std_pga_maximum_metadata,
                         component='maximum')

        #get the maximum MMI imt data
        mmi_max_dict = container.getIMTGrids('mmi',component='maximum')
        np.testing.assert_array_equal(mmi_max_dict['mean'].getData(),
                                      mean_mmi_maximum_data)
        np.testing.assert_array_equal(mmi_max_dict['std'].getData(),
                                      std_mmi_maximum_data)
        assert mmi_max_dict['mean_metadata'] == mean_mmi_maximum_metadata
        assert mmi_max_dict['std_metadata'] == std_mmi_maximum_metadata

        #get the rotd50 MMI imt data
        mmi_rot_dict = container.getIMTGrids('mmi',component='rotd50')
        np.testing.assert_array_equal(mmi_rot_dict['mean'].getData(),
                                      mean_mmi_rotd50_data)
        np.testing.assert_array_equal(mmi_rot_dict['std'].getData(),
                                      std_mmi_rotd50_data)
        assert mmi_rot_dict['mean_metadata'] == mean_mmi_rotd50_metadata
        assert mmi_rot_dict['std_metadata'] == std_mmi_rotd50_metadata

        #get list of maximum imts
        max_imts = container.getIMTs(component='maximum')
        assert sorted(max_imts) == ['mmi','pga']

        #get list of components for mmi
        mmi_comps = container.getComponents('mmi')
        assert sorted(mmi_comps) == ['maximum','rotd50']
    except Exception as e:
        raise e
    finally:
        os.remove(datafile)
Example #30
def test_output_container():
    geodict = GeoDict.createDictFromBox(-118.5, -114.5, 32.1, 36.7, 0.01, 0.02)
    nrows, ncols = geodict.ny, geodict.nx

    # create MMI mean data for maximum component
    mean_mmi_maximum_data = np.random.rand(nrows, ncols)
    mean_mmi_maximum_metadata = {
        'name': 'Gandalf',
        'color': 'white',
        'powers': 'magic'
    }
    mean_mmi_maximum_grid = Grid2D(mean_mmi_maximum_data, geodict)

    # create MMI std data for maximum component
    std_mmi_maximum_data = mean_mmi_maximum_data / 10
    std_mmi_maximum_metadata = {
        'name': 'Legolas',
        'color': 'green',
        'powers': 'good hair'
    }
    std_mmi_maximum_grid = Grid2D(std_mmi_maximum_data, geodict)

    # create MMI mean data for rotd50 component
    mean_mmi_rotd50_data = np.random.rand(nrows, ncols)
    mean_mmi_rotd50_metadata = {
        'name': 'Gimli',
        'color': 'brown',
        'powers': 'axing'
    }
    mean_mmi_rotd50_grid = Grid2D(mean_mmi_rotd50_data, geodict)

    # create MMI std data for rotd50 component
    std_mmi_rotd50_data = mean_mmi_rotd50_data / 10
    std_mmi_rotd50_metadata = {
        'name': 'Aragorn',
        'color': 'white',
        'powers': 'scruffiness'
    }
    std_mmi_rotd50_grid = Grid2D(std_mmi_rotd50_data, geodict)

    # create PGA mean data for maximum component
    mean_pga_maximum_data = np.random.rand(nrows, ncols)
    mean_pga_maximum_metadata = {
        'name': 'Pippin',
        'color': 'purple',
        'powers': 'rashness'
    }
    mean_pga_maximum_grid = Grid2D(mean_pga_maximum_data, geodict)

    # create PGA std data for maximum component
    std_pga_maximum_data = mean_pga_maximum_data / 10
    std_pga_maximum_metadata = {
        'name': 'Merry',
        'color': 'grey',
        'powers': 'hunger'
    }
    std_pga_maximum_grid = Grid2D(std_pga_maximum_data, geodict)

    f, datafile = tempfile.mkstemp()
    os.close(f)
    try:
        container = ShakeMapOutputContainer.create(datafile)
        container.setIMTGrids('mmi',
                              mean_mmi_maximum_grid,
                              mean_mmi_maximum_metadata,
                              std_mmi_maximum_grid,
                              std_mmi_maximum_metadata,
                              component='maximum')
        container.setIMTGrids('mmi',
                              mean_mmi_rotd50_grid,
                              mean_mmi_rotd50_metadata,
                              std_mmi_rotd50_grid,
                              std_mmi_rotd50_metadata,
                              component='rotd50')
        container.setIMTGrids('pga',
                              mean_pga_maximum_grid,
                              mean_pga_maximum_metadata,
                              std_pga_maximum_grid,
                              std_pga_maximum_metadata,
                              component='maximum')

        # get the maximum MMI imt data
        mmi_max_dict = container.getIMTGrids('mmi', component='maximum')
        np.testing.assert_array_equal(mmi_max_dict['mean'].getData(),
                                      mean_mmi_maximum_data)
        np.testing.assert_array_equal(mmi_max_dict['std'].getData(),
                                      std_mmi_maximum_data)
        assert mmi_max_dict['mean_metadata'] == mean_mmi_maximum_metadata
        assert mmi_max_dict['std_metadata'] == std_mmi_maximum_metadata

        # get the rotd50 MMI imt data
        mmi_rot_dict = container.getIMTGrids('mmi', component='rotd50')
        np.testing.assert_array_equal(mmi_rot_dict['mean'].getData(),
                                      mean_mmi_rotd50_data)
        np.testing.assert_array_equal(mmi_rot_dict['std'].getData(),
                                      std_mmi_rotd50_data)
        assert mmi_rot_dict['mean_metadata'] == mean_mmi_rotd50_metadata
        assert mmi_rot_dict['std_metadata'] == std_mmi_rotd50_metadata

        # Check repr method
        assert repr(container) == '''Data type: grid
    use "getIMTGrids" method to access interpolated IMTs
Rupture: None
Config: None
Stations: None
Metadata: None
Available IMTs (components):
    mmi (maximum, rotd50)
    pga (maximum)
'''

        # get list of all imts
        imts = container.getIMTs()

        # get list of maximum imts
        max_imts = container.getIMTs(component='maximum')
        assert sorted(max_imts) == ['mmi', 'pga']

        # get list of components for mmi
        mmi_comps = container.getComponents('mmi')
        assert sorted(mmi_comps) == ['maximum', 'rotd50']

        # Test dropIMT
        imts = container.getIMTs('maximum')
        assert imts == ['mmi', 'pga']
        container.dropIMT('mmi')
        imts = container.getIMTs('maximum')
        assert imts == ['pga']
        container.close()

    except Exception as e:
        raise e
    finally:
        os.remove(datafile)
Example #31
def test():
    #these values taken from the shakemap header of:
    #http://earthquake.usgs.gov/realtime/product/shakemap/ak12496371/ak/1453829475592/download/grid.xml

    print('Testing various dictionaries for consistency...')

    print('Testing consistent dictionary...')
    #this should pass, and will serve as the comparison from now on
    gdict = {
        'xmin': -160.340600,
        'xmax': -146.340600,
        'ymin': 54.104700,
        'ymax': 65.104700,
        'dx': 0.025000,
        'dy': 0.025000,
        'ny': 441,
        'nx': 561
    }
    gd = GeoDict(gdict)
    print('Consistent dictionary passed.')

    print('Testing dictionary with inconsistent resolution...')
    #this should pass
    gdict = {
        'xmin': -160.340600,
        'xmax': -146.340600,
        'ymin': 54.104700,
        'ymax': 65.104700,
        'dx': 0.026000,
        'dy': 0.026000,
        'ny': 441,
        'nx': 561
    }
    gd3 = GeoDict(gdict, adjust='res')
    assert gd3 == gd
    print('Dimensions modification passed.')

    print('Testing dictionary with inconsistent lower right corner...')
    #this should pass
    gdict = {
        'xmin': -160.340600,
        'xmax': -146.350600,
        'ymin': 54.103700,
        'ymax': 65.104700,
        'dx': 0.025000,
        'dy': 0.025000,
        'ny': 441,
        'nx': 561
    }
    gd4 = GeoDict(gdict, adjust='bounds')
    assert gd4 == gd
    print('Corner modification passed.')

    print(
        'Testing to make sure lat/lon and row/col calculations are correct...')
    #make sure the lat/lon row/col calculations are correct
    ndec = int(np.abs(np.log10(GeoDict.EPS)))
    lat, lon = gd.getLatLon(0, 0)
    dlat = np.abs(lat - gd.ymax)
    dlon = np.abs(lon - gd.xmin)
    assert dlat < GeoDict.EPS and dlon < GeoDict.EPS
    row, col = gd.getRowCol(lat, lon)
    assert row == 0 and col == 0

    lat, lon = gd.getLatLon(gd.ny - 1, gd.nx - 1)
    dlat = np.abs(lat - gd.ymin)
    dlon = np.abs(lon - gd.xmax)
    assert dlat < GeoDict.EPS and dlon < GeoDict.EPS
    row, col = gd.getRowCol(lat, lon)
    assert row == (gd.ny - 1) and col == (gd.nx - 1)
    print('lat/lon and row/col calculations are correct.')

    print('Testing a dictionary for a global grid...')
    #this is the file geodict for Landscan - should pass muster
    globaldict = {
        'nx': 43200,
        'ny': 20880,
        'dx': 0.00833333333333,
        'xmax': 179.99583333318935,
        'xmin': -179.99583333333334,
        'dy': 0.00833333333333,
        'ymax': 83.99583333326376,
        'ymin': -89.99583333333334
    }
    gd5 = GeoDict(globaldict)
    lat, lon = gd5.getLatLon(gd5.ny - 1, gd5.nx - 1)
    dlat = np.abs(lat - gd5.ymin)
    dlon = np.abs(lon - gd5.xmax)
    assert dlat < GeoDict.EPS and dlon < GeoDict.EPS
    print('Global grid is internally consistent.')

    #Test class methods for creating a GeoDict
    print('Testing whether GeoDict creator class methods work...')
    xmin = -121.05333277776235
    xmax = -116.03833388890432
    ymin = 32.138334444506171
    ymax = 36.286665555493826
    dx = 0.0083333333333333332
    dy = 0.0083333333333333332
    gd6 = GeoDict.createDictFromBox(xmin,
                                    xmax,
                                    ymin,
                                    ymax,
                                    dx,
                                    dy,
                                    inside=False)
    assert gd6.xmax > xmax
    assert gd6.ymin < ymin
    print('Created dictionary (outside) is correct.')
    gd7 = GeoDict.createDictFromBox(xmin,
                                    xmax,
                                    ymin,
                                    ymax,
                                    dx,
                                    dy,
                                    inside=True)
    assert gd7.xmax < xmax
    assert gd7.ymin > ymin
    print('Created dictionary (inside) is correct.')
    xspan = 2.5
    yspan = 2.5
    gd8 = GeoDict.createDictFromCenter(xmin, ymin, dx, dy, xspan, yspan)
    print('Created dictionary (from center point) is valid.')

    print('Testing a geodict with dx/dy values that are NOT the same...')
    xmin, xmax, ymin, ymax = (-121.06166611109568, -116.03000055557099,
                              32.130001111172838, 36.294998888827159)
    dx, dy = (0.009999722214505959, 0.009999444413578534)
    td = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    print(
        'Passed testing a geodict with dx/dy values that are NOT the same...')

    #test getBoundsWithin
    #use global grid, and then a shakemap grid that we can get
    print('Testing getBoundsWithin...')
    grussia = {
        'xmin': 155.506400,
        'xmax': 161.506400,
        'ymin': 52.243000,
        'ymax': 55.771000,
        'dx': 0.016667,
        'dy': 0.016642,
        'nx': 361,
        'ny': 213
    }
    gdrussia = GeoDict(grussia, adjust='res')
    sampledict = gd5.getBoundsWithin(gdrussia)
    xSmaller = (sampledict.xmin > grussia['xmin']
                and sampledict.xmax < grussia['xmax'])
    ySmaller = (sampledict.ymin > grussia['ymin']
                and sampledict.ymax < grussia['ymax'])
    assert xSmaller and ySmaller
    assert gd5.isAligned(sampledict)
    print('getBoundsWithin returned correct result.')

    print('Testing isAligned() method...')
    gd = GeoDict({
        'xmin': 0.5,
        'xmax': 3.5,
        'ymin': 0.5,
        'ymax': 3.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 4,
        'ny': 4
    })

    inside_aligned = GeoDict({
        'xmin': 1.5,
        'xmax': 2.5,
        'ymin': 1.5,
        'ymax': 2.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 2,
        'ny': 2
    })
    inside_not_aligned = GeoDict({
        'xmin': 2.0,
        'xmax': 3.0,
        'ymin': 2.0,
        'ymax': 3.0,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 2,
        'ny': 2
    })
    assert gd.isAligned(inside_aligned)
    assert not gd.isAligned(inside_not_aligned)
    print('Passed isAligned() method...')

    print('Testing getAligned method...')
    popdict = GeoDict({
        'dx': 0.00833333333333,
        'dy': 0.00833333333333,
        'nx': 43200,
        'ny': 20880,
        'xmax': 179.99583333318935,
        'xmin': -179.99583333333334,
        'ymax': 83.99583333326376,
        'ymin': -89.99583333333334
    })
    sampledict = GeoDict({
        'dx': 0.008333333333333333,
        'dy': 0.008336693548387094,
        'nx': 601,
        'ny': 497,
        'xmax': -116.046,
        'xmin': -121.046,
        'ymax': 36.2785,
        'ymin': 32.1435
    })
    aligndict = popdict.getAligned(sampledict)
    assert popdict.isAligned(aligndict)

    print('Testing geodict intersects method...')
    gd1 = GeoDict({
        'xmin': 0.5,
        'xmax': 3.5,
        'ymin': 0.5,
        'ymax': 3.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 4,
        'ny': 4
    })

    gd2 = GeoDict({
        'xmin': 2.5,
        'xmax': 5.5,
        'ymin': 2.5,
        'ymax': 5.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 4,
        'ny': 4
    })
    gd3 = GeoDict({
        'xmin': 4.5,
        'xmax': 7.5,
        'ymin': 4.5,
        'ymax': 7.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 4,
        'ny': 4
    })
    gd4 = GeoDict({
        'xmin': 1.5,
        'xmax': 2.5,
        'ymin': 1.5,
        'ymax': 2.5,
        'dx': 1.0,
        'dy': 1.0,
        'nx': 2,
        'ny': 2
    })
    assert gd1.intersects(gd2)
    assert not gd1.intersects(gd3)
    print('Passed intersects method...')

    print('Testing geodict intersects method with real geographic data...')
    gda = GeoDict({
        'ymax': 83.62083333333263,
        'nx': 43201,
        'ny': 20835,
        'dx': 0.00833333333333,
        'dy': 0.00833333333333,
        'xmin': -179.99583333333334,
        'ymin': -89.99583333326461,
        'xmax': -179.99583333347732
    })
    gdb = GeoDict({
        'ymax': 28.729166666619193,
        'nx': 300,
        'ny': 264,
        'dx': 0.00833333333333,
        'dy': 0.00833333333333,
        'xmin': 84.08749999989436,
        'ymin': 26.537499999953404,
        'xmax': 86.57916666656007
    })
    assert gda.intersects(gdb)
    print('Passed geodict intersects method with real geographic data.')

    print('Testing geodict doesNotContain method...')
    assert gd1.doesNotContain(gd3)
    assert not gd1.doesNotContain(gd4)

    print('Passed doesNotContain method...')

    print('Testing geodict contains method...')

    assert gd1.contains(gd4)
    assert not gd1.contains(gd3)
    print('Passed contains method...')
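
The row/col assertions above are consistent with a cell-registered grid whose
row 0 is the top (ymax) row and column 0 is the left (xmin) column. A
standalone sketch of that mapping (an assumption for illustration, not
mapio's actual implementation), reusing the numbers from the consistent
dictionary above:

import numpy as np

def get_lat_lon(row, col, gd):
    # Cell centers: latitude decreases with row, longitude grows with column.
    return gd['ymax'] - row * gd['dy'], gd['xmin'] + col * gd['dx']

def get_row_col(lat, lon, gd):
    row = int(np.round((gd['ymax'] - lat) / gd['dy']))
    col = int(np.round((lon - gd['xmin']) / gd['dx']))
    return row, col

gd = {'xmin': -160.3406, 'ymax': 65.1047, 'dx': 0.025, 'dy': 0.025,
      'nx': 561, 'ny': 441}
assert get_lat_lon(0, 0, gd) == (65.1047, -160.3406)
assert get_row_col(*get_lat_lon(440, 560, gd), gd) == (440, 560)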
Example #32
def quickcut(filename,
             gdict,
             tempname=None,
             extrasamp=5.,
             method='bilinear',
             precise=True,
             cleanup=True,
             verbose=False,
             override=False):
    """
    Use gdal to trim a large global file down quickly so mapio can read it
    efficiently. (Cannot read ShakeMap .xml files; save as .bil first.)

    Args:
        filename (str): File path to original input file (raster).
        gdict (geodict): Geodictionary to cut around and align with.
        tempname (str): File path to desired location of clipped part of
            filename.
        extrasamp (float): Number of extra cells to cut around each edge of
            geodict to provide a resampling buffer for future steps.
        method (str): If resampling is necessary, method to use.
        precise (bool): If True, will resample to the gdict as closely as
            possible; if False, will just roughly cut around the area of
            interest without changing resolution.
        cleanup (bool): If True, delete tempname after reading it back in.
        verbose (bool): If True, prints more details.
        override (bool): If True and the extent of filename is not fully
            contained by gdict, read in the entire file (only used for
            ShakeMaps).

    Returns: New grid2D layer

    Note: This function uses the subprocess approach because ``gdal.Translate``
        doesn't block until the output file is created, which causes
        problems in the next steps.
    """
    if gdict.xmax < gdict.xmin:
        raise Exception('quickcut: your geodict xmax is smaller than xmin')

    try:
        filegdict = GDALGrid.getFileGeoDict(filename)
    except Exception:
        try:
            filegdict = GMTGrid.getFileGeoDict(filename)
        except Exception:
            raise Exception('Cannot get geodict for %s' % filename)

    if tempname is None:
        tempdir = tempfile.mkdtemp()
        tempname = os.path.join(tempdir, 'junk.tif')
        deltemp = True
    else:
        tempdir = None
        deltemp = False

    # if os.path.exists(tempname):
    #     os.remove(tempname)
    #     print('Temporary file already there, removing file')

    filegdict = filegdict[0]

    # Get the right methods for mapio (method) and gdal (method2)
    if method == 'linear':
        method2 = 'bilinear'
    elif method == 'nearest':
        method2 = 'near'
    elif method == 'bilinear':
        method = 'linear'
        method2 = 'bilinear'
    elif method == 'near':
        method = 'nearest'
        method2 = 'near'
    else:
        method2 = method

    if filegdict != gdict:
        # First cut without resampling
        tempgdict = GeoDict.createDictFromBox(gdict.xmin,
                                              gdict.xmax,
                                              gdict.ymin,
                                              gdict.ymax,
                                              filegdict.dx,
                                              filegdict.dy,
                                              inside=True)

        try:
            egdict = filegdict.getBoundsWithin(tempgdict)

            ulx = egdict.xmin - extrasamp * egdict.dx
            uly = egdict.ymax + extrasamp * egdict.dy
            lrx = egdict.xmax + (extrasamp + 1) * egdict.dx
            lry = egdict.ymin - (extrasamp + 1) * egdict.dy

            cmd = ('gdal_translate -a_srs EPSG:4326 -of GTiff '
                   '-projwin %1.8f %1.8f %1.8f %1.8f -r %s %s %s'
                   % (ulx, uly, lrx, lry, method2, filename, tempname))
        except Exception as e:
            if override:
                # When ShakeMap is being loaded, sometimes they won't align
                # right because it's already cut to the area, so just load
                # the whole file
                cmd = 'gdal_translate -a_srs EPSG:4326 -of GTiff -r %s %s %s' \
                      % (method2, filename, tempname)
            else:
                raise Exception('Failed to cut layer: %s' % e)

        rc, so, se = get_command_output(cmd)
        if not rc:
            raise Exception(se.decode())
        else:
            if verbose:
                print(so.decode())

        newgrid2d = GDALGrid.load(tempname)
        if precise:
            # Resample to exact geodictionary
            newgrid2d = newgrid2d.interpolate2(gdict, method=method)
        if cleanup:
            os.remove(tempname)

        if deltemp:
            shutil.rmtree(tempdir)

    else:
        ftype = GMTGrid.getFileType(filename)
        if ftype != 'unknown':
            newgrid2d = GMTGrid.load(filename)
        elif filename.endswith('.xml'):
            newgrid2d = ShakeGrid.load(filename)
        else:
            newgrid2d = GDALGrid.load(filename)

    return newgrid2d
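
A typical call, sketched with hypothetical file names (ShakeGrid is the same
mapio class used by the other snippets here): cut a global slope grid down to
a ShakeMap's geodict before heavier processing, as the model setup code
earlier in this collection does.

# Hypothetical usage sketch; file paths are illustrative.
sampledict = ShakeGrid.getFileGeoDict('grid.xml', adjust='res')
slopegrid = quickcut('global_slope.bil', sampledict,
                     extrasamp=5., method='bilinear', precise=True)
# With precise=True the result is interpolated onto sampledict exactly,
# so its geodict should match:
assert slopegrid.getGeoDict() == sampledict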
Example #33
def get_exposures(grid,
                  pop_file,
                  shakefile=None,
                  shakethreshtype=None,
                  shakethresh=0.0,
                  probthresh=None,
                  stdgrid2D=None,
                  stdtype='mean',
                  maxP=1.):
    """
    Get exposure-based statistics.

    Args:
        grid: Model grid.
        pop_file (str):  Path to the landscan population grid.
        shakefile (str): Optional, path to shakemap file to use for ground
            motion threshold.
        shakethreshtype(str): Optional, Type of ground motion to use for
            shakethresh, 'pga', 'pgv', or 'mmi'.
        shakethresh: Optional, Float or list of shaking thresholds in %g for
            pga, cm/s for pgv, float for mmi.
        probthresh: Optional, None or float, exclude any cells with
            probabilities less than or equal to this value
        stdgrid2D: grid2D object of model standard deviations (optional)
        stdtype (str): assumption of spatial correlation used to compute
            the stdev of the statistics, 'max', 'min' or 'mean' of max and min
        maxP (float): the maximum possible probability of the model

    Returns:
        dict: Dictionary with the following keys, where # is the shakethresh:
            exp_pop_#, the population exposure;
            exp_std_# (if stdgrid2D is supplied), the stdev of exp_pop;
            elim_#, the maximum exposure value possible with the applied
            thresholds and the given maxP value;
            p_exp_#, beta distribution shape factor p (sometimes called
            alpha);
            q_exp_#, beta distribution shape factor q (sometimes called beta)
    """

    # If probthresh defined, zero out any areas less than or equal to
    # probthresh before proceeding
    if not isinstance(shakethresh, (list, np.ndarray)):
        shakethresh = [shakethresh]
    if probthresh is not None:
        origdata = grid.getData()
        moddat = origdata.copy()
        moddat[moddat <= probthresh] = 0.0
        moddat[np.isnan(origdata)] = float('nan')
        if stdgrid2D is not None:
            stddat = stdgrid2D.getData().copy()
            stddat[moddat <= probthresh] = 0.0
            stddat[np.isnan(origdata)] = 0.0
    else:
        moddat = grid.getData().copy()
        if stdgrid2D is not None:
            stddat = stdgrid2D.getData().copy()

    mdict = grid.getGeoDict()

    # Cut out area from population file
    popcut = quickcut(pop_file,
                      mdict,
                      precise=False,
                      extrasamp=2.,
                      method='nearest')
    popdat = popcut.getData()
    pdict = popcut.getGeoDict()

    # Pad grid with nans to beyond extent of pdict
    pad_dict = {}
    pad_dict['padleft'] = int(
        np.abs(np.ceil((mdict.xmin - pdict.xmin) / mdict.dx)))
    pad_dict['padright'] = int(
        np.abs(np.ceil((pdict.xmax - mdict.xmax) / mdict.dx)))
    pad_dict['padbottom'] = int(
        np.abs(np.ceil((mdict.ymin - pdict.ymin) / mdict.dy)))
    pad_dict['padtop'] = int(
        np.abs(np.ceil((pdict.ymax - mdict.ymax) / mdict.dy)))

    padgrid, mdict2 = Grid2D.padGrid(moddat, mdict, pad_dict)  # pads with inf
    padgrid[np.isinf(padgrid)] = float('nan')  # change to pad with nan
    padgrid = Grid2D(data=padgrid, geodict=mdict2)  # Turn into grid2d object

    if stdgrid2D is not None:
        padstdgrid, mdict3 = Grid2D.padGrid(stddat, mdict,
                                            pad_dict)  # pads with inf
        padstdgrid[np.isinf(padstdgrid)] = float(
            'nan')  # change to pad with nan
        padstdgrid = Grid2D(data=padstdgrid,
                            geodict=mdict3)  # Turn into grid2d object

    # Resample model grid so its resolution is an integer factor finer than
    # the population grid (pdict)
    factor = np.round(pdict.dx / mdict2.dx)

    # Create geodictionary that is a factor of X higher res but otherwise
    # identical
    ndict = GeoDict.createDictFromBox(pdict.xmin, pdict.xmax, pdict.ymin,
                                      pdict.ymax, pdict.dx / factor,
                                      pdict.dy / factor)

    # Resample
    grid2 = padgrid.interpolate2(ndict, method='linear')

    # Get proportion of each cell that has values (to account properly
    # for any nans)
    prop = block_reduce(~np.isnan(grid2.getData().copy()),
                        block_size=(int(factor), int(factor)),
                        cval=float('nan'),
                        func=np.sum) / (factor**2.)
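    # For example, with factor=2 each 2x2 block of fine cells collapses to
    # one population cell; a block with three valid (non-nan) cells gives
    # prop=0.75, which later scales that cell's population contribution by
    # the fraction actually covered by model values.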

    # Now block reduce to same geodict as popfile
    modresamp = block_reduce(grid2.getData().copy(),
                             block_size=(int(factor), int(factor)),
                             cval=float('nan'),
                             func=np.nanmean)

    if stdgrid2D is not None:
        grid2std = padstdgrid.interpolate2(ndict, method='linear')
        propstd = block_reduce(~np.isnan(grid2std.getData().copy()),
                               block_size=(int(factor), int(factor)),
                               cval=float('nan'),
                               func=np.sum) / (factor**2.)
        modresampstd = block_reduce(grid2std.getData().copy(),
                                    block_size=(int(factor), int(factor)),
                                    cval=float('nan'),
                                    func=np.nanmean)

    exp_pop = {}
    if shakefile is not None:
        # Resample shakefile to population grid
        shakemap = ShakeGrid.load(shakefile, resample=False)
        shakemap = shakemap.getLayer(shakethreshtype)
        shakemap = shakemap.interpolate2(pdict)
        shkdat = shakemap.getData()
        for shaket in shakethresh:
            threshmult = shkdat > shaket
            threshmult = threshmult.astype(float)
            mu = np.nansum(popdat * prop * modresamp * threshmult)
            exp_pop['exp_pop_%1.2fg' % (shaket / 100., )] = mu
            elim = maxP * np.nansum(popdat * prop * threshmult)
            exp_pop['elim_%1.2fg' % (shaket / 100., )] = elim
            if stdgrid2D is not None:
                totalmax = np.nansum(popdat * propstd * modresampstd *
                                     threshmult)
                totalmin = np.sqrt(
                    np.nansum(
                        (popdat * propstd * modresampstd * threshmult)**2.))
                if stdtype == 'max':
                    exp_pop['exp_std_%1.2fg' % (shaket / 100., )] = totalmax
                elif stdtype == 'min':
                    exp_pop['exp_std_%1.2fg' % (shaket / 100., )] = totalmin
                else:
                    exp_pop['exp_std_%1.2fg' %
                            (shaket / 100., )] = (totalmax + totalmin) / 2.
                # Beta distribution shape factors
                var = exp_pop['exp_std_%1.2fg' % (shaket / 100., )]**2.
                exp_pop['p_exp_%1.2fg' % (shaket / 100., )] = (mu / elim) * (
                    (elim * mu - mu**2) / var - 1)
                exp_pop['q_exp_%1.2fg' %
                        (shaket / 100., )] = (1 - mu / elim) * (
                            (elim * mu - mu**2) / var - 1)

    else:
        mu = np.nansum(popdat * prop * modresamp)
        exp_pop['exp_pop_0.00g'] = mu
        elim = maxP * np.nansum(popdat * prop)
        exp_pop['elim_0.00g'] = elim
        if stdgrid2D is not None:
            totalmax = np.nansum(popdat * propstd * modresampstd)
            totalmin = np.sqrt(np.nansum(
                (popdat * propstd * modresampstd)**2.))
            if stdtype == 'max':
                exp_pop['exp_std_0.00g'] = totalmax
            elif stdtype == 'min':
                exp_pop['exp_std_0.00g'] = totalmin
            else:
                exp_pop['exp_std_0.00g'] = (totalmax + totalmin) / 2.
            # Beta distribution shape factors
            var = exp_pop['exp_std_0.00g']**2.
            exp_pop['p_exp_0.00g'] = (mu / elim) * (
                (elim * mu - mu**2) / var - 1)
            exp_pop['q_exp_0.00g'] = (1 - mu / elim) * (
                (elim * mu - mu**2) / var - 1)

    return exp_pop
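
The p_exp_/q_exp_ values are method-of-moments shape factors for a beta
distribution scaled to [0, elim]. A quick numerical self-check with
illustrative numbers (not model output): recovering the mean and variance
from p and q should reproduce mu and var.

import numpy as np

# Method of moments for a beta distribution on [0, elim]:
#   p = (mu/elim) * ((elim*mu - mu**2)/var - 1)
#   q = (1 - mu/elim) * ((elim*mu - mu**2)/var - 1)
mu, elim, var = 1200., 5000., 9.0e4
common = (elim * mu - mu**2) / var - 1
p = (mu / elim) * common
q = (1 - mu / elim) * common

m = p / (p + q)                         # scaled mean, equals mu/elim
v = p * q / ((p + q)**2 * (p + q + 1))  # scaled variance, equals var/elim**2
assert np.isclose(m * elim, mu)
assert np.isclose(v * elim**2, var)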