def test_grid_hdf_container():
    f, fname = tempfile.mkstemp()
    os.close(f)
    try:
        # test grid container
        container = GridHDFContainer.create(fname)

        # before we put anything in here, let's make sure we get empty
        # lists from all of the methods that are supposed to return
        # lists of stuff.
        assert container.getGrids() == []

        # test grid2d
        geodict = GeoDict.createDictFromBox(-118.5, -114.5, 32.1, 36.7,
                                            0.01, 0.02)
        nrows, ncols = geodict.ny, geodict.nx
        data = np.random.rand(nrows, ncols)
        metadata = {'name': 'Gandalf',
                    'color': 'white',
                    'powers': 'magic'}
        grid = Grid2D(data, geodict)
        container.setGrid('testgrid', grid, metadata=metadata)
        outgrid, outmetadata = container.getGrid('testgrid')
        np.testing.assert_array_equal(outgrid.getData(), data)
        assert outgrid.getGeoDict() == geodict
        assert outmetadata == metadata

        # set another grid without compression
        geodict = GeoDict.createDictFromBox(-119.5, -115.5, 32.3, 37.7,
                                            0.01, 0.02)
        nrows, ncols = geodict.ny, geodict.nx
        data = np.random.rand(nrows, ncols)
        metadata = {'name': 'Legolas',
                    'color': 'green',
                    'powers': 'stealth'}
        grid2 = Grid2D(data, geodict)
        container.setGrid('testgrid2', grid2, metadata=metadata,
                          compression=False)
        outgrid2, outmetadata2 = container.getGrid('testgrid2')
        np.testing.assert_array_equal(outgrid2.getData(), data)
        assert outgrid2.getGeoDict() == geodict
        assert outmetadata2 == metadata

        # test getGrids()
        names = container.getGrids()
        assert sorted(names) == ['testgrid', 'testgrid2']

        # test looking for a grid that does not exist; this should raise
        try:
            container.getGrid('foo')
            raise AssertionError('getGrid should have raised LookupError')
        except LookupError:
            pass

        # test dropping a grid
        container.dropGrid('testgrid2')

        container.close()

        container2 = GridHDFContainer.load(fname)
        names = container2.getGrids()
        assert sorted(names) == ['testgrid']
    finally:
        os.remove(fname)

def getIMTGrids(self, imt_name, component):
    """
    Retrieve a Grid2D object and any associated metadata from the
    container.

    Args:
        imt_name (str):
            The name of the IMT stored in the container.
        component (str):
            The component of the IMT stored in the container.

    Returns:
        dict: Dictionary containing 4 items:
            - mean: Grid2D object for IMT mean values.
            - mean_metadata: Dictionary containing any metadata
              describing mean layer.
            - std: Grid2D object for IMT standard deviation values.
            - std_metadata: Dictionary containing any metadata
              describing standard deviation layer.
    """
    if self.getDataType() != 'grid':
        raise TypeError('Requesting grid data from file containing '
                        'points')
    group_name = '%s_%s' % (imt_name, component)
    if GROUPS['imt'] not in self._hdfobj:
        raise LookupError('No IMTs stored in HDF file %s'
                          % (self.getFileName()))
    if group_name not in self._hdfobj[GROUPS['imt']]:
        raise LookupError('No group called %s in HDF file %s'
                          % (group_name, self.getFileName()))
    imt_group = self._hdfobj[GROUPS['imt']][group_name]

    # get the mean data and metadata
    mean_dset = imt_group['mean']
    mean_data = mean_dset[()]
    array_metadata, mean_metadata = _split_dset_attrs(mean_dset)
    mean_geodict = GeoDict(array_metadata)
    mean_grid = Grid2D(mean_data, mean_geodict)

    # get the std data and metadata
    std_dset = imt_group['std']
    std_data = std_dset[()]
    array_metadata, std_metadata = _split_dset_attrs(std_dset)
    std_geodict = GeoDict(array_metadata)
    std_grid = Grid2D(std_data, std_geodict)

    # create an output dictionary
    imt_dict = {
        'mean': mean_grid,
        'mean_metadata': mean_metadata,
        'std': std_grid,
        'std_metadata': std_metadata
    }
    return imt_dict

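
# --- Hypothetical usage sketch (not part of the original source). It shows
# how getIMTGrids() might be called on an already-loaded container; the
# 'container' argument and the 'PGA'/'GREATER_OF_TWO_HORIZONTAL' names are
# illustrative assumptions, not values taken from this codebase.
def _example_get_imt_grids(container, imt_name='PGA',
                           component='GREATER_OF_TWO_HORIZONTAL'):
    imt_dict = container.getIMTGrids(imt_name, component)
    mean_grid = imt_dict['mean']  # Grid2D of mean values
    std_grid = imt_dict['std']    # Grid2D of standard deviations
    print(mean_grid.getGeoDict())
    print(imt_dict['mean_metadata'])
    return mean_grid, std_grid
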
def test_interpolate():
    geodict = GeoDict({'xmin': 0.5,
                       'xmax': 6.5,
                       'ymin': 1.5,
                       'ymax': 6.5,
                       'dx': 1.0,
                       'dy': 1.0,
                       'ny': 6,
                       'nx': 7})
    data = np.arange(14, 56).reshape(6, 7)
    for method in ['nearest', 'linear', 'cubic']:
        print('Testing interpolate with method "%s"...' % method)
        grid = Grid2D(data, geodict)
        sampledict = GeoDict({'xmin': 3.0, 'xmax': 4.0,
                              'ymin': 3.0, 'ymax': 4.0,
                              'dx': 1.0, 'dy': 1.0,
                              'ny': 2, 'nx': 2})
        grid = grid.interpolateToGrid(sampledict, method=method)
        tgrid = grid.interpolate2(sampledict, method=method)
        if method == 'nearest':
            output = np.array([[30.0, 31.0], [37.0, 38.0]])
        elif method == 'linear':
            output = np.array([[34., 35.], [41., 42.]])
        elif method == 'cubic':
            output = np.array([[34., 35.], [41., 42.]])
        np.testing.assert_almost_equal(grid.getData(), output)
        print('Passed interpolate with method "%s".' % method)
        np.testing.assert_almost_equal(tgrid.getData(), output)
        print('Passed interpolate2 with method "%s".' % method)

    # speed test of interpolateToGrid and interpolate2
    geodict = GeoDict.createDictFromBox(0, 10, 0, 10, 0.01, 0.01)
    data = np.random.rand(geodict.ny, geodict.nx)
    grid = Grid2D(data, geodict)
    sampledict = GeoDict.createDictFromBox(2, 8, 2, 8, 0.098, 0.098)
    t1 = time.time()
    grid2 = grid.interpolateToGrid(sampledict, method='linear')
    t2 = time.time()
    grid3 = grid.interpolate2(sampledict, method='linear')
    t3 = time.time()
    # np.testing.assert_almost_equal(grid2._data.sum(), grid3._data.sum())
    print('scipy method: %.3f seconds' % (t2 - t1))
    print('gdal method: %.3f seconds' % (t3 - t2))

def test():
    print('Testing MultiGrid interpolate...')
    data = np.arange(14, 56).reshape(6, 7)
    geodict = GeoDict({'xmin': 0.5, 'xmax': 6.5,
                       'ymin': 1.5, 'ymax': 6.5,
                       'dx': 1.0, 'dy': 1.0,
                       'ny': 6, 'nx': 7})
    layers = OrderedDict()
    layers['layer1'] = Grid2D(data, geodict)
    mgrid = MultiGrid(layers)
    sampledict = GeoDict({'xmin': 3.0, 'xmax': 4.0,
                          'ymin': 3.0, 'ymax': 4.0,
                          'dx': 1.0, 'dy': 1.0,
                          'ny': 2, 'nx': 2})
    for method in ['nearest', 'linear', 'cubic']:
        mgrid2 = mgrid.interpolateToGrid(sampledict, method=method)
        if method == 'nearest':
            output = np.array([[30.0, 31.0], [37.0, 38.0]])
        elif method == 'linear':
            output = np.array([[34., 35.], [41., 42.]])
        elif method == 'cubic':
            output = np.array([[34., 35.], [41., 42.]])
        np.testing.assert_almost_equal(
            mgrid2.getLayer('layer1').getData(), output)
    print('Passed MultiGrid interpolate test.')

    print('Testing bounds retrieval...')
    b1 = np.array(mgrid.getBounds())
    b2 = np.array((geodict.xmin, geodict.xmax, geodict.ymin, geodict.ymax))
    np.testing.assert_almost_equal(b1, b2)
    print('Passed bounds retrieval...')

    print('Testing MultiGrid subdivide...')
    data = np.arange(0, 9).reshape((3, 3))
    geodict = GeoDict({'xmin': 0.0, 'xmax': 10.0,
                       'ymin': 0.0, 'ymax': 10.0,
                       'dx': 5.0, 'dy': 5.0,
                       'ny': 3, 'nx': 3})
    layers = OrderedDict()
    layers['layer1'] = Grid2D(data, geodict)
    hostgrid = MultiGrid(layers)
    finedict = GeoDict({'xmin': -2.5, 'xmax': 11.5,
                        'ymin': -1.5, 'ymax': 10.5,
                        'dx': 2.0, 'dy': 2.0,
                        'nx': 8, 'ny': 7})
    N = np.nan
    finegrid = hostgrid.subdivide(finedict, cellFill='min')
    output = np.array([[N, 0., 0., 1., 1., 1., 2., 2.],
                       [N, 0., 0., 1., 1., 1., 2., 2.],
                       [N, 3., 3., 4., 4., 4., 5., 5.],
                       [N, 3., 3., 4., 4., 4., 5., 5.],
                       [N, 3., 3., 4., 4., 4., 5., 5.],
                       [N, 6., 6., 7., 7., 7., 8., 8.],
                       [N, 6., 6., 7., 7., 7., 8., 8.]])
    np.testing.assert_almost_equal(
        finegrid.getLayer('layer1').getData(), output)
    print('Passed MultiGrid subdivide test.')

def getIMT(self, imt_name, component):
    """
    Retrieve a Grid2D object and any associated metadata from the
    container.

    Args:
        imt_name (str):
            The name of the IMT stored in the container.
        component (str):
            The component of the IMT stored in the container.

    Returns:
        dict: Dictionary containing 4 items:
            - mean: Grid2D object for IMT mean values.
            - mean_metadata: Dictionary containing any metadata
              describing mean layer.
            - std: Grid2D object for IMT standard deviation values.
            - std_metadata: Dictionary containing any metadata
              describing standard deviation layer.
    """
    logger = logging.getLogger()
    logger.info('Inside OutputContainer')
    group_name = '__imt_%s_%s__' % (imt_name, component)
    if group_name not in self._hdfobj:
        raise LookupError('No group called %s in HDF file %s'
                          % (group_name, self.getFileName()))
    imt_group = self._hdfobj[group_name]

    # get the mean data and metadata
    mean_name = '__mean_%s_%s__' % (imt_name, component)
    mean_dset = imt_group[mean_name]
    mean_data = mean_dset[()]
    array_metadata, mean_metadata = _split_dset_attrs(mean_dset)
    mean_geodict = GeoDict(array_metadata)
    mean_grid = Grid2D(mean_data, mean_geodict)

    # get the std data and metadata
    std_name = '__std_%s_%s__' % (imt_name, component)
    std_dset = imt_group[std_name]
    std_data = std_dset[()]
    array_metadata, std_metadata = _split_dset_attrs(std_dset)
    std_geodict = GeoDict(array_metadata)
    std_grid = Grid2D(std_data, std_geodict)

    # create an output dictionary
    imt_dict = {
        'mean': mean_grid,
        'mean_metadata': mean_metadata,
        'std': std_grid,
        'std_metadata': std_metadata
    }
    return imt_dict

def test_cut():
    geodict = GeoDict({'xmin': 0.5, 'xmax': 4.5,
                       'ymin': 0.5, 'ymax': 4.5,
                       'dx': 1.0, 'dy': 1.0,
                       'ny': 5, 'nx': 5})
    data = np.arange(0, 25).reshape(5, 5)

    print('Testing data extraction...')
    grid = Grid2D(data, geodict)
    xmin, xmax, ymin, ymax = (2.5, 3.5, 2.5, 3.5)
    newgrid = grid.cut(xmin, xmax, ymin, ymax)
    output = np.array([[7, 8], [12, 13]])
    np.testing.assert_almost_equal(newgrid.getData(), output)
    print('Passed data extraction...')

    print('Testing data trimming with resampling...')
    # make a more complicated test using getBoundsWithin
    data = np.arange(0, 84).reshape(7, 12)
    geodict = GeoDict({'xmin': -180, 'xmax': 150,
                       'ymin': -90, 'ymax': 90,
                       'dx': 30, 'dy': 30,
                       'nx': 12, 'ny': 7})
    grid = Grid2D(data, geodict)
    sampledict = GeoDict.createDictFromBox(-75, 45, -45, 75,
                                           geodict.dx, geodict.dy)
    cutdict = geodict.getBoundsWithin(sampledict)
    newgrid = grid.cut(cutdict.xmin, cutdict.xmax,
                       cutdict.ymin, cutdict.ymax)
    output = np.array([[16, 17, 18, 19],
                       [28, 29, 30, 31],
                       [40, 41, 42, 43],
                       [52, 53, 54, 55]])
    np.testing.assert_almost_equal(newgrid.getData(), output)
    print('Passed data trimming with resampling...')

    print('Test cut with self-alignment...')
    geodict = GeoDict({'xmin': 0.5, 'xmax': 4.5,
                       'ymin': 0.5, 'ymax': 6.5,
                       'dx': 1.0, 'dy': 1.0,
                       'nx': 5, 'ny': 7})
    data = np.arange(0, 35).astype(np.float32).reshape(7, 5)
    grid = Grid2D(data, geodict)
    cutxmin = 1.7
    cutxmax = 3.7
    cutymin = 1.7
    cutymax = 5.7
    cutgrid = grid.cut(cutxmin, cutxmax, cutymin, cutymax, align=True)
    output = np.array([[7, 8],
                       [12, 13],
                       [17, 18],
                       [22, 23]])
    np.testing.assert_almost_equal(cutgrid.getData(), output)
    print('Passed cut with self-alignment.')

def _trim_grid(ingrid):
    """Trim NaN values from the edges of a grid: repeatedly remove the
    edge row or column with the highest fraction of NaNs until the grid
    contains no NaNs at all."""
    outgrid = Grid2D.copyFromGrid(ingrid)
    while np.isnan(outgrid._data).any():
        nrows, ncols = outgrid._data.shape
        top = outgrid._data[0, :]
        bottom = outgrid._data[-1, :]
        left = outgrid._data[:, 0]
        right = outgrid._data[:, -1]
        ftop = np.isnan(top).sum() / ncols
        fbottom = np.isnan(bottom).sum() / ncols
        fleft = np.isnan(left).sum() / nrows
        fright = np.isnan(right).sum() / nrows
        side = np.argmax([ftop, fbottom, fleft, fright])
        gdict = outgrid.getGeoDict().asDict()
        if side == 0:  # removing top row
            outgrid._data = outgrid._data[1:, :]
            gdict['ymax'] -= gdict['dy']
            gdict['ny'] -= 1
        elif side == 1:  # removing bottom row
            outgrid._data = outgrid._data[0:-1, :]
            gdict['ymin'] += gdict['dy']
            gdict['ny'] -= 1
        elif side == 2:  # removing left column
            outgrid._data = outgrid._data[:, 1:]
            gdict['xmin'] += gdict['dx']
            gdict['nx'] -= 1
        elif side == 3:  # removing right column
            outgrid._data = outgrid._data[:, 0:-1]
            gdict['xmax'] -= gdict['dx']
            gdict['nx'] -= 1
        geodict = GeoDict(gdict)
        outgrid = Grid2D(data=outgrid._data, geodict=geodict)

    return outgrid

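
# --- Hypothetical usage sketch (not part of the original source), showing
# _trim_grid() removing an all-NaN edge column. Import paths assume the
# MapIO package layout.
def _example_trim_grid():
    import numpy as np
    from mapio.geodict import GeoDict
    from mapio.grid2d import Grid2D
    data = np.arange(0.0, 16.0).reshape(4, 4)
    data[:, 0] = np.nan  # left column is all NaN and should be trimmed
    geodict = GeoDict({'xmin': 0.5, 'xmax': 3.5, 'ymin': 0.5, 'ymax': 3.5,
                       'dx': 1.0, 'dy': 1.0, 'nx': 4, 'ny': 4})
    trimmed = _trim_grid(Grid2D(data, geodict))
    # one column was removed from the left edge
    assert trimmed.getGeoDict().nx == 3
    assert trimmed.getGeoDict().xmin == 1.5
    return trimmed
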
def test_write():
    data = np.arange(0, 25).reshape(5, 5).astype(np.float32)
    gdict = {'xmin': 5.0,
             'xmax': 9.0,
             'ymin': 4.0,
             'ymax': 8.0,
             'dx': 1.0,
             'dy': 1.0,
             'nx': 5,
             'ny': 5}
    gd = GeoDict(gdict)
    grid = Grid2D(data, gd)

    for format_type in ['netcdf', 'esri', 'hdf']:
        tdir = tempfile.mkdtemp()
        fname = os.path.join(tdir, 'tempfile.grd')
        try:
            write(grid, fname, format_type)
            src = rasterio.open(fname, 'r')
            tdata = src.read(1)
            np.testing.assert_almost_equal(tdata, data)
        finally:
            shutil.rmtree(tdir)

def test_setData():
    data = np.arange(0, 16).astype(np.float32).reshape(4, 4)
    geodict = GeoDict({'xmin': 0.5, 'xmax': 3.5,
                       'ymin': 0.5, 'ymax': 3.5,
                       'dx': 1.0, 'dy': 1.0,
                       'ny': 4, 'nx': 4})
    grid1 = Grid2D(data, geodict)

    x = np.ones((4, 4))
    try:
        grid1.setData(x)  # this should pass
        print('setData test passed.')
    except DataSetException:
        print('setData test failed.')

    try:
        x = np.ones((5, 5))
        grid1.setData(x)
        print('setData test did not fail when it should have.')
    except DataSetException:
        print('setData test failed as expected.')

    try:
        x = 'fred'
        grid1.setData(x)
        print('setData test did not fail when it should have.')
    except DataSetException:
        print('setData test failed as expected.')

def test_copy():
    data = np.arange(0, 16).astype(np.float32).reshape(4, 4)
    geodict = GeoDict({'xmin': 0.5, 'xmax': 3.5,
                       'ymin': 0.5, 'ymax': 3.5,
                       'dx': 1.0, 'dy': 1.0,
                       'ny': 4, 'nx': 4})
    grid1 = Grid2D(data, geodict)
    grid2 = grid1.copyFromGrid(grid1)
    grid1._data[0, 0] = np.nan
    print(grid2._data)
    print(grid2._geodict)

def createFromCenter(cls, cx, cy, xspan, yspan, dx, dy,
                     defaultVs30=686.0, vs30File=None,
                     vs30measured_grid=None, backarc=False,
                     padding=False, resample=False):
    """
    Create a Sites object by defining a center point, resolution, extent,
    and Vs30 values.

    :param cx:
      X coordinate of desired center point.
    :param cy:
      Y coordinate of desired center point.
    :param xspan:
      Width of desired grid.
    :param yspan:
      Height of desired grid.
    :param dx:
      Resolution of desired grid in X direction.
    :param dy:
      Resolution of desired grid in Y direction.
    :param defaultVs30:
      Default Vs30 value to use if vs30File not specified.
    :param vs30File:
      Name of GMT or GDAL format grid file containing Vs30 values.
    :param vs30measured_grid:
      Boolean grid indicating whether Vs30 values were measured or
      derived (i.e., from slope).
    :param backarc:
      Boolean indicating whether event is on the backarc as defined
      `here <http://earthquake.usgs.gov/learn/glossary/?term=backarc>`__.
    :param padding:
      Boolean indicating whether or not to pad resulting Vs30 grid out
      to edges of input bounds. If False, grid will be clipped to the
      extent of the input file.
    :param resample:
      Boolean indicating whether or not the grid should be resampled.
    """
    geodict = GeoDict.createDictFromCenter(cx, cy, dx, dy, xspan, yspan)
    if vs30File is not None:
        vs30grid = cls._create(geodict, defaultVs30, vs30File,
                               padding, resample)
    else:
        griddata = np.ones((geodict.ny, geodict.nx),
                           dtype=np.float64) * defaultVs30
        vs30grid = Grid2D(griddata, geodict)
    return cls(vs30grid, vs30measured_grid=vs30measured_grid,
               backarc=backarc, defaultVs30=defaultVs30)

def test_basics():
    geodict = GeoDict({'xmin': 0.5, 'xmax': 3.5,
                       'ymin': 0.5, 'ymax': 3.5,
                       'dx': 1.0, 'dy': 1.0,
                       'ny': 4, 'nx': 4})
    data = np.arange(0, 16).reshape(4, 4).astype(np.float32)
    grid = Grid2D(data, geodict)
    print('Testing basic Grid2D functionality (retrieving data, lat/lon '
          'to pixel coordinates, etc.)...')
    np.testing.assert_almost_equal(grid.getData(), data)
    assert grid.getGeoDict() == geodict
    assert grid.getBounds() == (geodict.xmin, geodict.xmax,
                                geodict.ymin, geodict.ymax)

    lat, lon = grid.getLatLon(0, 0)
    assert lat == 3.5 and lon == 0.5

    row, col = grid.getRowCol(lat, lon)
    assert row == 0 and col == 0

    value = grid.getValue(lat, lon)
    assert value == 0

    frow, fcol = grid.getRowCol(1.0, 3.0, returnFloat=True)
    assert frow == 2.5 and fcol == 2.5

    irow, icol = grid.getRowCol(1.0, 3.0, returnFloat=False)
    assert irow == 2 and icol == 2

    # test getting values in and outside of the grid bounds
    lat = np.array([0.0, 0.5, 2.5, 4.0])
    lon = np.array([0.0, 0.5, 2.5, 4.0])
    default = np.nan
    output = np.array([np.nan, 12, 6, np.nan])
    value = grid.getValue(lat, lon, default=default)
    np.testing.assert_almost_equal(value, output)
    print('Passed basic Grid2D functionality (retrieving data, lat/lon '
          'to pixel coordinates, etc.)...')

def _get_average_grid(gc, contents, myimt):
    """
    Given an SA(X) IMT, attempt to find the grids that bracket its
    period and return an interpolated grid that is the weighted average
    (weighted by the (log) differences in period). If the period is less
    than the lowest, or greater than the highest, available period, then
    the closest endpoint grid is returned.

    Args:
        gc (GridHDFContainer): The container holding the amplification
            grids, labeled by IMT string.
        contents (list): A list of the IMTs available in gc.
        myimt (str): The target IMT; must be of type "SA(X)".

    Returns:
        tuple: A grid and its associated metadata.
    """
    #
    # Make a list of the SA IMTs, add the target IMT to the list
    # and then sort by period.
    #
    imt_list = [thisimt for thisimt in contents
                if thisimt.startswith('SA(')]
    if len(imt_list) == 0:
        logging.warning('Generic Amp Factors: No SA grids in file')
        return None, None
    imt_list.append(myimt)
    imt_list_sorted = sorted(imt_list, key=get_period_from_imt)
    nimt = len(imt_list_sorted)
    ix = imt_list_sorted.index(myimt)
    if ix == 0:
        logging.warning("Generic Amp Factors: IMT %s less than min "
                        "available imt, using %s"
                        % (myimt, imt_list_sorted[1]))
        return gc.getGrid(imt_list_sorted[1])
    elif ix == (nimt - 1):
        logging.warning("Generic Amp Factors: IMT %s greater than max "
                        "available imt, using %s"
                        % (myimt, imt_list_sorted[-2]))
        return gc.getGrid(imt_list_sorted[-2])
    else:
        # Interpolate using (log) period: p1 is the shorter period,
        # p2 is the longer period, and p0 is the target period.
        g1, md1 = gc.getGrid(imt_list_sorted[ix - 1])
        g2, md2 = gc.getGrid(imt_list_sorted[ix + 1])
        p1 = np.log(get_period_from_imt(imt_list_sorted[ix - 1]))
        p2 = np.log(get_period_from_imt(imt_list_sorted[ix + 1]))
        p0 = np.log(get_period_from_imt(myimt))
        w1 = (p2 - p0) / (p2 - p1)
        w2 = 1.0 - w1
        gmean = g1.getData() * w1 + g2.getData() * w2
        return Grid2D(gmean, g1.getGeoDict()), md1

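
# --- Hypothetical numeric sketch (not part of the original source) of the
# log-period weighting used in _get_average_grid() above: a target SA(0.6)
# bracketed by SA(0.3) and SA(1.0). The periods are illustrative only.
def _example_period_weights():
    import numpy as np
    p1, p0, p2 = np.log(0.3), np.log(0.6), np.log(1.0)
    w1 = (p2 - p0) / (p2 - p1)  # weight on the shorter-period grid
    w2 = 1.0 - w1               # weight on the longer-period grid
    assert abs((w1 + w2) - 1.0) < 1e-12
    # w1 is about 0.42 and w2 about 0.58: SA(0.6) sits closer to SA(1.0)
    # in log-period space, so the longer-period grid gets more weight.
    return w1, w2
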
def make_generic_amps():
    imts = ['PGA', 'SA(0.3)', 'SA(1.0)', 'SA(3.0)']
    install_path, _ = get_config_paths()
    geodict = {'dx': 0.016666666666666666,
               'dy': 0.016666666666666666,
               'nx': 301,
               'ny': 151,
               'xmax': -116.0,
               'xmin': -121.0,
               'ymax': 35.5,
               'ymin': 33.0}
    gd = GeoDict(geodict)

    # make east-west file (1s on the left/west, 0s on the right/east)
    data = np.ones((gd.ny, gd.nx))
    data[:, 151:] = 0
    outfolder = os.path.join(install_path, 'data', 'GenericAmpFactors')
    east_west_file = os.path.join(outfolder, 'Test_basin_east_west.hdf')
    east_west = GridHDFContainer.create(east_west_file)
    for imt in imts:
        grid = Grid2D(data, gd)
        east_west.setGrid(imt, grid)
    east_west.close()

    # make north-south file (1s on the top/north, 0s on the bottom/south)
    data = np.ones((gd.ny, gd.nx))
    data[76:151, :] = 0
    outfolder = os.path.join(install_path, 'data', 'GenericAmpFactors')
    north_south_file = os.path.join(outfolder,
                                    'Test_basin_north_south.hdf')
    north_south = GridHDFContainer.create(north_south_file)
    for imt in imts:
        grid = Grid2D(data, gd)
        north_south.setGrid(imt, grid)
    north_south.close()

    return (east_west_file, north_south_file)

def big_test():
    xmin = -180
    xmax = -170
    ymin = 30
    ymax = 40
    dx = 0.0083
    dy = 0.0083
    gd = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    data = np.random.rand(gd.ny, gd.nx)
    grid = Grid2D(data, gd)
    fname = os.path.join(os.path.expanduser('~'), 'tempfile.grd')
    write(grid, fname, 'hdf')
    print(fname)
    src = rasterio.open(fname, 'r')

def fromBounds(cls, xmin, xmax, ymin, ymax, dx, dy, defaultVs30=686.0,
               vs30File=None, vs30measured_grid=None, backarc=None,
               padding=False, resample=False):
    """
    Create a Sites object by defining grid bounds, resolution, and Vs30
    values.

    Args:
        xmin: X coordinate of left edge of bounds.
        xmax: X coordinate of right edge of bounds.
        ymin: Y coordinate of bottom edge of bounds.
        ymax: Y coordinate of top edge of bounds.
        dx: Resolution of desired grid in X direction.
        dy: Resolution of desired grid in Y direction.
        defaultVs30: Default Vs30 value to use if vs30File not
            specified.
        vs30File: Name of GMT or GDAL format grid file containing Vs30
            values.
        vs30measured_grid: Boolean grid indicating whether Vs30 values
            were measured or derived (i.e., from slope).
        backarc: Boolean array indicating whether site is in the
            subduction
            `backarc <http://earthquake.usgs.gov/learn/glossary/?term=backarc>`__.
        padding: Boolean indicating whether or not to pad resulting
            Vs30 grid out to edges of input bounds. If False, grid will
            be clipped to the extent of the input file.
        resample: Boolean indicating whether or not the grid should be
            resampled.
    """  # noqa
    geodict = GeoDict.createDictFromBox(xmin, xmax, ymin, ymax, dx, dy)
    if vs30File is not None:
        vs30grid = cls._create(geodict, defaultVs30, vs30File,
                               padding, resample)
    else:
        griddata = np.ones((geodict.ny, geodict.nx),
                           dtype=np.float64) * defaultVs30
        vs30grid = Grid2D(griddata, geodict)
    return cls(vs30grid, vs30measured_grid=vs30measured_grid,
               backarc=backarc, defaultVs30=defaultVs30)

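
# --- Hypothetical usage sketch (not part of the original source): build a
# Sites object over a small box with a uniform default Vs30, i.e. without
# a vs30File. The coordinates and the 760 m/s value are illustrative only.
def _example_sites_from_bounds():
    sites = Sites.fromBounds(-118.5, -114.5, 32.1, 36.7, 0.01, 0.01,
                             defaultVs30=760.0)
    return sites
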
def getGrid(self, name):
    """
    Retrieve a Grid2D object and any associated metadata from the
    container.

    Args:
        name (str):
            The name of the Grid2D object stored in the container.

    Returns:
        (tuple) Grid2D object, and a dictionary of metadata.
    """
    array_name = '__grid_%s__' % name
    if array_name not in self._hdfobj:
        raise LookupError('Array %s not in %s'
                          % (name, self.getFileName()))
    dset = self._hdfobj[array_name]
    data = dset[()]
    array_metadata, meta_metadata = _split_dset_attrs(dset)
    geodict = GeoDict(array_metadata)
    grid = Grid2D(data, geodict)
    return grid, meta_metadata

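
# --- Hypothetical usage sketch (not part of the original source): a
# setGrid()/getGrid() round trip through a GridHDFContainer, mirroring
# what test_grid_hdf_container() exercises. Import paths assume the MapIO
# package layout; 'fname' is a caller-supplied HDF file path.
def _example_grid_container_roundtrip(fname):
    import numpy as np
    from mapio.geodict import GeoDict
    from mapio.grid2d import Grid2D
    geodict = GeoDict.createDictFromBox(-118.5, -114.5, 32.1, 36.7,
                                        0.01, 0.02)
    grid = Grid2D(np.random.rand(geodict.ny, geodict.nx), geodict)
    container = GridHDFContainer.create(fname)
    container.setGrid('example', grid, metadata={'units': 'm/s'})
    outgrid, metadata = container.getGrid('example')
    container.close()
    return outgrid, metadata
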
def test_getvalue():
    array = np.arange(1, 26).reshape(5, 5)
    gdict = GeoDict({'xmin': 1.0, 'xmax': 5.0,
                     'ymin': 1.0, 'ymax': 5.0,
                     'dx': 1.0, 'dy': 1.0,
                     'nx': 5, 'ny': 5})
    grid = Grid2D(array, gdict)
    assert grid.getValue(3.0, 3.0) == 13
    lat = np.array([3.0, 4.0])
    lon = np.array([3.0, 3.0])
    test = grid.getValue(lat, lon)
    np.testing.assert_almost_equal(test, np.array([13, 8]))
    lat = np.array([[3.0, 4.0],
                    [4.0, 5.0]])
    lon = np.array([[3.0, 3.0],
                    [4.0, 4.0]])
    test = grid.getValue(lat, lon)
    np.testing.assert_almost_equal(test, np.array([[13, 8], [9, 4]]))

def test_interpolate():
    geodict = GeoDict({'xmin': 0.5, 'xmax': 6.5,
                       'ymin': 1.5, 'ymax': 6.5,
                       'dx': 1.0, 'dy': 1.0,
                       'ny': 6, 'nx': 7})
    data = np.arange(14, 56).reshape(6, 7)
    for method in ['nearest', 'linear', 'cubic']:
        print('Testing interpolate with method "%s"...' % method)
        grid = Grid2D(data, geodict)
        sampledict = GeoDict({'xmin': 3.0, 'xmax': 4.0,
                              'ymin': 3.0, 'ymax': 4.0,
                              'dx': 1.0, 'dy': 1.0,
                              'ny': 2, 'nx': 2})
        grid = grid.interpolateToGrid(sampledict, method=method)
        if method == 'nearest':
            output = np.array([[30.0, 31.0], [37.0, 38.0]])
        elif method == 'linear':
            output = np.array([[34., 35.], [41., 42.]])
        elif method == 'cubic':
            output = np.array([[34., 35.], [41., 42.]])
        np.testing.assert_almost_equal(grid.getData(), output)
        print('Passed interpolate with method "%s".' % method)

def updateSequences(self, stime):
    etime = stime + timedelta(days=1)
    events = search(starttime=stime,
                    endtime=etime,
                    minlatitude=-90,
                    maxlatitude=90,
                    minlongitude=-180,
                    maxlongitude=180,
                    minmagnitude=0.0,
                    maxmagnitude=9.9)
    todayframe = get_summary_data_frame(events)
    todaydata = get_day_counts(GDICT, todayframe)
    todaygrid = Grid2D(data=todaydata, geodict=GDICT)
    for row in range(0, GDICT.ny):
        for col in range(0, GDICT.nx):
            clat, clon = GDICT.getLatLon(row, col)
            tvalue = todaygrid._data[row, col]
            mvalue = self._meangrid._data[row, col]
            svalue = self._stdgrid._data[row, col]
            # thresh = tvalue > mvalue + svalue * 3
            thresh = tvalue > MINEQ
            xmin = clon - GDICT.dx / 2
            xmax = clon + GDICT.dx / 2
            ymin = clat - GDICT.dy / 2
            ymax = clat + GDICT.dy / 2
            if thresh:
                c1 = todayframe['latitude'] > ymin
                c2 = todayframe['latitude'] <= ymax
                c3 = todayframe['longitude'] > xmin
                c4 = todayframe['longitude'] <= xmax
                cluster = todayframe[c1 & c2 & c3 & c4].copy()
                class_frame, pproj = self.get_clusters(cluster, clon, clat)
                self.insertSequences(class_frame, pproj)

    # call a method that filters out clusters that don't match the
    # definition of an earthquake sequence.
    self.cleanSequences()

def getSitesContext(self, lldict=None, rock_vs30=None):
    """
    Create a SitesContext object by sampling the current Sites object.

    Args:
        lldict: Either
            - None, in which case the SitesContext for the complete
              Sites grid is returned, or
            - A location dictionary (elements are 'lats' and 'lons' and
              each is a numpy array). Each element must have the same
              shape. In this case the SitesContext for these locations
              is returned.
        rock_vs30: Either
            - None, in which case the SitesContext will reflect the
              Vs30 grid in the Sites instance, or
            - A float for the rock Vs30 value, in which case the
              SitesContext will be constructed for this constant Vs30
              value.

    Returns:
        SitesContext object.

    Raises:
        ShakeLibException: When lat/lon input sequences do not share
            dimensionality.
    """  # noqa
    sctx = SitesContext()

    if lldict is not None:
        lats = lldict['lats']
        lons = lldict['lons']
        latshape = lats.shape
        lonshape = lons.shape
        if latshape != lonshape:
            msg = 'Input lat/lon arrays must have the same dimensions'
            raise ShakeLibException(msg)

        if rock_vs30 is not None:
            tmp = self._Vs30.getValue(
                lats, lons, default=self._defaultVs30)
            sctx.vs30 = np.ones_like(tmp) * rock_vs30
        else:
            sctx.vs30 = self._Vs30.getValue(
                lats, lons, default=self._defaultVs30)
        sctx.lats = lats
        sctx.lons = lons
    else:
        sctx.lats = self._lats.copy()
        sctx.lons = self._lons.copy()
        if rock_vs30 is not None:
            sctx.vs30 = np.full_like(self._Vs30.getData(), rock_vs30)
        else:
            sctx.vs30 = self._Vs30.getData().copy()

    sctx = Sites._addDepthParameters(sctx)

    # For ShakeMap purposes, vs30 measured is always False
    sctx.vs30measured = np.zeros_like(sctx.vs30, dtype=bool)

    # Backarc should be a numpy array
    if lldict is not None:
        backarcgrid = Grid2D(self._backarc, self._Vs30.getGeoDict())
        sctx.backarc = backarcgrid.getValue(lats, lons, default=False)
    else:
        sctx.backarc = self._backarc.copy()

    return sctx

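
# --- Hypothetical usage sketch (not part of the original source): sample
# a SitesContext at two points, and again with a fixed rock Vs30. The
# 'sites' argument is assumed to be a Sites instance; coordinates and the
# 760 m/s value are illustrative only.
def _example_sites_context(sites):
    import numpy as np
    lldict = {'lats': np.array([34.0, 34.1]),
              'lons': np.array([-118.0, -118.1])}
    sctx = sites.getSitesContext(lldict=lldict)
    rock_sctx = sites.getSitesContext(lldict=lldict, rock_vs30=760.0)
    return sctx.vs30, rock_sctx.vs30
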
def test_output_container():
    geodict = GeoDict.createDictFromBox(-118.5, -114.5, 32.1, 36.7,
                                        0.01, 0.02)
    nrows, ncols = geodict.ny, geodict.nx

    # create MMI mean data for maximum component
    mean_mmi_maximum_data = np.random.rand(nrows, ncols)
    mean_mmi_maximum_metadata = {'name': 'Gandalf',
                                 'color': 'white',
                                 'powers': 'magic'}
    mean_mmi_maximum_grid = Grid2D(mean_mmi_maximum_data, geodict)

    # create MMI std data for maximum component
    std_mmi_maximum_data = mean_mmi_maximum_data / 10
    std_mmi_maximum_metadata = {'name': 'Legolas',
                                'color': 'green',
                                'powers': 'good hair'}
    std_mmi_maximum_grid = Grid2D(std_mmi_maximum_data, geodict)

    # create MMI mean data for rotd50 component
    mean_mmi_rotd50_data = np.random.rand(nrows, ncols)
    mean_mmi_rotd50_metadata = {'name': 'Gimli',
                                'color': 'brown',
                                'powers': 'axing'}
    mean_mmi_rotd50_grid = Grid2D(mean_mmi_rotd50_data, geodict)

    # create MMI std data for rotd50 component
    std_mmi_rotd50_data = mean_mmi_rotd50_data / 10
    std_mmi_rotd50_metadata = {'name': 'Aragorn',
                               'color': 'white',
                               'powers': 'scruffiness'}
    std_mmi_rotd50_grid = Grid2D(std_mmi_rotd50_data, geodict)

    # create PGA mean data for maximum component
    mean_pga_maximum_data = np.random.rand(nrows, ncols)
    mean_pga_maximum_metadata = {'name': 'Pippin',
                                 'color': 'purple',
                                 'powers': 'rashness'}
    mean_pga_maximum_grid = Grid2D(mean_pga_maximum_data, geodict)

    # create PGA std data for maximum component
    std_pga_maximum_data = mean_pga_maximum_data / 10
    std_pga_maximum_metadata = {'name': 'Merry',
                                'color': 'grey',
                                'powers': 'hunger'}
    std_pga_maximum_grid = Grid2D(std_pga_maximum_data, geodict)

    f, datafile = tempfile.mkstemp()
    os.close(f)
    try:
        container = ShakeMapOutputContainer.create(datafile)
        container.setIMTGrids('mmi',
                              mean_mmi_maximum_grid,
                              mean_mmi_maximum_metadata,
                              std_mmi_maximum_grid,
                              std_mmi_maximum_metadata,
                              component='maximum')
        container.setIMTGrids('mmi',
                              mean_mmi_rotd50_grid,
                              mean_mmi_rotd50_metadata,
                              std_mmi_rotd50_grid,
                              std_mmi_rotd50_metadata,
                              component='rotd50')
        container.setIMTGrids('pga',
                              mean_pga_maximum_grid,
                              mean_pga_maximum_metadata,
                              std_pga_maximum_grid,
                              std_pga_maximum_metadata,
                              component='maximum')

        # get the maximum MMI imt data
        mmi_max_dict = container.getIMTGrids('mmi', component='maximum')
        np.testing.assert_array_equal(mmi_max_dict['mean'].getData(),
                                      mean_mmi_maximum_data)
        np.testing.assert_array_equal(mmi_max_dict['std'].getData(),
                                      std_mmi_maximum_data)
        assert mmi_max_dict['mean_metadata'] == mean_mmi_maximum_metadata
        assert mmi_max_dict['std_metadata'] == std_mmi_maximum_metadata

        # get the rotd50 MMI imt data
        mmi_rot_dict = container.getIMTGrids('mmi', component='rotd50')
        np.testing.assert_array_equal(mmi_rot_dict['mean'].getData(),
                                      mean_mmi_rotd50_data)
        np.testing.assert_array_equal(mmi_rot_dict['std'].getData(),
                                      std_mmi_rotd50_data)
        assert mmi_rot_dict['mean_metadata'] == mean_mmi_rotd50_metadata
        assert mmi_rot_dict['std_metadata'] == std_mmi_rotd50_metadata

        # Check repr method
        assert repr(container) == '''Data type: grid
    use "getIMTGrids" method to access interpolated IMTs
Rupture: None
Config: None
Stations: None
Metadata: None
Available IMTs (components):
    mmi (maximum, rotd50)
    pga (maximum)
'''

        # get list of all imts
        imts = container.getIMTs()

        # get list of maximum imts
        max_imts = container.getIMTs(component='maximum')
        assert sorted(max_imts) == ['mmi', 'pga']

        # get list of components for mmi
        mmi_comps = container.getComponents('mmi')
        assert sorted(mmi_comps) == ['maximum', 'rotd50']

        # Test dropIMT
        imts = container.getIMTs('maximum')
        assert imts == ['mmi', 'pga']
        container.dropIMT('mmi')
        imts = container.getIMTs('maximum')
        assert imts == ['pga']
        container.close()
    finally:
        os.remove(datafile)

def draw_contour(shakegrid, popgrid, oceanfile, oceangridfile, cityfile,
                 basename, borderfile=None, is_scenario=False):
    """Create a contour map showing MMI contours over greyscale
    population.

    :param shakegrid:
      ShakeGrid object.
    :param popgrid:
      Grid2D object containing population data.
    :param oceanfile:
      String path to file containing ocean vector data in a format
      compatible with fiona.
    :param oceangridfile:
      String path to file containing ocean grid data.
    :param cityfile:
      String path to file containing GeoNames cities data.
    :param basename:
      String path containing desired output PDF base name, i.e.,
      /home/pager/exposure.  ".pdf" and ".png" files will be made.
    :param borderfile:
      String path to file containing country border vector data, or
      None.
    :param is_scenario:
      Boolean indicating whether the event is a scenario; if True, a
      "SCENARIO" watermark is drawn across the map.
    :returns:
      Tuple containing:
        - Name of PDF file created.
        - Name of PNG file created.
        - Cities object containing the cities that were rendered on the
          contour map.
    """
    gd = shakegrid.getGeoDict()

    # Retrieve the epicenter - this will get used on the map
    center_lat = shakegrid.getEventDict()['lat']
    center_lon = shakegrid.getEventDict()['lon']

    # load the ocean grid file (has 1s in ocean, 0s over land)
    # having this file saves us almost 30 seconds!
    oceangrid = read(oceangridfile,
                     samplegeodict=gd,
                     resample=True,
                     doPadding=True)

    # load the cities data, limit to cities within shakemap bounds
    allcities = Cities.fromDefault()
    cities = allcities.limitByBounds((gd.xmin, gd.xmax, gd.ymin, gd.ymax))

    # define the map
    # first cope with stupid 180 meridian
    height = (gd.ymax - gd.ymin) * DEG2KM
    if gd.xmin < gd.xmax:
        width = (gd.xmax - gd.xmin) * \
            np.cos(np.radians(center_lat)) * DEG2KM
        xmin, xmax, ymin, ymax = (gd.xmin, gd.xmax, gd.ymin, gd.ymax)
    else:
        xmin, xmax, ymin, ymax = (gd.xmin, gd.xmax, gd.ymin, gd.ymax)
        xmax += 360
        width = ((gd.xmax + 360) - gd.xmin) * \
            np.cos(np.radians(center_lat)) * DEG2KM

    aspect = width / height

    # if the aspect is not 1, then trim bounds in x or y direction
    # as appropriate
    if width > height:
        dw = (width - height) / 2.0  # this is width in km
        xmin = xmin + dw / (np.cos(np.radians(center_lat)) * DEG2KM)
        xmax = xmax - dw / (np.cos(np.radians(center_lat)) * DEG2KM)
        width = (xmax - xmin) * np.cos(np.radians(center_lat)) * DEG2KM
    if height > width:
        dh = (height - width) / 2.0  # this is width in km
        ymin = ymin + dh / DEG2KM
        ymax = ymax - dh / DEG2KM
        height = (ymax - ymin) * DEG2KM

    aspect = width / height
    figheight = FIGWIDTH / aspect
    bbox = (xmin, ymin, xmax, ymax)
    bounds = (xmin, xmax, ymin, ymax)
    figsize = (FIGWIDTH, figheight)

    # Create the MercatorMap object, which holds a separate but identical
    # axes object used to determine collisions between city labels.
    mmap = MercatorMap(bounds, figsize, cities, padding=0.5)
    fig = mmap.figure
    ax = mmap.axes
    # this needs to be done here so that city label collision
    # detection will work
    fig.canvas.draw()

    geoproj = mmap.geoproj
    proj = mmap.proj

    # project our population grid to the map projection
    projstr = proj.proj4_init
    popgrid_proj = popgrid.project(projstr)
    popdata = popgrid_proj.getData()
    newgd = popgrid_proj.getGeoDict()

    # Use our GMT-inspired palette class to create population and MMI
    # colormaps
    popmap = ColorPalette.fromPreset('pop')
    mmimap = ColorPalette.fromPreset('mmi')

    # set the image extent to that of the data
    img_extent = (newgd.xmin, newgd.xmax, newgd.ymin, newgd.ymax)
    plt.imshow(popdata,
               origin='upper',
               extent=img_extent,
               cmap=popmap.cmap,
               vmin=popmap.vmin,
               vmax=popmap.vmax,
               zorder=POP_ZORDER,
               interpolation='nearest')

    # draw 10m res coastlines
    ax.coastlines(resolution="10m", zorder=COAST_ZORDER)

    states_provinces = cfeature.NaturalEarthFeature(
        category='cultural',
        name='admin_1_states_provinces_lines',
        scale='50m',
        facecolor='none')
    ax.add_feature(states_provinces, edgecolor='black',
                   zorder=COAST_ZORDER)

    # draw country borders using natural earth data set
    if borderfile is not None:
        borders = ShapelyFeature(
            Reader(borderfile).geometries(), ccrs.PlateCarree())
        ax.add_feature(borders,
                       zorder=COAST_ZORDER,
                       edgecolor='black',
                       linewidth=2,
                       facecolor='none')

    # clip the ocean data to the shakemap
    bbox = (gd.xmin, gd.ymin, gd.xmax, gd.ymax)
    oceanshapes = _clip_bounds(bbox, oceanfile)

    ax.add_feature(ShapelyFeature(oceanshapes, crs=geoproj),
                   facecolor=WATERCOLOR,
                   zorder=OCEAN_ZORDER)

    # So here we're going to project the MMI data to
    # our mercator map, then smooth and contour that
    # projected grid.

    # smooth the MMI data for contouring, then project
    mmi = shakegrid.getLayer('mmi').getData()
    smoothed_mmi = gaussian_filter(mmi, FILTER_SMOOTH)
    newgd = shakegrid.getGeoDict().copy()
    smooth_grid = Grid2D(data=smoothed_mmi, geodict=newgd)
    smooth_grid_merc = smooth_grid.project(projstr)
    newgd2 = smooth_grid_merc.getGeoDict()

    # project the ocean grid
    oceangrid_merc = oceangrid.project(projstr)

    # create masked arrays using the ocean grid
    data_xmin, data_xmax = newgd2.xmin, newgd2.xmax
    data_ymin, data_ymax = newgd2.ymin, newgd2.ymax
    smooth_data = smooth_grid_merc.getData()
    landmask = np.ma.masked_where(oceangrid_merc._data == 0.0,
                                  smooth_data)
    oceanmask = np.ma.masked_where(oceangrid_merc._data == 1.0,
                                   smooth_data)

    # contour the data
    contourx = np.linspace(data_xmin, data_xmax, newgd2.nx)
    contoury = np.linspace(data_ymin, data_ymax, newgd2.ny)
    ax.contour(
        contourx,
        contoury,
        np.flipud(oceanmask),
        linewidths=3.0,
        linestyles='solid',
        zorder=1000,
        cmap=mmimap.cmap,
        vmin=mmimap.vmin,
        vmax=mmimap.vmax,
        levels=np.arange(0.5, 10.5, 1.0),
    )

    ax.contour(
        contourx,
        contoury,
        np.flipud(landmask),
        linewidths=2.0,
        linestyles='dashed',
        zorder=OCEANC_ZORDER,
        cmap=mmimap.cmap,
        vmin=mmimap.vmin,
        vmax=mmimap.vmax,
        levels=np.arange(0.5, 10.5, 1.0),
    )

    # the idea here is to plot invisible MMI contours at integer levels
    # and then label them. clabel method won't allow text to appear,
    # which in this case is kind of ok, because it allows us an
    # easy way to draw MMI labels as roman numerals.
    cs_land = plt.contour(
        contourx,
        contoury,
        np.flipud(oceanmask),
        linewidths=0.0,
        levels=np.arange(0, 11),
        alpha=0.0,
        zorder=CLABEL_ZORDER,
    )

    clabel_text = ax.clabel(cs_land,
                            cs_land.cvalues,
                            colors='k',
                            fmt='%.0f',
                            fontsize=40)
    for clabel in clabel_text:
        x, y = clabel.get_position()
        label_str = clabel.get_text()
        roman_label = MMI_LABELS[label_str]
        th = plt.text(x, y, roman_label,
                      zorder=CLABEL_ZORDER,
                      ha='center',
                      va='center',
                      color='black',
                      weight='normal',
                      size=16)
        th.set_path_effects([
            path_effects.Stroke(linewidth=2.0, foreground='white'),
            path_effects.Normal()
        ])

    cs_ocean = plt.contour(
        contourx,
        contoury,
        np.flipud(landmask),
        linewidths=0.0,
        levels=np.arange(0, 11),
        zorder=CLABEL_ZORDER,
    )

    clabel_text = ax.clabel(cs_ocean,
                            cs_ocean.cvalues,
                            colors='k',
                            fmt='%.0f',
                            fontsize=40)
    for clabel in clabel_text:
        x, y = clabel.get_position()
        label_str = clabel.get_text()
        roman_label = MMI_LABELS[label_str]
        th = plt.text(x, y, roman_label,
                      ha='center',
                      va='center',
                      color='black',
                      weight='normal',
                      size=16)
        th.set_path_effects([
            path_effects.Stroke(linewidth=2.0, foreground='white'),
            path_effects.Normal()
        ])

    # draw meridians and parallels using Cartopy's functions for that
    gl = ax.gridlines(draw_labels=True,
                      linewidth=2,
                      color=(0.9, 0.9, 0.9),
                      alpha=0.5,
                      linestyle='-',
                      zorder=GRID_ZORDER)
    gl.xlabels_top = False
    gl.xlabels_bottom = False
    gl.ylabels_left = False
    gl.ylabels_right = False
    gl.xlines = True

    # let's floor/ceil the edges to the nearest half degree
    gxmin = np.floor(xmin * 2) / 2
    gxmax = np.ceil(xmax * 2) / 2
    gymin = np.floor(ymin * 2) / 2
    gymax = np.ceil(ymax * 2) / 2

    xlocs = np.linspace(gxmin, gxmax + 0.5, num=5)
    ylocs = np.linspace(gymin, gymax + 0.5, num=5)

    gl.xlocator = mticker.FixedLocator(xlocs)
    gl.ylocator = mticker.FixedLocator(ylocs)
    gl.xformatter = LONGITUDE_FORMATTER
    gl.yformatter = LATITUDE_FORMATTER
    gl.xlabel_style = {'size': 15, 'color': 'black'}
    gl.ylabel_style = {'size': 15, 'color': 'black'}

    # TODO - figure out x/y axes data coordinates
    # corresponding to 10% from left and 10% from top
    # use geoproj and proj
    dleft = 0.01
    dtop = 0.97
    proj_str = proj.proj4_init
    merc_to_dd = pyproj.Proj(proj_str)

    # use built-in transforms to get from axes units to data units
    display_to_data = ax.transData.inverted()
    axes_to_display = ax.transAxes

    # these are x,y coordinates in projected space
    yleft, t1 = display_to_data.transform(
        axes_to_display.transform((dleft, 0.5)))
    t2, xtop = display_to_data.transform(
        axes_to_display.transform((0.5, dtop)))

    # these are coordinates in lon,lat space
    yleft_dd, t1_dd = merc_to_dd(yleft, t1, inverse=True)
    t2_dd, xtop_dd = merc_to_dd(t2, xtop, inverse=True)

    # drawing our own tick labels INSIDE the plot, as
    # Cartopy doesn't seem to support this.
    yrange = ymax - ymin
    xrange = xmax - xmin
    ddlabelsize = 12
    for xloc in gl.xlocator.locs:
        outside = xloc < xmin or xloc > xmax
        # don't draw labels when we're too close to either edge
        near_edge = (xloc - xmin) < (xrange * 0.1) or \
            (xmax - xloc) < (xrange * 0.1)
        if outside or near_edge:
            continue
        xtext = r'$%.1f^\circ$W' % (abs(xloc))
        ax.text(xloc, xtop_dd, xtext,
                fontsize=ddlabelsize,
                zorder=GRID_ZORDER,
                ha='center',
                fontname=DEFAULT_FONT,
                transform=ccrs.Geodetic())

    for yloc in gl.ylocator.locs:
        outside = yloc < gd.ymin or yloc > gd.ymax
        # don't draw labels when we're too close to either edge
        near_edge = (yloc - gd.ymin) < (yrange * 0.1) or \
            (gd.ymax - yloc) < (yrange * 0.1)
        if outside or near_edge:
            continue
        if yloc < 0:
            ytext = r'$%.1f^\circ$S' % (abs(yloc))
        else:
            ytext = r'$%.1f^\circ$N' % (abs(yloc))
        ax.text(yleft_dd, yloc, ytext,
                fontsize=ddlabelsize,
                zorder=GRID_ZORDER,
                va='center',
                fontname=DEFAULT_FONT,
                transform=ccrs.Geodetic())

    # draw cities
    mapcities = mmap.drawCities(shadow=True, zorder=CITIES_ZORDER)

    # draw the figure border thickly
    # TODO - figure out how to draw map border
    # bwidth = 3
    # ax.spines['top'].set_visible(True)
    # ax.spines['left'].set_visible(True)
    # ax.spines['bottom'].set_visible(True)
    # ax.spines['right'].set_visible(True)
    # ax.spines['top'].set_linewidth(bwidth)
    # ax.spines['right'].set_linewidth(bwidth)
    # ax.spines['bottom'].set_linewidth(bwidth)
    # ax.spines['left'].set_linewidth(bwidth)

    # Get the corner of the map with the lowest population
    corner_rect, filled_corner = _get_open_corner(popgrid, ax)
    clat2 = round_to_nearest(center_lat, 1.0)
    clon2 = round_to_nearest(center_lon, 1.0)

    # draw a little globe in the corner showing in small-scale
    # where the earthquake is located.
    proj = ccrs.Orthographic(central_latitude=clat2,
                             central_longitude=clon2)
    ax2 = fig.add_axes(corner_rect, projection=proj)
    ax2.add_feature(cfeature.OCEAN,
                    zorder=0,
                    facecolor=WATERCOLOR,
                    edgecolor=WATERCOLOR)
    ax2.add_feature(cfeature.LAND, zorder=0, edgecolor='black')
    ax2.plot([clon2], [clat2], 'w*',
             linewidth=1,
             markersize=16,
             markeredgecolor='k',
             markerfacecolor='r')
    ax2.gridlines()
    ax2.set_global()
    ax2.outline_patch.set_edgecolor('black')
    ax2.outline_patch.set_linewidth(2)

    # Draw the map scale in the unoccupied lower corner.
    corner = 'lr'
    if filled_corner == 'lr':
        corner = 'll'
    draw_scale(ax, corner, pady=0.05, padx=0.05)

    # Draw the epicenter as a black star
    plt.sca(ax)
    plt.plot(center_lon,
             center_lat,
             'k*',
             markersize=16,
             zorder=EPICENTER_ZORDER,
             transform=geoproj)

    if is_scenario:
        plt.text(center_lon,
                 center_lat,
                 'SCENARIO',
                 fontsize=64,
                 zorder=WATERMARK_ZORDER,
                 transform=geoproj,
                 alpha=0.2,
                 color='red',
                 horizontalalignment='center')

    # create pdf and png output file names
    pdf_file = basename + '.pdf'
    png_file = basename + '.png'

    # save to pdf
    plt.savefig(pdf_file)
    plt.savefig(png_file)

    return (pdf_file, png_file, mapcities)

def draw_map(adict, override_scenario=False):
    """If adict['imtype'] is MMI, draw a map of intensity draped over
    topography, otherwise draw IMT contour lines over hill-shaded
    topography.

    Args:
        adict (dictionary): A dictionary containing the following keys:
            'imtype' (str): The intensity measure type
            'topogrid' (Grid2D): A topography grid
            'allcities' (Cities): A list of global cities,
            'states_provinces' (Cartopy Feature): States/province
                boundaries.
            'countries' (Cartopy Feature): Country boundaries.
            'oceans' (Cartopy Feature): Oceans.
            'lakes' (Cartopy Feature): Lakes.
            'roads' (Shapely Feature): Roads.
            'faults' (Shapely Feature): Fault traces
            'datadir' (str): The path into which to deposit products
            'operator' (str): The producer of this shakemap
            'filter_size' (int): The size of the filter used before
                contouring
            'info' (dictionary): The shakemap info structure
            'component' (str): The intensity measure component being
                plotted
            'imtdict' (dictionary): Dict containing the IMT grids
            'ruptdict' (dictionary): Dict containing the rupture data
            'stationdict' (dictionary): Dict of station data
            'config' (dictionary): The configuration data for this
                shakemap
            'tdict' (dictionary): The text strings to be printed on the
                map in the user's choice of language.
            'license_text' (str): License text to display at bottom of
                map
            'license_logo' (str): Path to license logo image to display
                next to license text
        override_scenario (bool): Turn off scenario watermark.

    Returns:
        Tuple of (Matplotlib figure, Matplotlib figure): Objects
        containing the map generated by this function, and the intensity
        legend, respectively. If the imtype of this map is not 'MMI',
        the second element of the tuple will be None.
    """
    imtype = adict['imtype']
    imtdict = adict['imtdict']  # mmidict
    imtdata = np.nan_to_num(imtdict['mean'], nan=0.0)  # mmidata
    gd = GeoDict(imtdict['mean_metadata'])
    imtgrid = Grid2D(imtdata, gd)  # mmigrid

    gd = imtgrid.getGeoDict()

    # Retrieve the epicenter - this will get used on the map
    rupture = rupture_from_dict(adict['ruptdict'])
    origin = rupture.getOrigin()
    center_lat = origin.lat
    center_lon = origin.lon

    # load the cities data, limit to cities within shakemap bounds
    cities = adict['allcities'].limitByBounds((gd.xmin, gd.xmax,
                                               gd.ymin, gd.ymax))

    # get the map boundaries and figure size
    bounds, figsize, aspect = _get_map_info(gd)

    # Note: dimensions are: [left, bottom, width, height]
    dim_left = 0.1
    dim_bottom = 0.19
    dim_width = 0.8
    dim_height = dim_width / aspect
    if dim_height > 0.8:
        dim_height = 0.8
        dim_width = 0.8 * aspect
        dim_left = (1.0 - dim_width) / 2

    # Create the MercatorMap object, which holds a separate but identical
    # axes object used to determine collisions between city labels.
    mmap = MercatorMap(
        bounds, figsize, cities, padding=0.5,
        dimensions=[dim_left, dim_bottom, dim_width, dim_height])
    fig = mmap.figure
    ax = mmap.axes
    # this needs to be done here so that city label collision
    # detection will work
    fig.canvas.draw()

    # get the geographic projection object
    geoproj = mmap.geoproj
    # get the mercator projection object
    proj = mmap.proj
    # get the proj4 string - used by Grid2D project() method
    projstr = proj.proj4_init

    # get the projected IMT and topo grids
    pimtgrid, ptopogrid = _get_projected_grids(imtgrid,
                                               adict['topogrid'],
                                               projstr)

    # get the projected geodict
    proj_gd = pimtgrid.getGeoDict()

    pimtdata = pimtgrid.getData()
    ptopo_data = ptopogrid.getData()

    mmimap = ColorPalette.fromPreset('mmi')

    if imtype == 'MMI':
        draped_hsv = _get_draped(pimtdata, ptopo_data, mmimap)
    else:
        # get the draped topo data
        topo_colormap = ColorPalette.fromPreset('shaketopo')
        draped_hsv = _get_shaded(ptopo_data, topo_colormap)
        # convert units from natural-log space: PGV is plotted in cm/s,
        # other IMTs in percent-g
        if imtype == 'PGV':
            pimtdata = np.exp(pimtdata)
        else:
            pimtdata = np.exp(pimtdata) * 100

    plt.sca(ax)
    ax.set_xlim(proj_gd.xmin, proj_gd.xmax)
    ax.set_ylim(proj_gd.ymin, proj_gd.ymax)
    img_extent = (proj_gd.xmin, proj_gd.xmax, proj_gd.ymin, proj_gd.ymax)
    plt.imshow(draped_hsv,
               origin='upper',
               extent=img_extent,
               zorder=IMG_ZORDER,
               interpolation='none')

    config = adict['config']
    gmice = get_object_from_config('gmice', 'modeling', config)
    gmice_imts = gmice.DEFINED_FOR_INTENSITY_MEASURE_TYPES
    gmice_pers = gmice.DEFINED_FOR_SA_PERIODS

    oqimt = imt.from_string(imtype)

    if imtype != 'MMI' and (not isinstance(oqimt, tuple(gmice_imts))
                            or (isinstance(oqimt, imt.SA)
                                and oqimt.period not in gmice_pers)):
        my_gmice = None
    else:
        my_gmice = gmice

    if imtype != 'MMI':
        # call the contour module in plotting to get the vertices of the
        # contour lines
        contour_objects = contour(imtdict, imtype, adict['filter_size'],
                                  my_gmice)

        # get a color palette for the levels we have
        # levels = [c['properties']['value'] for c in contour_objects]

        # cartopy shapely feature has some weird behaviors, so I had to
        # go rogue and draw contour lines/labels myself.

        # To choose which contours to label, we will keep track of the
        # lengths of contours, grouped by isovalue
        contour_lens = defaultdict(lambda: [])

        def arclen(path):
            """
            Compute the arclength of *path*, which should be a list of
            pairs of numbers.
""" x0, y0 = [np.array(c) for c in zip(*path)] x1, y1 = [np.roll(c, -1) for c in (x0, y0)] # offset by 1 # don't include first-last vertices as an edge: x0, y0, x1, y1 = [c[:-1] for c in (x0, y0, x1, y1)] return np.sum(np.sqrt((x0 - x1)**2 + (y0 - y1)**2)) # draw dashed contours first, the ones over land will be overridden by # solid contours for contour_object in contour_objects: props = contour_object['properties'] multi_lines = sShape(contour_object['geometry']) pmulti_lines = proj.project_geometry(multi_lines, src_crs=geoproj) for multi_line in pmulti_lines: pmulti_line = mapping(multi_line)['coordinates'] x, y = zip(*pmulti_line) contour_lens[props['value']].append(arclen(pmulti_line)) # color = imt_cmap.getDataColor(props['value']) ax.plot(x, y, color=props['color'], linestyle='dashed', zorder=DASHED_CONTOUR_ZORDER) white_box = dict( boxstyle="round", ec=(0, 0, 0), fc=(1., 1, 1), color='k' ) # draw solid contours next - the ones over water will be covered by # ocean polygon for contour_object in contour_objects: props = contour_object['properties'] multi_lines = sShape(contour_object['geometry']) pmulti_lines = proj.project_geometry(multi_lines, src_crs=geoproj) # only label long contours (relative to others with the same # isovalue) min_len = np.array(contour_lens[props['value']]).mean() for multi_line in pmulti_lines: pmulti_line = mapping(multi_line)['coordinates'] x, y = zip(*pmulti_line) # color = imt_cmap.getDataColor(props['value']) ax.plot(x, y, color=props['color'], linestyle='solid', zorder=CONTOUR_ZORDER) if arclen(pmulti_line) >= min_len: # try to label each segment with black text in a white box xc = x[int(len(x)/3)] yc = y[int(len(y)/3)] if _label_close_to_edge( xc, yc, proj_gd.xmin, proj_gd.xmax, proj_gd.ymin, proj_gd.ymax): continue # TODO: figure out if box is going to go outside the map, # if so choose a different point on the line. # For small values, use scientific notation with 1 sig fig # to avoid multiple contours labelled 0.0: value = props['value'] fmt = '%.1g' if abs(value) < 0.1 else '%.1f' ax.text(xc, yc, fmt % value, size=8, ha="center", va="center", bbox=white_box, zorder=AXES_ZORDER-1) # make the border thicker lw = 2.0 ax.outline_patch.set_zorder(BORDER_ZORDER) ax.outline_patch.set_linewidth(lw) ax.outline_patch.set_joinstyle('round') ax.outline_patch.set_capstyle('round') # Coastlines will get drawn when we draw the ocean edges # ax.coastlines(resolution="10m", zorder=COAST_ZORDER, linewidth=3) if adict['states_provinces']: ax.add_feature(adict['states_provinces'], edgecolor='0.5', zorder=COAST_ZORDER) if adict['countries']: ax.add_feature(adict['countries'], edgecolor='black', zorder=BORDER_ZORDER) if adict['oceans']: ax.add_feature(adict['oceans'], edgecolor='black', zorder=OCEAN_ZORDER) if adict['lakes']: ax.add_feature(adict['lakes'], edgecolor='black', zorder=OCEAN_ZORDER) if adict['faults'] is not None: ax.add_feature(adict['faults'], edgecolor='firebrick', zorder=ROAD_ZORDER) if adict['roads'] is not None: ax.add_feature(adict['roads'], edgecolor='dimgray', zorder=ROAD_ZORDER) # draw graticules, ticks, tick labels _draw_graticules(ax, *bounds) # is this event a scenario? 
    info = adict['info']
    etype = info['input']['event_information']['event_type']
    is_scenario = etype == 'SCENARIO'

    if is_scenario and not override_scenario:
        plt.text(
            center_lon, center_lat,
            adict['tdict']['title_parts']['scenario'],
            fontsize=72,
            zorder=SCENARIO_ZORDER,
            transform=geoproj,
            alpha=WATERMARK_ALPHA,
            color=WATERMARK_COLOR,
            horizontalalignment='center',
            verticalalignment='center',
            rotation=45,
            path_effects=[
                path_effects.Stroke(linewidth=1, foreground='black')]
        )

    # Draw the map scale in the unoccupied lower corner.
    corner = 'll'
    draw_scale(ax, corner, pady=0.05, padx=0.05, zorder=SCALE_ZORDER)

    # draw cities
    mmap.drawCities(shadow=True, zorder=CITIES_ZORDER, draw_dots=True)

    # Draw the epicenter as a black star
    plt.sca(ax)
    plt.plot(center_lon, center_lat, 'k*', markersize=16,
             zorder=EPICENTER_ZORDER, transform=geoproj)

    # draw the rupture polygon(s) in black, if not point rupture
    point_source = True
    if not isinstance(rupture, PointRupture):
        point_source = False
        json_dict = rupture._geojson
        for feature in json_dict['features']:
            for coords in feature['geometry']['coordinates']:
                for pcoords in coords:
                    poly2d = sLineString([xy[0:2] for xy in pcoords])
                    ppoly = proj.project_geometry(poly2d)
                    mppoly = mapping(ppoly)['coordinates']
                    for spoly in mppoly:
                        x, y = zip(*spoly)
                        ax.plot(x, y, 'k', lw=1, zorder=FAULT_ZORDER)

    # draw the station data on the map
    stations = adict['stationdict']
    _draw_stations(ax, stations, imtype, mmimap, geoproj)

    _draw_title(imtype, adict)

    process_time = info['processing']['shakemap_versions']['process_time']
    map_version = int(
        info['processing']['shakemap_versions']['map_version'])
    if imtype == 'MMI':
        _draw_mmi_legend(fig, mmimap, gmice, process_time,
                         map_version, point_source, adict['tdict'])
        # make a separate MMI legend
        fig2 = plt.figure(figsize=figsize)
        _draw_mmi_legend(fig2, mmimap, gmice, process_time,
                         map_version, point_source, adict['tdict'])
    else:
        _draw_imt_legend(fig, mmimap, imtype, gmice, process_time,
                         map_version, point_source, adict['tdict'])
        plt.draw()
        fig2 = None

    _draw_license(fig, adict)

    return (fig, fig2)

def basic_test():
    mmidata = np.array([[7, 8, 8, 8, 7],
                        [8, 9, 9, 9, 8],
                        [8, 9, 10, 9, 8],
                        [8, 9, 9, 8, 8],
                        [7, 8, 8, 6, 5]], dtype=np.float32)
    popdata = np.ones_like(mmidata) * 1e7
    isodata = np.array([[4, 4, 4, 4, 4],
                        [4, 4, 4, 4, 4],
                        [4, 4, 156, 156, 156],
                        [156, 156, 156, 156, 156],
                        [156, 156, 156, 156, 156]], dtype=np.int32)

    shakefile = get_temp_file_name()
    popfile = get_temp_file_name()
    isofile = get_temp_file_name()
    geodict = GeoDict({'xmin': 0.5, 'xmax': 4.5,
                       'ymin': 0.5, 'ymax': 4.5,
                       'dx': 1.0, 'dy': 1.0,
                       'nx': 5, 'ny': 5})
    layers = OrderedDict([
        ('mmi', mmidata),
    ])
    event_dict = {'event_id': 'us12345678',
                  'magnitude': 7.8,
                  'depth': 10.0,
                  'lat': 34.123,
                  'lon': -118.123,
                  'event_timestamp': datetime.utcnow(),
                  'event_description': 'foo',
                  'event_network': 'us'}
    shake_dict = {'event_id': 'us12345678',
                  'shakemap_id': 'us12345678',
                  'shakemap_version': 1,
                  'code_version': '4.5',
                  'process_timestamp': datetime.utcnow(),
                  'shakemap_originator': 'us',
                  'map_status': 'RELEASED',
                  'shakemap_event_type': 'ACTUAL'}
    unc_dict = {'mmi': (1, 1)}
    shakegrid = ShakeGrid(layers, geodict, event_dict,
                          shake_dict, unc_dict)
    shakegrid.save(shakefile)
    popgrid = Grid2D(popdata, geodict.copy())
    isogrid = Grid2D(isodata, geodict.copy())
    write(popgrid, popfile, 'netcdf')
    write(isogrid, isofile, 'netcdf')

    ratedict = {4: {'start': [2010, 2012, 2014, 2016],
                    'end': [2012, 2014, 2016, 2018],
                    'rate': [0.01, 0.02, 0.03, 0.04]},
                156: {'start': [2010, 2012, 2014, 2016],
                      'end': [2012, 2014, 2016, 2018],
                      'rate': [0.02, 0.03, 0.04, 0.05]}}
    popgrowth = PopulationGrowth(ratedict)
    popyear = datetime.utcnow().year
    exposure = Exposure(popfile, popyear, isofile, popgrowth=popgrowth)
    expdict = exposure.calcExposure(shakefile)

    modeldict = [LognormalModel('AF', 11.613073, 0.180683, 1.0),
                 LognormalModel('CN', 10.328811, 0.100058, 1.0)]
    fatmodel = EmpiricalLoss(modeldict)

    # for the purposes of this test, let's override the rates
    # for Afghanistan and China with simpler numbers.
    fatmodel.overrideModel(
        'AF',
        np.array([0, 0, 0, 0, 1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 0],
                 dtype=np.float32))
    fatmodel.overrideModel(
        'CN',
        np.array([0, 0, 0, 0, 1e-7, 1e-6, 1e-5, 1e-4, 1e-3, 0],
                 dtype=np.float32))

    print('Testing very basic fatality calculation...')
    fatdict = fatmodel.getLosses(expdict)
    # strictly speaking, the Afghanistan fatalities should be 462,000,
    # but floating point precision dictates otherwise.
    testdict = {'CN': 46111,
                'AF': 461999,
                'TotalFatalities': 508110}
    for key, value in fatdict.items():
        assert value == testdict[key]
    print('Passed very basic fatality calculation...')

    print('Testing grid fatality calculations...')
    mmidata = exposure.getShakeGrid().getLayer('mmi').getData()
    popdata = exposure.getPopulationGrid().getData()
    isodata = exposure.getCountryGrid().getData()
    fatgrid = fatmodel.getLossGrid(mmidata, popdata, isodata)
    assert np.nansum(fatgrid) == 508111
    print('Passed grid fatality calculations...')

    # Testing modifying rates and stuffing them back in...
    chile = LognormalModel('CL', 19.786773, 0.259531, 0.0)
    rates = chile.getLossRates(np.arange(5, 10))
    modrates = rates * 2  # does this make the event twice as deadly?

    # roughly the exposures from the 2015-9-16 CL event
    expo_pop = np.array(
        [0, 0, 0, 1047000, 7314000, 1789000, 699000, 158000, 0, 0])
    mmirange = np.arange(5, 10)
    chile_deaths = chile.getLosses(expo_pop[4:9], mmirange)
    chile_double_deaths = chile.getLosses(expo_pop[4:9], mmirange,
                                          rates=modrates)
    print('Chile model fatalities: %f' % chile_deaths)
    print('Chile model x2 fatalities: %f' % chile_double_deaths)

def execute(self):
    """
    Raises:
        NotADirectoryError: When the event data directory does not
            exist.
        FileNotFoundError: When the shake_result HDF file does not
            exist.
    """
    install_path, data_path = get_config_paths()
    datadir = os.path.join(data_path, self._eventid, 'current',
                           'products')
    if not os.path.isdir(datadir):
        raise NotADirectoryError('%s is not a valid directory.' % datadir)
    datafile = os.path.join(datadir, 'shake_result.hdf')
    if not os.path.isfile(datafile):
        raise FileNotFoundError('%s does not exist.' % datafile)

    # Open the ShakeMapOutputContainer and extract the data
    container = ShakeMapOutputContainer.load(datafile)
    if container.getDataType() != 'grid':
        raise NotImplementedError('mapping module can only operate on '
                                  'gridded data, not sets of points')

    # get the path to the products.conf file, load the config
    config_file = os.path.join(install_path, 'config', 'products.conf')
    spec_file = get_configspec('products')
    validator = get_custom_validator()
    config = ConfigObj(config_file, configspec=spec_file)
    results = config.validate(validator)
    check_extra_values(config, self.logger)
    if not isinstance(results, bool) or not results:
        config_error(config, results)

    # create contour files
    self.logger.debug('Mapping...')

    # get the filter size from the products.conf
    filter_size = config['products']['contour']['filter_size']

    # get the operator setting from config
    operator = config['products']['mapping']['operator']

    # get all of the pieces needed for the mapping functions
    layers = config['products']['mapping']['layers']
    if 'topography' in layers and layers['topography'] != '':
        topofile = layers['topography']
    else:
        topofile = None
    if 'roads' in layers and layers['roads'] != '':
        roadfile = layers['roads']
    else:
        roadfile = None
    if 'faults' in layers and layers['faults'] != '':
        faultfile = layers['faults']
    else:
        faultfile = None

    # Get the number of parallel workers
    max_workers = config['products']['mapping']['max_workers']

    # Reading HDF5 files currently takes a long time, due to poor
    # programming in MapIO. To save us some time until that issue is
    # resolved, we'll coarsely subset the topo grid once here and pass
    # it into both mapping functions

    # get the bounds of the map
    info = container.getMetadata()
    xmin = info['output']['map_information']['min']['longitude']
    xmax = info['output']['map_information']['max']['longitude']
    ymin = info['output']['map_information']['min']['latitude']
    ymax = info['output']['map_information']['max']['latitude']
    dy = float(
        info['output']['map_information']['grid_spacing']['latitude'])
    dx = float(
        info['output']['map_information']['grid_spacing']['longitude'])
    padx = 5 * dx
    pady = 5 * dy
    sxmin = float(xmin) - padx
    sxmax = float(xmax) + padx
    symin = float(ymin) - pady
    symax = float(ymax) + pady

    sampledict = GeoDict.createDictFromBox(sxmin, sxmax, symin, symax,
                                           dx, dy)
    if topofile:
        topogrid = read(topofile,
                        samplegeodict=sampledict,
                        resample=False)
    else:
        tdata = np.full([sampledict.ny, sampledict.nx], 0.0)
        topogrid = Grid2D(data=tdata, geodict=sampledict)

    model_config = container.getConfig()

    imtlist = container.getIMTs()

    textfile = os.path.join(
        get_data_path(), 'mapping', 'map_strings.'
+ config['products']['mapping']['language']) text_dict = get_text_strings(textfile) if config['products']['mapping']['fontfamily'] != '': matplotlib.rcParams['font.family'] = \ config['products']['mapping']['fontfamily'] matplotlib.rcParams['axes.unicode_minus'] = False allcities = Cities.fromDefault() states_provs = None countries = None oceans = None lakes = None extent = (float(xmin), float(ymin), float(xmax), float(ymax)) if 'CALLED_FROM_PYTEST' not in os.environ: states_provs = cfeature.NaturalEarthFeature( category='cultural', name='admin_1_states_provinces_lines', scale='10m', facecolor='none') states_provs = list(states_provs.intersecting_geometries(extent)) if len(states_provs) > 300: states_provs = None else: states_provs = cfeature.NaturalEarthFeature( category='cultural', name='admin_1_states_provinces_lines', scale='10m', facecolor='none') countries = cfeature.NaturalEarthFeature(category='cultural', name='admin_0_countries', scale='10m', facecolor='none') oceans = cfeature.NaturalEarthFeature(category='physical', name='ocean', scale='10m', facecolor=WATERCOLOR) lakes = cfeature.NaturalEarthFeature(category='physical', name='lakes', scale='10m', facecolor=WATERCOLOR) if faultfile is not None: faults = ShapelyFeature(Reader(faultfile).geometries(), ccrs.PlateCarree(), facecolor='none') else: faults = None if roadfile is not None: roads = ShapelyFeature(Reader(roadfile).geometries(), ccrs.PlateCarree(), facecolor='none') if len(list(roads.intersecting_geometries(extent))) > 200: roads = None else: roads = ShapelyFeature(Reader(roadfile).geometries(), ccrs.PlateCarree(), facecolor='none') else: roads = None alist = [] for imtype in imtlist: component, imtype = imtype.split('/') comp = container.getComponents(imtype)[0] d = { 'imtype': imtype, 'topogrid': topogrid, 'allcities': allcities, 'states_provinces': states_provs, 'countries': countries, 'oceans': oceans, 'lakes': lakes, 'roads': roads, 'faults': faults, 'datadir': datadir, 'operator': operator, 'filter_size': filter_size, 'info': info, 'component': comp, 'imtdict': container.getIMTGrids(imtype, comp), 'ruptdict': copy.deepcopy(container.getRuptureDict()), 'stationdict': container.getStationDict(), 'config': model_config, 'tdict': text_dict } alist.append(d) if imtype == 'MMI': g = copy.deepcopy(d) g['imtype'] = 'thumbnail' alist.append(g) h = copy.deepcopy(d) h['imtype'] = 'overlay' alist.append(h) self.contents.addFile('intensityMap', 'Intensity Map', 'Map of macroseismic intensity.', 'intensity.jpg', 'image/jpeg') self.contents.addFile('intensityMap', 'Intensity Map', 'Map of macroseismic intensity.', 'intensity.pdf', 'application/pdf') self.contents.addFile('intensityThumbnail', 'Intensity Thumbnail', 'Thumbnail of intensity map.', 'pin-thumbnail.png', 'image/png') self.contents.addFile( 'intensityOverlay', 'Intensity Overlay and World File', 'Macroseismic intensity rendered as a ' 'PNG overlay and associated world file', 'intensity_overlay.png', 'image/png') self.contents.addFile( 'intensityOverlay', 'Intensity Overlay and World File', 'Macroseismic intensity rendered as a ' 'PNG overlay and associated world file', 'intensity_overlay.pngw', 'text/plain') else: fileimt = oq_to_file(imtype) self.contents.addFile(fileimt + 'Map', fileimt.upper() + ' Map', 'Map of ' + imtype + '.', fileimt + '.jpg', 'image/jpeg') self.contents.addFile(fileimt + 'Map', fileimt.upper() + ' Map', 'Map of ' + imtype + '.', fileimt + '.pdf', 'application/pdf') if max_workers > 0: with cf.ProcessPoolExecutor(max_workers=max_workers) as ex: 
results = ex.map(make_map, alist) list(results) else: for adict in alist: make_map(adict) container.close()
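# Minimal sketch of the coarse topo subsetting done in execute() above: pad
# the map bounds by five grid cells on each side, then build one sampling
# GeoDict to share across the mapping functions. Bounds and spacing here are
# hypothetical.
from mapio.geodict import GeoDict


def _example_padded_sampledict():
    xmin, xmax, ymin, ymax = -120.0, -116.0, 33.0, 37.0
    dx = dy = 1 / 60.0  # one arc-minute grid
    padx, pady = 5 * dx, 5 * dy
    return GeoDict.createDictFromBox(xmin - padx, xmax + padx,
                                     ymin - pady, ymax + pady, dx, dy)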
def _get_grids(config, simfile):
    """Create a dictionary of Grid2D objects for each IMT in input CSV file.

    Args:
        config (ConfigObj): Dictionary containing fields:
            - simulation: (dict)
                - order = (required) "rows" or "cols"
                - projection = (optional) Proj4 string defining input
                  X/Y data projection.
                - nx = Number of columns in input grid.
                - ny = Number of rows in input grid.
                - dx = Resolution of columns (if XY, whatever those
                  units are, otherwise decimal degrees).
                - dy = Resolution of rows (if XY, whatever those
                  units are, otherwise decimal degrees).
        simfile (str): Path to a CSV file with columns:
            - LAT Latitudes for each cell. If irregular, X/Y data will
              be used.
            - LON Longitudes for each cell. If irregular, X/Y data will
              be used.
            - X Regularized X coordinates for each cell.
            - Y Regularized Y coordinates for each cell.
            - H1_<IMT> First horizontal channel for given IMT.
              Supported IMTs are: PGA, PGV, SA(period).
            - H2_<IMT> Second horizontal channel for given IMT.
              Supported IMTs are: PGA, PGV, SA(period).

    Returns:
        dict: Dictionary of IMTs (PGA, PGV, SA(1.0), etc.) and Grid2D
        objects. If XY data was used, these grids are the result of a
        projection/resampling of that XY data back to a regular lat/lon
        grid.
    """
    row_order = 'C'
    if config['simulation']['order'] != 'rows':
        row_order = 'F'
    dataframe = pd.read_csv(simfile)

    # construct a geodict
    geodict = _get_geodict(dataframe, config)

    # figure out which IMTs we have...
    column_list = []
    for column in dataframe.columns:
        for imtmatch in IMT_MATCHES:
            if re.search(imtmatch, column):
                column_list.append(column)

    # gather up all "channels" for each IMT
    imtdict = {}  # dictionary of imts and a list of columns
    for col in column_list:
        channel, imt = col.split('_')
        if imt in imtdict:
            imtdict[imt].append(col)
        else:
            imtdict[imt] = [col]

    # make a dictionary of Grid2D objects containing max of two "channels"
    # for each IMT
    nrows = geodict.ny
    ncols = geodict.nx
    imtgrids = {}
    for imt, imtcols in imtdict.items():
        icount = len(imtcols)
        if icount != 2:
            raise IndexError(
                'Incorrect number of channels for IMT %s.' % imt)
        channel1, channel2 = imtcols
        maximt = dataframe[[channel1, channel2]].max(axis=1).values
        data = np.reshape(maximt, (nrows, ncols), order=row_order)
        grid = Grid2D(data, geodict)

        # if we need to project data back to geographic, do that here
        if geodict.projection != GEO_PROJ_STR:
            grid = grid.project(GEO_PROJ_STR)

        imtgrids[imt] = grid

    return imtgrids
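# Minimal sketch of the per-IMT "max of two horizontal channels" step used
# in _get_grids() above, on a made-up 2x2 grid; column names follow the
# H1_/H2_ convention, and the helper name is hypothetical.
import numpy as np
import pandas as pd


def _example_channel_max():
    df = pd.DataFrame({'H1_PGA': [0.10, 0.30, 0.20, 0.40],
                       'H2_PGA': [0.20, 0.10, 0.50, 0.30]})
    # elementwise max across the two horizontal channels
    maximt = df[['H1_PGA', 'H2_PGA']].max(axis=1).values
    # 'C' corresponds to order='rows' in the config
    return np.reshape(maximt, (2, 2), order='C')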
def test_subdivide():
    print('Testing subdivide method - aligned grids...')
    data = np.arange(0, 4).reshape((2, 2))
    geodict = GeoDict({'xmin': 0.0, 'xmax': 1.0,
                       'ymin': 0.0, 'ymax': 1.0,
                       'dx': 1.0, 'dy': 1.0,
                       'ny': 2, 'nx': 2})
    hostgrid = Grid2D(data, geodict)
    finedict = GeoDict({'xmin': 0.0 - (1.0 / 3.0), 'xmax': 1.0 + (1.0 / 3.0),
                        'ymin': 0.0 - (1.0 / 3.0), 'ymax': 1.0 + (1.0 / 3.0),
                        'dx': 1.0 / 3.0, 'dy': 1.0 / 3.0,
                        'ny': 6, 'nx': 6})
    finegrid = hostgrid.subdivide(finedict)
    output = np.array([[0., 0., 0., 1., 1., 1.],
                       [0., 0., 0., 1., 1., 1.],
                       [0., 0., 0., 1., 1., 1.],
                       [2., 2., 2., 3., 3., 3.],
                       [2., 2., 2., 3., 3., 3.],
                       [2., 2., 2., 3., 3., 3.]])
    np.testing.assert_almost_equal(finegrid.getData(), output)
    print('Passed subdivide method test - aligned grids.')

    print('Testing subdivide method - non-aligned grids...')
    data = np.arange(0, 9).reshape((3, 3))
    geodict = GeoDict({'xmin': 0.0, 'xmax': 10.0,
                       'ymin': 0.0, 'ymax': 10.0,
                       'dx': 5.0, 'dy': 5.0,
                       'ny': 3, 'nx': 3})
    hostgrid = Grid2D(data, geodict)
    finedict = GeoDict({'xmin': -2.5, 'xmax': 11.5,
                        'ymin': -1.5, 'ymax': 10.5,
                        'dx': 2.0, 'dy': 2.0,
                        'nx': 8, 'ny': 7})
    N = np.nan

    print('Testing subdivide with min parameter...')
    finegrid = hostgrid.subdivide(finedict, cellFill='min')
    output = np.array([[N, 0., 0., 1., 1., 1., 2., 2.],
                       [N, 0., 0., 1., 1., 1., 2., 2.],
                       [N, 3., 3., 4., 4., 4., 5., 5.],
                       [N, 3., 3., 4., 4., 4., 5., 5.],
                       [N, 3., 3., 4., 4., 4., 5., 5.],
                       [N, 6., 6., 7., 7., 7., 8., 8.],
                       [N, 6., 6., 7., 7., 7., 8., 8.]])
    np.testing.assert_almost_equal(finegrid.getData(), output)
    print('Passed subdivide with min parameter...')

    print('Testing subdivide with max parameter...')
    finegrid = hostgrid.subdivide(finedict, cellFill='max')
    output = np.array([[N, 0., 0., 1., 1., 2., 2., 2.],
                       [N, 0., 0., 1., 1., 2., 2., 2.],
                       [N, 3., 3., 4., 4., 5., 5., 5.],
                       [N, 3., 3., 4., 4., 5., 5., 5.],
                       [N, 6., 6., 7., 7., 8., 8., 8.],
                       [N, 6., 6., 7., 7., 8., 8., 8.],
                       [N, 6., 6., 7., 7., 8., 8., 8.]])
    np.testing.assert_almost_equal(finegrid.getData(), output)
    print('Passed subdivide with max parameter...')

    print('Testing subdivide with mean parameter...')
    finegrid = hostgrid.subdivide(finedict, cellFill='mean')
    output = np.array([[N, 0., 0., 1., 1., 1.5, 2., 2.],
                       [N, 0., 0., 1., 1., 1.5, 2., 2.],
                       [N, 3., 3., 4., 4., 4.5, 5., 5.],
                       [N, 3., 3., 4., 4., 4.5, 5., 5.],
                       [N, 4.5, 4.5, 5.5, 5.5, 6.0, 6.5, 6.5],
                       [N, 6., 6., 7., 7., 7.5, 8., 8.],
                       [N, 6., 6., 7., 7., 7.5, 8., 8.]])
    np.testing.assert_almost_equal(finegrid.getData(), output)
    print('Passed subdivide with mean parameter...')
    print('Passed subdivide method test - non-aligned grids.')
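# For the aligned case above, subdivide amounts to replicating each host
# cell into a block of finer cells; np.kron reproduces the expected 6x6
# 'output' array. This is a sketch of the concept, not mapio's
# implementation, and the helper name is hypothetical.
import numpy as np


def _example_aligned_subdivide():
    data = np.arange(0, 4).reshape((2, 2))
    # each host cell becomes a 3x3 block of identical values
    return np.kron(data, np.ones((3, 3)))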
def calculate(self, cleanup=True, rowmax=300, colmax=None):
    """
    Calculate the model.

    Args:
        cleanup (bool): If True, delete temporary hdf5 files.
        rowmax (int): Number of rows to compute at once; if None, all
            rows will be computed at once.
        colmax (int): Number of columns to compute at once; if None, all
            columns will be computed at once.

    Returns:
        dict: Dictionary containing the model results (and model inputs
        if saveinputs was set to True). See `the description
        <https://github.com/usgs/groundfailure#api-for-model-output>`_
        of the structure.
    """
    tk = list(self.shakemap.keys())[0]
    # Figure out what slices to do
    rowstarts, rowends, colstarts, colends = \
        self.shakemap[tk].getSliceDiv(rowmax, colmax)

    # Make empty matrix to fill
    X = np.empty([self.geodict.ny, self.geodict.nx])

    # Loop through slices, appending output each time
    for rowstart, rowend, colstart, colend in \
            zip(rowstarts, rowends, colstarts, colends):
        X[rowstart:rowend, colstart:colend] = eval(self.equation)

    P = 1 / (1 + np.exp(-X))

    if 'vs30max' in self.config[self.model].keys():
        vs30 = self.layerdict['vs30'].getSlice(None, None, None, None,
                                               name='vs30')
        P[vs30 > float(self.config[self.model]['vs30max'])] = 0.0

    if 'minpgv' in self.config[self.model].keys():
        pgv = self.shakemap['pgv'].getSlice(None, None, None, None,
                                            name='pgv')
        P[pgv < float(self.config[self.model]['minpgv'])] = 0.0

    if 'minpga' in self.config[self.model].keys():
        pga = self.shakemap['pga'].getSlice(None, None, None, None,
                                            name='pga')
        P[pga < float(self.config[self.model]['minpga'])] = 0.0

    if self.uncert is not None:
        # hard code for now
        if 'Zhu and others (2017)' in self.modelrefs['shortref']:
            if 'stddev' in self.layerdict.keys():
                stdX = self.layerdict['stddev'].getSlice()
            else:
                stdX = float(self.config[self.model]['default_stddev'])
            varX = stdX**2. + \
                (self.coeffs['b1']**2. *
                 self.uncert['stdpgv'].getSlice()**2.)
            varP = (np.exp(-X) / (np.exp(-X) + 1)**2.)**2. * varX
            if 'coverage' in self.config[self.model].keys():
                a = 0.4915
                b = 42.4
                c = 9.165
                # ((2*a*b*c*np.exp(2*c*P))/(b+np.exp(c*P))**3.)**2.*varP
                varL = ((2 * a * b * c * np.exp(-c * P)) /
                        ((1 + b * np.exp(-c * P))**3.))**2. * varP
                std1 = np.sqrt(varL)
            else:
                std1 = np.sqrt(varP)
        elif 'Jessee' in self.modelrefs['shortref']:
            if 'stddev' in self.layerdict.keys():
                stdX = self.layerdict['stddev'].getSlice()
            else:
                stdX = float(self.config[self.model]['default_stddev'])
            varX = stdX**2. + (
                (self.coeffs['b1'] + self.coeffs['b6'] *
                 (np.arctan(self.layerdict['slope'].getSlice()) *
                  180 / np.pi))**2. *
                self.uncert['stdpgv'].getSlice()**2.)
            varP = (np.exp(-X) / (np.exp(-X) + 1)**2.)**2. * varX
            if 'coverage' in self.config[self.model].keys():
                a = -7.592
                b = 5.237
                c = -3.042
                d = 4.035
                varL = (np.exp(a + b * P + c * P**2. + d * P**3.) *
                        (b + 2. * P * c + 3. * d * P**2.))**2. * varP
                std1 = np.sqrt(varL)
            else:
                std1 = np.sqrt(varP)
        else:
            print('cannot do uncertainty for %s model currently, skipping'
                  % self.modelrefs['shortref'])
            self.uncert = None
            std1 = None
    else:
        std1 = None

    # P needs to be converted to areal coverage after dealing with
    # uncertainty
    if 'coverage' in self.config[self.model].keys():
        eqn = self.config[self.model]['coverage']['eqn']
        P = eval(eqn)

    if self.slopefile is not None and self.nonzero is not None:
        # Apply slope min/max limits
        print('applying slope thresholds')
        P = P * self.nonzero
        if std1 is not None:
            # No uncert for masked values
            std1[P == 0] = 0.

    # Stuff into Grid2D object
    if 'Jessee' in self.modelrefs['shortref']:
        if 'coverage' not in self.config[self.model].keys():
            units5 = 'Relative Hazard'
        else:
            units5 = 'Proportion of area affected'
    elif 'Zhu' in self.modelrefs['shortref']:
        if 'coverage' not in self.config[self.model].keys() and \
                '2017' in self.modelrefs['shortref']:
            units5 = 'Relative Hazard'
        else:
            units5 = 'Proportion of area affected'
    else:
        units5 = 'Probability of any occurrence'

    shakedetail = ('%s_ver%s' % (self.shakedict['shakemap_id'],
                                 self.shakedict['shakemap_version']))
    description = {'name': self.modelrefs['shortref'],
                   'longref': self.modelrefs['longref'],
                   'units': units5,
                   'shakemap': shakedetail,
                   'event_id': self.eventdict['event_id'],
                   'parameters': {'slopemin': self.slopemin,
                                  'slopemax': self.slopemax,
                                  'modeltype': self.modeltype,
                                  'notes': self.notes}}
    if 'vs30max' in self.config[self.model].keys():
        description['vs30max'] = float(self.config[self.model]['vs30max'])
    if 'minpgv' in self.config[self.model].keys():
        description['minpgv'] = float(self.config[self.model]['minpgv'])

    Pgrid = Grid2D(P, self.geodict)
    if self.trimfile is not None:
        # Turn all offshore cells to nan
        Pgrid = trim_ocean(Pgrid, self.trimfile, nodata=float('nan'))

    rdict = collections.OrderedDict()
    rdict['model'] = {
        'grid': Pgrid,
        'label': '%s estimate - %s' % (self.modeltype.capitalize(),
                                       units5.title()),
        'type': 'output',
        'description': description
    }
    if self.uncert is not None:
        Stdgrid = Grid2D(std1, self.geodict)
        if self.trimfile is not None:
            Stdgrid = trim_ocean(Stdgrid, self.trimfile,
                                 nodata=float('nan'))
        rdict['std'] = {
            'grid': Stdgrid,
            'label': '%s estimate - %s (std)' % (
                self.modeltype.capitalize(), units5.title()),
            'type': 'output',
            'description': description
        }

    # This step might swamp memory for higher resolution runs
    if self.saveinputs is True:
        for layername, layergrid in list(self.layerdict.items()):
            units = self.units[layername]
            if units is None:
                units = ''
            rdict[layername] = {
                'grid': Grid2D(layergrid.getSlice(None, None, None, None,
                                                  name=layername),
                               self.geodict),
                'label': '%s (%s)' % (layername, units),
                'type': 'input',
                'description': {'units': units,
                                'name': self.shortrefs[layername],
                                'longref': self.longrefs[layername]}
            }
        for gmused in self.gmused:
            if 'pga' in gmused:
                units = '%g'
                getkey = 'pga'
            elif 'pgv' in gmused:
                units = 'cm/s'
                getkey = 'pgv'
            elif 'mmi' in gmused:
                units = 'intensity'
                getkey = 'mmi'
            else:
                # Layer is derived from several input layers; skip
                # outputting this layer
                continue
            if getkey in rdict:
                continue
            layer = self.shakemap[getkey].getSlice(None, None, None, None,
                                                   name=getkey)
            rdict[getkey] = {
                'grid': Grid2D(layer, self.geodict),
                'label': '%s (%s)' % (getkey.upper(), units),
                'type': 'input',
                'description': {'units': units, 'shakemap': shakedetail}
            }

    if cleanup:
        shutil.rmtree(self.tempdir)

    return rdict
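# Delta-method sketch of the uncertainty propagation used in calculate()
# above: P is the logistic transform of X, so dP/dX = exp(-X)/(1+exp(-X))**2
# and var(P) ~= (dP/dX)**2 * var(X). Values and the helper name are
# illustrative only.
import numpy as np


def _example_logistic_variance(varX=0.25):
    X = np.array([-2.0, 0.0, 2.0])
    P = 1.0 / (1.0 + np.exp(-X))
    # first-order (delta-method) variance of the logistic transform
    varP = (np.exp(-X) / (1.0 + np.exp(-X))**2)**2 * varX
    return P, np.sqrt(varP)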
def get_exposures(grid, pop_file, shakefile=None, shakethreshtype=None,
                  shakethresh=None, probthresh=None):
    """
    Get exposure-based statistics.

    Args:
        grid: Model grid.
        pop_file (str): Path to the landscan population grid.
        shakefile (str): Optional, path to shakemap file to use for
            ground motion threshold.
        shakethreshtype (str): Optional, type of ground motion to use for
            shakethresh, 'pga', 'pgv', or 'mmi'.
        shakethresh: Optional, float or list of shaking thresholds in %g
            for pga, cm/s for pgv, float for mmi.
        probthresh: Optional, None or float, exclude any cells with
            probabilities less than or equal to this value.

    Returns:
        dict: Dictionary with keys named exp_pop_# where # is the
        shakethresh.
    """
    # If probthresh defined, zero out any areas less than or equal to
    # probthresh before proceeding
    if probthresh is not None:
        origdata = grid.getData()
        moddat = origdata.copy()
        moddat[moddat <= probthresh] = 0.0
        moddat[np.isnan(origdata)] = float('nan')
    else:
        moddat = grid.getData()

    mdict = grid.getGeoDict()

    # Cut out area from population file
    popcut = quickcut(pop_file, mdict, precise=False,
                      extrasamp=2., method='nearest')
    popdat = popcut.getData()
    pdict = popcut.getGeoDict()

    # Pad grid with nans to beyond extent of pdict
    pad_dict = {}
    pad_dict['padleft'] = int(
        np.abs(np.ceil((mdict.xmin - pdict.xmin) / mdict.dx)))
    pad_dict['padright'] = int(
        np.abs(np.ceil((pdict.xmax - mdict.xmax) / mdict.dx)))
    pad_dict['padbottom'] = int(
        np.abs(np.ceil((mdict.ymin - pdict.ymin) / mdict.dy)))
    pad_dict['padtop'] = int(
        np.abs(np.ceil((pdict.ymax - mdict.ymax) / mdict.dy)))
    # padGrid pads with inf
    padgrid, mdict2 = Grid2D.padGrid(moddat, mdict, pad_dict)
    padgrid[np.isinf(padgrid)] = float('nan')  # change to pad with nan
    padgrid = Grid2D(data=padgrid, geodict=mdict2)  # turn into Grid2D object

    # Resample model grid so as to be the nearest integer multiple of the
    # population grid resolution
    factor = np.round(pdict.dx / mdict2.dx)

    # Create geodictionary that is a factor of X higher res but otherwise
    # identical
    ndict = GeoDict.createDictFromBox(pdict.xmin, pdict.xmax,
                                      pdict.ymin, pdict.ymax,
                                      pdict.dx / factor, pdict.dy / factor)

    # Resample
    grid2 = padgrid.interpolate2(ndict, method='linear')

    # Get proportion of each cell that has values (to account properly
    # for any nans)
    prop = block_reduce(~np.isnan(grid2.getData().copy()),
                        block_size=(int(factor), int(factor)),
                        cval=float('nan'), func=np.sum) / (factor**2.)

    # Now block reduce to same geodict as popfile
    modresamp = block_reduce(grid2.getData().copy(),
                             block_size=(int(factor), int(factor)),
                             cval=float('nan'), func=np.nanmean)

    exp_pop = {}
    if shakefile is not None:
        # Resample shakefile to population grid
        shakemap = ShakeGrid.load(shakefile, resample=False)
        shakemap = shakemap.getLayer(shakethreshtype)
        shakemap = shakemap.interpolate2(pdict)
        shkdat = shakemap.getData()
        for shaket in shakethresh:
            threshmult = shkdat > shaket
            threshmult = threshmult.astype(float)
            exp_pop['exp_pop_%1.2fg' % (shaket / 100.,)] = np.nansum(
                popdat * prop * modresamp * threshmult)
    else:
        exp_pop['exp_pop_0.00g'] = np.nansum(popdat * prop * modresamp)

    return exp_pop
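# Sketch of the nan-aware block aggregation used in get_exposures() above:
# downsample a fine grid by an integer factor while tracking the fraction
# of valid (non-nan) cells in each block. The 4x4 array and helper name
# are made up for illustration.
import numpy as np
from skimage.measure import block_reduce


def _example_block_reduce(factor=2):
    fine = np.arange(16, dtype=float).reshape(4, 4)
    fine[0, 1] = np.nan
    # fraction of valid cells per coarse block
    prop = block_reduce(~np.isnan(fine), block_size=(factor, factor),
                        func=np.sum) / factor**2
    # nan-aware mean per coarse block
    coarse = block_reduce(fine, block_size=(factor, factor),
                          func=np.nanmean)
    return prop, coarse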