import time

import numpy as np
import pandas as pd
from shapely.geometry import Polygon
from gisutils import shp2df

# project-internal imports (module paths assumed from context):
from mfsetup.grid import rasterize
from mfsetup.utils import get_input_arguments


def make_bdlknc_zones(grid, lakesshp, include_ids,
                      feat_id_column='feat_id',
                      lake_package_id_column='lak_id'):
    """
    Make zones for populating with lakebed leakance values. Same as
    lakarr, but with a buffer around each lake so that horizontal
    connections have non-zero values of bdlknc, and near-shore areas
    can be assigned higher leakance values.
    """
    print('setting up lakebed leakance zones...')
    t0 = time.time()
    if isinstance(lakesshp, str):
        # implement automatic reprojection in gis-utils
        # maintaining backwards compatibility
        kwargs = {'dest_crs': grid.crs}
        kwargs = get_input_arguments(kwargs, shp2df)
        lakes = shp2df(lakesshp, **kwargs)
    elif isinstance(lakesshp, pd.DataFrame):
        lakes = lakesshp.copy()
    else:
        raise ValueError(
            'unrecognized input for "lakesshp": {}'.format(lakesshp))

    # Exterior buffer
    id_column = feat_id_column.lower()
    lakes.columns = [c.lower() for c in lakes.columns]
    exterior_buffer = 30  # m
    lakes.index = lakes[id_column]
    lakes = lakes.loc[include_ids]
    if lake_package_id_column not in lakes.columns:
        lakes[lake_package_id_column] = np.arange(1, len(lakes) + 1)
    # speed up buffer construction by getting exteriors once,
    # and probably more importantly, by simplifying the possibly complex
    # geometries of lakes generated from 2-ft lidar
    unbuffered_exteriors = [Polygon(g.exterior).simplify(5)
                            for g in lakes.geometry]
    lakes['geometry'] = [g.buffer(exterior_buffer)
                         for g in unbuffered_exteriors]
    arr = rasterize(lakes, grid=grid, id_column=lake_package_id_column)

    # Interior buffer for lower leakance, assumed to be 20 m around the lake
    interior_buffer = -20  # m
    lakes['geometry'] = [g.buffer(interior_buffer)
                         for g in unbuffered_exteriors]
    arr2 = rasterize(lakes, grid=grid, id_column=lake_package_id_column)
    arr2 = arr2 * 100  # Create new ids for the interior, as multiples of 100

    arr[arr2 > 0] = arr2[arr2 > 0]
    # ensure that order of hydroids is unchanged
    # (used to match features to lake IDs in lake package)
    assert lakes[id_column].tolist() == list(include_ids)
    print('finished in {:.2f}s'.format(time.time() - t0))
    return arr
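# Usage sketch (illustrative only, not part of the original source). Assumes
# `grid` is a model grid object with a valid `crs`, and 'lakes.shp' is a
# hypothetical shapefile with a 'feat_id' attribute field. Zone numbers below
# 100 are the buffered near-shore cells; multiples of 100 are the interiors:
#
#     zones = make_bdlknc_zones(grid, 'lakes.shp', include_ids=[1001, 1002])
#     bdlknc = np.zeros(zones.shape, dtype=float)
#     bdlknc[(zones > 0) & (zones < 100)] = 0.1  # near-shore: higher leakance
#     bdlknc[zones >= 100] = 0.001               # interior: lower leakance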
def make_lakarr2d(grid, lakesdata, include_ids, id_column='hydroid'):
    """
    Make an nrow x ncol array with lake package extent for each lake,
    using the numbers in the 'id' column in the lakes shapefile.
    """
    if isinstance(lakesdata, str):
        # implement automatic reprojection in gis-utils
        # maintaining backwards compatibility
        kwargs = {'dest_crs': grid.crs}
        kwargs = get_input_arguments(kwargs, shp2df)
        lakes = shp2df(lakesdata, **kwargs)
    elif isinstance(lakesdata, pd.DataFrame):
        lakes = lakesdata.copy()
    else:
        raise ValueError(
            'unrecognized input for "lakesdata": {}'.format(lakesdata))
    id_column = id_column.lower()
    lakes.columns = [c.lower() for c in lakes.columns]
    lakes.index = lakes[id_column]
    lakes = lakes.loc[include_ids]
    lakes['lakid'] = np.arange(1, len(lakes) + 1)
    lakes['geometry'] = [Polygon(g.exterior) for g in lakes.geometry]
    arr = rasterize(lakes, grid=grid, id_column='lakid')

    # ensure that order of hydroids is unchanged
    # (used to match features to lake IDs in lake package)
    assert lakes[id_column].tolist() == include_ids
    return arr
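# Usage sketch (illustrative; same hypothetical inputs as above). The returned
# array is zero outside of lakes and 1, 2, 3, ... inside, with lakes numbered
# in the order their IDs appear in `include_ids`:
#
#     lakarr2d = make_lakarr2d(grid, 'lakes.shp',
#                              include_ids=[1001, 1002], id_column='hydroid')
#     lake_package_ids = np.unique(lakarr2d[lakarr2d > 0])  # [1, 2]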
import os

import numpy as np
import rasterio
from flopy import mf6

# project-internal imports (module paths assumed from context):
from mfsetup.discretization import (find_remove_isolated_cells,
                                    get_layer_thicknesses)
from mfsetup.fileio import load_array
from mfsetup.grid import rasterize


def test_dis_setup(shellmound_model_with_grid):

    m = shellmound_model_with_grid  #deepcopy(model_with_grid)
    # test intermediate array creation
    m.cfg['dis']['remake_top'] = True
    m.cfg['dis']['source_data']['top']['resample_method'] = 'nearest'
    m.cfg['dis']['source_data']['botm']['resample_method'] = 'nearest'
    dis = m.setup_dis()
    botm = m.dis.botm.array.copy()
    assert isinstance(dis, mf6.ModflowGwfdis)
    assert 'DIS' in m.get_package_list()
    # verify that units got converted correctly
    assert m.dis.top.array.mean() < 100
    assert m.dis.length_units.array == 'meters'

    # verify that modelgrid was reset after building DIS
    mg = m.modelgrid
    assert (mg.nlay, mg.nrow, mg.ncol) == m.dis.botm.array.shape
    assert np.array_equal(mg.top, m.dis.top.array)
    assert np.array_equal(mg.botm, m.dis.botm.array)

    arrayfiles = m.cfg['intermediate_data']['top'] + \
                 m.cfg['intermediate_data']['botm'] + \
                 m.cfg['intermediate_data']['idomain']
    for f in arrayfiles:
        assert os.path.exists(f)
        fname = os.path.splitext(os.path.split(f)[1])[0]
        k = ''.join([s for s in fname if s.isdigit()])
        var = fname.strip(k)
        data = np.loadtxt(f)
        model_array = getattr(m.dis, var).array
        if len(k) > 0:
            k = int(k)
            model_array = model_array[k]
        assert np.array_equal(model_array, data)

    # test that written idomain array reflects supplied shapefile of active area
    active_area = rasterize(m.cfg['dis']['source_data']['idomain']['filename'],
                            m.modelgrid)
    isactive = active_area == 1
    written_idomain = load_array(m.cfg['dis']['griddata']['idomain'])
    assert np.all(written_idomain[:, ~isactive] <= 0)

    # test idomain from just layer elevations
    del m.cfg['dis']['griddata']['idomain']
    dis = m.setup_dis()
    top = dis.top.array.copy()
    top[top == m._nodata_value] = np.nan
    botm = dis.botm.array.copy()
    botm[botm == m._nodata_value] = np.nan
    thickness = get_layer_thicknesses(top, botm)
    invalid_botms = np.ones_like(botm)
    invalid_botms[np.isnan(botm)] = 0
    invalid_botms[thickness < 1.0001] = 0
    # these two arrays are not equal,
    # because isolated cells haven't been removed from the second one;
    # this verifies that _set_idomain is removing them
    assert not np.array_equal(m.idomain[:, isactive].sum(axis=1),
                              invalid_botms[:, isactive].sum(axis=1))
    invalid_botms = find_remove_isolated_cells(invalid_botms,
                                               minimum_cluster_size=20)
    active_cells = m.idomain[:, isactive].copy()
    active_cells[active_cells < 0] = 0  # need to do this because some idomain cells are -1
    assert np.array_equal(active_cells.sum(axis=1),
                          invalid_botms[:, isactive].sum(axis=1))

    # test recreating package from external arrays
    m.remove_package('dis')
    assert m.cfg['dis']['griddata']['top'] is not None
    assert m.cfg['dis']['griddata']['botm'] is not None
    dis = m.setup_dis()
    assert np.array_equal(m.dis.botm.array[m.dis.idomain.array == 1],
                          botm[m.dis.idomain.array == 1])

    # test recreating just the top from the external array
    m.remove_package('dis')
    m.cfg['dis']['remake_top'] = False
    m.cfg['dis']['griddata']['botm'] = None
    dis = m.setup_dis()
    dis.write()
    assert np.array_equal(m.dis.botm.array[m.dis.idomain.array == 1],
                          botm[m.dis.idomain.array == 1])
    arrayfiles = m.cfg['dis']['griddata']['top']
    for f in arrayfiles:
        assert os.path.exists(f['filename'])
    assert os.path.exists(os.path.join(m.model_ws, dis.filename))

    # dis package idomain should be consistent with model property
    updated_idomain = m.idomain
    assert np.array_equal(m.dis.idomain.array, updated_idomain)

    # check that units were converted (or not)
    assert np.allclose(dis.top.array[dis.idomain.array[0] == 1].mean(),
                       40, atol=10)
    mcaq = m.cfg['dis']['source_data']['botm']['filenames'][3]
    assert 'mcaq' in mcaq
    with rasterio.open(mcaq) as src:
        mcaq_data = src.read(1)
        mcaq_data[mcaq_data == src.meta['nodata']] = np.nan
    assert np.allclose(m.dis.botm.array[3][dis.idomain.array[3] == 1].mean() / .3048,
                       np.nanmean(mcaq_data), atol=5)
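# The intermediate-array loop above assumes external array filenames of the
# form '<var><layer>' (e.g. 'botm3.dat'); a minimal sketch of that parsing,
# with 'botm3' as a hypothetical example:
#
#     fname = 'botm3'                                 # from os.path.splitext(...)
#     k = ''.join([s for s in fname if s.isdigit()])  # '3' (layer number)
#     var = fname.strip(k)                            # 'botm' (dis attribute name)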
import numpy as np
import pandas as pd
import pyproj
import rasterio
from rasterstats import zonal_stats
from shapely.geometry import Polygon
from gisutils import project

# project-internal imports (module paths assumed from context):
from mfsetup.discretization import get_layer
from mfsetup.grid import rasterize
from mfsetup.units import convert_length_units


def setup_ghb_data(model):

    m = model
    source_data = model.cfg['ghb'].get('source_data').copy()
    # get the GHB cells
    # todo: generalize more of the GHB setup code and move it somewhere else
    if 'shapefile' in source_data:
        shapefile_data = source_data['shapefile']
        key = [k for k in shapefile_data.keys() if 'filename' in k.lower()][0]
        shapefile_name = shapefile_data.pop(key)
        ghbcells = rasterize(shapefile_name, m.modelgrid, **shapefile_data)
    else:
        raise NotImplementedError('Only shapefile input supported for GHBs')

    cond = model.cfg['ghb'].get('cond')
    if cond is None:
        raise KeyError("key 'cond' not found in GHB yaml input. "
                       "Must supply conductance via this key for GHB setup.")

    # sample DEM for minimum elevation in each cell with a GHB
    # todo: GHB: allow time-varying bheads via csv input
    vertices = np.array(m.modelgrid.vertices)[ghbcells.flat > 0, :, :]
    polygons = [Polygon(vrts) for vrts in vertices]
    if 'dem' in source_data:
        key = [k for k in source_data['dem'].keys()
               if 'filename' in k.lower()][0]
        dem_filename = source_data['dem'].pop(key)
        with rasterio.open(dem_filename) as src:
            meta = src.meta
            # reproject the polygons to the dem crs if needed
            try:
                from gisutils import get_authority_crs
                dem_crs = get_authority_crs(src.crs)
            except ImportError:  # older gis-utils without get_authority_crs
                dem_crs = pyproj.crs.CRS.from_user_input(src.crs)
        if dem_crs != m.modelgrid.crs:
            polygons = project(polygons, m.modelgrid.crs, dem_crs)

        all_touched = False
        if meta['transform'][0] > m.modelgrid.delr[0]:
            all_touched = True
        results = zonal_stats(polygons, dem_filename, stats='min',
                              all_touched=all_touched)
        min_elevs = np.ones((m.nrow * m.ncol), dtype=float) * np.nan
        min_elevs[ghbcells.flat > 0] = np.array([r['min'] for r in results])
        units_key = [k for k in source_data['dem'] if 'units' in k]
        if len(units_key) > 0:
            min_elevs *= convert_length_units(source_data['dem'][units_key[0]],
                                              model.length_units)
        min_elevs = np.reshape(min_elevs, (m.nrow, m.ncol))
    else:
        raise NotImplementedError('Must supply DEM to sample for GHB elevations\n'
                                  '(GHB: source_data: dem:)')

    # make a DataFrame with MODFLOW input
    i, j = np.indices((m.nrow, m.ncol))
    df = pd.DataFrame({'per': 0,
                       'k': 0,
                       'i': i.flat,
                       'j': j.flat,
                       'bhead': min_elevs.flat,
                       'cond': cond})
    df.dropna(axis=0, inplace=True)

    # assign layers so that bhead is above botms
    df['k'] = get_layer(model.dis.botm.array, df.i, df.j, df.bhead)
    # remove GHB cells where the specified head is below the bottom of the model
    below_bottom_of_model = df.bhead < model.dis.botm.array[-1, df.i, df.j] + 0.01
    df = df.loc[~below_bottom_of_model].copy()

    # exclude inactive cells
    k, i, j = df.k, df.i, df.j
    if model.version == 'mf6':
        active_cells = model.idomain[k, i, j] >= 1
    else:
        active_cells = model.ibound[k, i, j] >= 1
    df = df.loc[active_cells]
    return df
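# Usage sketch (illustrative; `model` is assumed to be a model instance whose
# cfg includes a 'ghb' block along these lines -- the filenames and the
# 'elevation_units' key name are hypothetical, though the code above only
# requires that the keys contain 'filename' and 'units'):
#
#     ghb:
#       cond: 100.
#       source_data:
#         shapefile:
#           filename: 'ghb_cells.shp'
#         dem:
#           filename: 'dem.tif'
#           elevation_units: 'feet'
#
#     df = setup_ghb_data(model)
#     df.head()  # one row per active GHB cell: per, k, i, j, bhead, cond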