def check_external_files_for_nans(files_list):
    """Check external input files for 'nan' values that would crash MODFLOW.

    Parameters
    ----------
    files_list : list of str or PathLike
        Paths of external text files to check.

    Returns
    -------
    has_nans : list
        Subset of files_list that contain NaN/'nan' values.
    """
    has_nans = []
    for f in files_list:
        try:
            # array text files:
            # set nodata to np.nan so that the default nodata value of -9999
            # is not cast to np.nan; we only want to flag instances of 'nan'
            # that will actually crash MODFLOW
            arr = load_array(f, nodata=np.nan)
            if np.any(np.isnan(arr)):
                has_nans.append(f)
        # narrowed from a bare except: still a deliberate best-effort
        # fallback, but no longer swallows SystemExit/KeyboardInterrupt
        except Exception:
            # other text files (MODFLOW-6 input with blocks):
            # fall back to searching the raw text for 'nan'
            with open(f) as src:
                text = src.read()
            if 'nan' in text:
                has_nans.append(f)
    return has_nans
def test_idomain_above_sfr(model_with_sfr):
    """Check that idomain is zeroed above SFR reaches and that the model
    botm is pushed down to accommodate reach elevations.

    NOTE(review): this function is redefined later in the file; pytest only
    collects the later definition, so this version is shadowed — confirm
    whether it should be removed or renamed.
    """
    m = model_with_sfr
    sfr = m.sfr
    # get the kij locations of sfr reaches
    k, i, j = zip(*sfr.reach_data[['k', 'i', 'j']])

    # verify that streambed tops are above their cell bottoms
    # (fixed: the right-hand side was previously wrapped in np.all(),
    # reducing it to a scalar boolean and making the comparison meaningless)
    assert np.all(sfr.packagedata.array['rtp'] > m.dis.botm.array[k, i, j])

    # test that idomain above sfr cells is being set to 0
    # by setting all botms above streambed tops
    new_botm = m.dis.botm.array.copy()
    new_top = m.dis.top.array.copy()
    new_botm[:, i, j] = 9999
    new_top[i, j] = 9999
    np.savetxt(m.cfg['dis']['griddata']['top'][0]['filename'], new_top)
    m.dis.botm = new_botm
    #m.dis.top = new_top
    m.remove_package(sfr)
    m._reset_bc_arrays()
    assert not np.any(m._isbc2d == 4)
    sfr = m.setup_sfr()

    # test loading a 3d array from a filelist
    idomain = load_array(m.cfg['dis']['griddata']['idomain'])
    assert np.array_equal(m.idomain, idomain)

    # dis package idomain of model instance attached to sfrdata
    # forms basis for identifying unconnected cells
    assert np.array_equal(m.idomain, m.sfrdata.model.idomain)
    assert np.array_equal(m.idomain, m.sfrdata.model.dis.idomain.array)

    # verify that dis package file still references external file
    m.dis.write()
    fname = os.path.join(m.model_ws, m.dis.filename)
    assert os.path.getsize(fname) < 3e3

    # idomain should be zero everywhere there's a sfr reach
    # except for in the botm layer
    # (verifies that model botm was reset to accommodate SFR reaches)
    assert np.array_equal(m.sfr.reach_data.i, i)
    assert np.array_equal(m.sfr.reach_data.j, j)
    k, i, j = cellids_to_kij(sfr.packagedata.array['cellid'])
    assert idomain[:-1, i, j].sum() == 0
    # reaches with a 'none' cellid are unconnected/inactive
    active = np.array([c != 'none' for c in sfr.packagedata.array['cellid']])
    assert idomain[-1, i, j].sum() == active.sum()
    # assert np.all(m.dis.botm.array[:-1, i, j] > 9980)
    assert np.all(m.dis.botm.array[-1, i, j] < 100)
def model_setup_and_run(model_setup, mf6_exe): m = model_setup #deepcopy(model_setup) m.simulation.exe_name = mf6_exe dis_idomain = m.dis.idomain.array.copy() for i, d in enumerate(m.cfg['dis']['griddata']['idomain']): arr = load_array(d['filename']) assert np.array_equal(m.idomain[i], arr) assert np.array_equal(dis_idomain[i], arr) success = False if exe_exists(mf6_exe): success, buff = m.simulation.run_simulation() if not success: list_file = m.name_file.list.array with open(list_file) as src: list_output = src.read() assert success, 'model run did not terminate successfully:\n{}'.format(list_output) return m
def test_setup_lake_connectiondata(get_pleasant_mf6_with_dis):
    """Check MF6 lake package connectiondata against the model lakarr:
    vertical connection locations, uniqueness, and zone-based leakances.
    """
    m = get_pleasant_mf6_with_dis
    df = setup_lake_connectiondata(m)
    df['k'], df['i'], df['j'] = zip(*df['cellid'])
    vertical_connections = df.loc[df.claktype == 'vertical']
    lakezones = load_array(m.cfg['intermediate_data']['lakzones'][0])
    litleak = m.cfg['lak']['source_data']['littoral_leakance']
    profleak = m.cfg['lak']['source_data']['profundal_leakance']

    # 2D array showing all vertical connections in lake package
    lakarr2d_6 = np.zeros((m.nrow, m.ncol), dtype=bool)
    lakarr2d_6[vertical_connections.i.values, vertical_connections.j.values] = True

    # verify that number of vert. connection locations is consistent
    # between lakarr and mf6 list input
    assert np.sum(m.lakarr.sum(axis=0) > 0) == np.sum(lakarr2d_6)
    # verify that there is only one vertical connection at each location
    ij_locations = set(zip(vertical_connections.i, vertical_connections.j))
    assert len(vertical_connections) == len(ij_locations)
    # verify that the connections are in the same place (horizontally)
    assert not np.any((m.lakarr.sum(axis=0) > 0) != lakarr2d_6)

    # check that the number of vert. connections in each layer is consistent
    lake_thickness = (m.lakarr > 0).sum(axis=0)
    for k in range(1, m.nlay + 1):
        # lake connections in current layer: columns where the lake is
        # exactly k layers thick
        i, j = np.where(lake_thickness == k)
        # vertical connections are made in the highest active cell
        highest_active_layer = np.argmax(m.idomain[:, i, j], axis=0)
        connection_cellids = list(zip(highest_active_layer, i, j))
        kvc = vertical_connections.loc[vertical_connections.cellid.isin(
            connection_cellids)]
        # by definition, number of vert. connections in kvc is same as
        # cells with lake_thickness == k
        # verify that specified leakances are consistent with lake zones
        # (zone 100 = profundal, zone 1 = littoral)
        assert np.sum(kvc.bedleak == profleak) == np.sum(
            lakezones[lake_thickness == k] == 100)
        assert np.sum(kvc.bedleak == litleak) == np.sum(
            lakezones[lake_thickness == k] == 1)
def test_ibound(pleasant_nwt_with_dis): m = pleasant_nwt_with_dis # use pleasant lake extent as ibound is_pleasant_lake = m.lakarr[0] # clear out lake info, just for this test function m.cfg['model']['packages'].remove('lak') del m.cfg['lak']['source_data'] # specify path relative to cfg file m.cfg['bas6']['source_data']['ibound'] = { 'filename': '../../../examples/data/pleasant/source_data/shps/all_lakes.shp' } m._reset_bc_arrays() bas6 = m.setup_bas6() bas6.write_file() assert np.array_equal(m.ibound, m.bas6.ibound.array) # find_remove_isolated_cells is run on ibound array but not in Lake setup assert np.array_equal(m.ibound[0], find_remove_isolated_cells(is_pleasant_lake)) ibound = load_array(m.cfg['bas6']['ibound']) assert np.array_equal(m.ibound, ibound)
def test_dis_setup(shellmound_model_with_grid):
    """Exercise DIS package setup: intermediate array files, unit
    conversion, idomain derivation, and rebuilding from external arrays.
    """
    m = shellmound_model_with_grid  #deepcopy(model_with_grid)
    # test intermediate array creation
    m.cfg['dis']['remake_top'] = True
    m.cfg['dis']['source_data']['top']['resample_method'] = 'nearest'
    m.cfg['dis']['source_data']['botm']['resample_method'] = 'nearest'
    dis = m.setup_dis()
    botm = m.dis.botm.array.copy()
    assert isinstance(dis, mf6.ModflowGwfdis)
    assert 'DIS' in m.get_package_list()
    # verify that units got converted correctly
    assert m.dis.top.array.mean() < 100
    assert m.dis.length_units.array == 'meters'

    # verify that modelgrid was reset after building DIS
    mg = m.modelgrid
    assert (mg.nlay, mg.nrow, mg.ncol) == m.dis.botm.array.shape
    assert np.array_equal(mg.top, m.dis.top.array)
    assert np.array_equal(mg.botm, m.dis.botm.array)

    # each intermediate array file should exist and match the
    # corresponding DIS package array (layer number parsed from filename)
    arrayfiles = m.cfg['intermediate_data']['top'] + \
                 m.cfg['intermediate_data']['botm'] + \
                 m.cfg['intermediate_data']['idomain']
    for f in arrayfiles:
        assert os.path.exists(f)
        fname = os.path.splitext(os.path.split(f)[1])[0]
        # digits in the filename give the layer; the rest is the variable name
        k = ''.join([s for s in fname if s.isdigit()])
        var = fname.strip(k)
        data = np.loadtxt(f)
        model_array = getattr(m.dis, var).array
        if len(k) > 0:
            k = int(k)
            model_array = model_array[k]
        assert np.array_equal(model_array, data)

    # test that written idomain array reflects supplied shapefile of active area
    active_area = rasterize(m.cfg['dis']['source_data']['idomain']['filename'],
                            m.modelgrid)
    isactive = active_area == 1
    written_idomain = load_array(m.cfg['dis']['griddata']['idomain'])
    assert np.all(written_idomain[:, ~isactive] <= 0)

    # test idomain from just layer elevations
    del m.cfg['dis']['griddata']['idomain']
    dis = m.setup_dis()
    top = dis.top.array.copy()
    top[top == m._nodata_value] = np.nan
    # NOTE: botm is rebound here (nodata -> NaN); the comparisons further
    # down use this NaN-masked copy
    botm = dis.botm.array.copy()
    botm[botm == m._nodata_value] = np.nan
    thickness = get_layer_thicknesses(top, botm)
    # expected active cells: valid botm elevation and >= ~1 unit thick
    invalid_botms = np.ones_like(botm)
    invalid_botms[np.isnan(botm)] = 0
    invalid_botms[thickness < 1.0001] = 0
    # these two arrays are not equal
    # because isolated cells haven't been removed from the second one
    # this verifies that _set_idomain is removing them
    assert not np.array_equal(m.idomain[:, isactive].sum(axis=1),
                              invalid_botms[:, isactive].sum(axis=1))
    invalid_botms = find_remove_isolated_cells(invalid_botms,
                                               minimum_cluster_size=20)
    active_cells = m.idomain[:, isactive].copy()
    # need to do this because some idomain cells are -1
    active_cells[active_cells < 0] = 0
    assert np.array_equal(active_cells.sum(axis=1),
                          invalid_botms[:, isactive].sum(axis=1))

    # test recreating package from external arrays
    m.remove_package('dis')
    assert m.cfg['dis']['griddata']['top'] is not None
    assert m.cfg['dis']['griddata']['botm'] is not None
    dis = m.setup_dis()
    assert np.array_equal(m.dis.botm.array[m.dis.idomain.array == 1],
                          botm[m.dis.idomain.array == 1])

    # test recreating just the top from the external array
    m.remove_package('dis')
    m.cfg['dis']['remake_top'] = False
    m.cfg['dis']['griddata']['botm'] = None
    dis = m.setup_dis()
    dis.write()
    assert np.array_equal(m.dis.botm.array[m.dis.idomain.array == 1],
                          botm[m.dis.idomain.array == 1])
    arrayfiles = m.cfg['dis']['griddata']['top']
    for f in arrayfiles:
        assert os.path.exists(f['filename'])
    assert os.path.exists(os.path.join(m.model_ws, dis.filename))

    # dis package idomain should be consistent with model property
    updated_idomain = m.idomain
    assert np.array_equal(m.dis.idomain.array, updated_idomain)

    # check that units were converted (or not)
    assert np.allclose(dis.top.array[dis.idomain.array[0] == 1].mean(),
                       40, atol=10)
    mcaq = m.cfg['dis']['source_data']['botm']['filenames'][3]
    assert 'mcaq' in mcaq
    with rasterio.open(mcaq) as src:
        mcaq_data = src.read(1)
        mcaq_data[mcaq_data == src.meta['nodata']] = np.nan
    # model botm is in meters; source raster presumably in feet
    # (hence the /.3048 conversion) -- TODO confirm against source data
    assert np.allclose(m.dis.botm.array[3][dis.idomain.array[3] == 1].mean() / .3048,
                       np.nanmean(mcaq_data), atol=5)
def test_idomain_above_sfr(model_with_sfr):
    """Check that idomain is zeroed above SFR reaches and that the model
    botm is reset to accommodate reach elevations.
    """
    m = model_with_sfr
    sfr = m.sfr
    # get the kij locations of sfr reaches
    k, i, j = zip(*sfr.reach_data[['k', 'i', 'j']])

    # verify that streambed tops are above their cell bottoms
    # (fixed: the right-hand side was previously wrapped in np.all(),
    # reducing it to a scalar boolean and making the comparison meaningless)
    assert np.all(sfr.packagedata.array['rtp'] > m.dis.botm.array[k, i, j])

    # test that idomain above sfr cells is being set to 0
    # by setting all botms above streambed tops
    new_botm = m.dis.botm.array.copy()
    new_top = m.dis.top.array.copy()
    new_botm[:-1, i, j] = 9999
    new_botm[-1, i, j] = 9990
    new_top[i, j] = 9999
    np.savetxt(m.cfg['dis']['griddata']['top'][0]['filename'], new_top)
    m.dis.botm = new_botm
    m.dis.top = new_top
    # reset external files for model top
    # (that are used to cache an original version of the model top
    # prior to any adjustment to lake bottoms)
    from pathlib import Path
    original_top_file = Path(
        m.tmpdir, f"{m.name}_{m.cfg['dis']['top_filename_fmt']}.original")
    original_top_file.unlink()
    # if original_top_file is not found or invalid,
    # the routine in sourcedata.setup_array for setting up the botm array
    # attempts to write original_top_file from
    # m.cfg['intermediate_data']['top']
    # successive calls to sourcedata.setup_array
    # in the context of setting up the bottom array
    # then reference this "original" top,
    # so if adjustments to lake bathymetry are made,
    # they are only made relative to the "original" top,
    # and not a revised top (which would keep pushing the bottoms downward)
    np.savetxt(m.cfg['intermediate_data']['top'][0], new_top)
    m.remove_package(sfr)
    m._reset_bc_arrays()
    assert not np.any(m._isbc2d == 4)
    sfr = m.setup_sfr()

    # test loading a 3d array from a filelist
    idomain = load_array(m.cfg['dis']['griddata']['idomain'])
    assert np.array_equal(m.idomain, idomain)

    # dis package idomain of model instance attached to sfrdata
    # forms basis for identifying unconnected cells
    assert np.array_equal(m.idomain, m.sfrdata.model.idomain)
    assert np.array_equal(m.idomain, m.sfrdata.model.dis.idomain.array)

    # verify that dis package file still references external file
    m.dis.write()
    fname = os.path.join(m.model_ws, m.dis.filename)
    assert os.path.getsize(fname) < 3e3

    # idomain should be zero everywhere there's a sfr reach
    # except for in the botm layer
    # (verifies that model botm was reset to accommodate SFR reaches)
    assert np.array_equal(m.sfr.reach_data.i, i)
    assert np.array_equal(m.sfr.reach_data.j, j)
    k, i, j = cellids_to_kij(sfr.packagedata.array['cellid'])
    assert idomain[:-1, i, j].sum() == 0
    # reaches with a 'none' cellid are unconnected/inactive
    active = np.array([c != 'none' for c in sfr.packagedata.array['cellid']])
    assert idomain[-1, i, j].sum() == active.sum()
    # assert np.all(m.dis.botm.array[:-1, i, j] > 9980)
    assert np.all(m.dis.botm.array[-1, i, j] < 100)