def test_parse_modflowgwf_kwargs(shellmound_cfg):
    """Check that model kwargs parsed from the configuration are passed
    through to the flopy ModflowGwf constructor and written correctly to
    the MODFLOW 6 name file, with and without Newton under-relaxation."""
    cfg = deepcopy(shellmound_cfg)
    cfg = MF6model._parse_model_kwargs(cfg)
    kwargs = get_input_arguments(cfg['model'],
                                 mf6.ModflowGwf,
                                 exclude='packages')
    m = MF6model(cfg=cfg, **kwargs)
    # simulation workspace should match the configured sim_ws
    # (normalize separators and case for cross-platform comparison)
    sim_path = os.path.normpath(
        m.simulation.simulation_data.mfpath._sim_path).lower()
    assert sim_path == cfg['simulation']['sim_ws'].lower()
    m.write()

    # verify that options were written correctly to namefile
    # newton solver, but without underrelaxation
    nampath = os.path.join(m.model_ws, m.model_nam_file)
    options = read_mf6_block(nampath, 'options')
    assert os.path.normpath(options['list'][0]).lower() == \
           os.path.normpath(cfg['model']['list']).lower()
    for option in ['print_input', 'print_flows', 'save_flows']:
        if cfg['model'][option]:
            assert option in options
    # no keyword should follow 'newton' when under-relaxation is off
    assert len(options['newton']) == 0

    # newton solver, with underrelaxation
    cfg['model']['options']['newton_under_relaxation'] = True
    cfg = MF6model._parse_model_kwargs(cfg)
    assert cfg['model']['options']['newtonoptions'] == ['under_relaxation']
    kwargs = get_input_arguments(cfg['model'],
                                 mf6.ModflowGwf,
                                 exclude='packages')
    m = MF6model(cfg=cfg, **kwargs)
    m.write()
    options = read_mf6_block(nampath, 'options')
    assert options['newton'] == ['under_relaxation']
# Example 2
def test_external_tables(get_pleasant_mf6_with_dis):
    """Verify that LAK, WEL and CHD input written as external files
    exists on disk and is referenced (open/close) from the package files."""
    m = get_pleasant_mf6_with_dis

    def referenced_file(block_line):
        # second token on an open/close line is the (possibly quoted) filename
        return block_line.strip().split()[1].strip("'")

    lak = m.setup_lak()
    lak.write()
    connectiondata_file = m.cfg['external_files']['lak_connectiondata'][0]
    assert os.path.exists(connectiondata_file)
    written = read_mf6_block(lak.filename, 'connectiondata')
    assert referenced_file(written['connectiondata'][0]) == connectiondata_file

    # WEL and CHD stress period data follow the same pattern:
    # one external file per period, each referenced from a period block
    for key, setup_method in (('wel_stress_period_data', m.setup_wel),
                              ('chd_stress_period_data',
                               m.setup_perimeter_boundary)):
        package = setup_method()
        package.write()
        spd_files = m.cfg['external_files'][key]
        for path in spd_files.values():
            assert os.path.exists(path)
        period_blocks = read_mf6_block(package.filename, 'period')
        for kper, block in period_blocks.items():
            assert referenced_file(block[0]) in spd_files.values()
# Example 3
def test_setup_mover(pleasant_lgr_setup_from_yaml):
    """Check that the MVR package was created for the LGR parent/inset
    pair and that both SFR packages have the mover option set."""
    m = pleasant_lgr_setup_from_yaml
    mvr = m.simulation.mvr
    assert isinstance(mvr, mf6.ModflowMvr)
    assert os.path.exists(mvr.filename)
    period_blocks = read_mf6_block(mvr.filename, 'period')
    # two mover connections expected in the first stress period
    assert len(period_blocks[1]) == 2
    models = [m, m.inset['plsnt_lgr_inset']]
    for model in models:
        sfr_options = read_mf6_block(model.sfr.filename, 'options')
        assert 'mover' in sfr_options
# Example 4
def test_oc_setup(get_pleasant_mf6_with_dis):
    """Set up the Output Control package and verify the written file."""
    m = get_pleasant_mf6_with_dis  # deepcopy(model)
    oc = m.setup_oc()
    oc.write()
    oc_path = os.path.join(m.model_ws, oc.filename)
    assert os.path.exists(oc_path)
    assert isinstance(oc, mf6.ModflowGwfoc)
    # collapse each option's tokens into a single lower-case string
    raw = read_mf6_block(oc_path, 'options')
    options = {key: ' '.join(tokens).lower() for key, tokens in raw.items()}
    perioddata = read_mf6_block(oc_path, 'period')
    for key, extension in (('budget', '.cbc'), ('head', '.hds')):
        assert 'fileout' in options[key]
        assert extension in options[key]
    for record in ('save head last', 'save budget last'):
        assert record in perioddata[1]
# Example 5
def test_lak_setup(get_pleasant_mf6_with_dis):
    """Set up the LAK package with internal (non-external) files and
    horizontal connections enabled; verify the written package contents,
    the connections lookup table, and the lake discretization."""
    m = get_pleasant_mf6_with_dis  # deepcopy(model)
    m.cfg['lak']['external_files'] = False
    m.cfg['lak']['horizontal_connections'] = True
    lak = m.setup_lak()
    lak.write()
    assert isinstance(lak, mf6.ModflowGwflak)
    package_filename = os.path.join(m.model_ws, lak.filename)
    assert os.path.exists(package_filename)
    # stage/area/volume table files referenced by the package should exist
    for f in lak.tables.array['tab6']:
        assert os.path.exists(f)
    options = read_mf6_block(package_filename, 'options')
    for var in [
            'boundnames', 'save_flows', 'obs6', 'surfdep', 'time_conversion',
            'length_conversion'
    ]:
        assert var in options
    # conversion factors written to the file should match the package arrays
    assert float(
        options['time_conversion'][0]) == 86400. == lak.time_conversion.array
    assert float(
        options['length_conversion'][0]) == 1. == lak.length_conversion.array
    # one table per lake; nlakeconn consistent with the connectiondata block
    assert lak.nlakes.array == len(lak.tables.array)
    assert lak.packagedata.array['nlakeconn'][0] == len(
        lak.connectiondata.array)
    # verify that there are no connections to inactive cells
    k, i, j = zip(*lak.connectiondata.array['cellid'])
    inactive = m.dis.idomain.array[k, i, j] < 1
    assert not np.any(inactive)
    assert len(lak.perioddata.array) == m.nper
    # rainfall/evaporation period data should match the source lake fluxes
    lake_fluxes = m.lake_fluxes.copy()
    lake_fluxes['rainfall'] = lake_fluxes['precipitation']
    for per in range(m.nper):
        for var in ['rainfall', 'evaporation']:
            loc = m.lak.perioddata.array[0]['laksetting'] == var
            value = m.lak.perioddata.array[per]['laksetting_data'][loc][0]
            assert np.allclose(value, lake_fluxes.loc[per, var])

    # check the auxilliary table
    connections_lookup_file = m.cfg['lak']['output_files'][
        'connections_lookup_file'].format(m.name)
    connections_lookup_file = os.path.join(
        m._tables_path,
        os.path.split(connections_lookup_file)[1])
    info = pd.read_csv(connections_lookup_file)
    assert not info.zone.isnull().any()
    # horizontal connections must have a cell face recorded
    assert not info.loc[info.claktype == 'horizontal',
                        'cellface'].isnull().any()

    # check the lake discretization
    import rasterio
    i, j = 35, 40  # point in the middle of the lake
    x = m.modelgrid.xcellcenters[i, j]
    y = m.modelgrid.ycellcenters[i, j]
    datum = m.dis.top.array[i, j]
    bathy_raster = m.cfg['lak']['source_data']['bathymetry_raster']['filename']
    with rasterio.open(bathy_raster) as src:
        bathy = np.squeeze(list(src.sample(zip([x], [y]))))
        bathy[(bathy == src.nodata) | (bathy == 0)] = np.nan
    # at the lake location, the top two layers should be inactive
    # (idomain 0) with bottoms collapsed to the land surface
    assert np.allclose(m.dis.botm.array[:2, i, j], m.dis.top[i, j])
    assert np.allclose(m.dis.idomain.array[:2, i, j], 0)
# Example 6
def test_sto_setup(get_pleasant_mf6_with_dis):
    """Set up the STO package; verify the written external arrays, the
    steady-state/transient period flags, and consistency of ss/sy values
    with the parent model."""

    m = get_pleasant_mf6_with_dis  #deepcopy(model_with_grid)
    sto = m.setup_sto()
    sto.write()
    assert os.path.exists(os.path.join(m.model_ws, sto.filename))
    assert isinstance(sto, mf6.ModflowGwfsto)
    # each layer of sy and ss should be written to an external array file
    # that round-trips to the in-memory package array
    for var in ['sy', 'ss']:
        model_array = getattr(sto, var).array
        for k, item in enumerate(m.cfg['sto']['griddata'][var]):
            f = item['filename']
            assert os.path.exists(f)
            data = np.loadtxt(f)
            assert np.array_equal(model_array[k], data)
    # period 1 steady-state, period 2 transient
    period_data = read_mf6_block(sto.filename, 'period')
    assert period_data[1] == ['steady-state']
    assert period_data[2] == ['transient']

    # compare values to parent model
    # (inset layer -> parent layer; inset layers 0-1 both map to parent 0)
    inset_parent_layer_mapping = {0: 0, 1: 0, 2: 1, 3: 2, 4: 3}
    for var in ['ss', 'sy']:
        parent_array = m.parent.upw.__dict__[var].array
        inset_array = sto.__dict__[var].array
        compare_inset_parent_values(inset_array, parent_array,
                                    m.modelgrid, m.parent.modelgrid,
                                    inset_parent_layer_mapping,
                                    nodata=1.0,
                                    rtol=0.05
                                    )
# Example 7
def test_lgr_model_setup(pleasant_lgr_setup_from_yaml):
    """Check LGR parent/inset wiring: shared simulation object, inset
    namefile options, and model-name tags on external file names."""
    m = pleasant_lgr_setup_from_yaml
    assert isinstance(m.inset, dict)
    assert len(m.simulation._models) > 1
    for inset_name, inset_model in m.inset.items():
        # verify that the inset model is part of the same simulation
        # (same memory address)
        assert inset_model.simulation is m.simulation
        assert inset_model.name in m.simulation._models

        # read the options block in the inset name file
        # verify that all of the specified options are there
        name_options = read_mf6_block(inset_model.name_file.filename,
                                      'options')
        expected_options = {'list', 'print_input', 'save_flows', 'newton'}
        assert not any(expected_options.difference(name_options.keys()))
        listfile_path = os.path.split(name_options['list'][0])[0]
        assert os.path.abspath(m.model_ws).lower() == \
               os.path.abspath(listfile_path).lower()
        assert name_options['newton'][0] == 'under_relaxation'

    # check that the model names were included in the external files
    pattern = os.path.join(m.model_ws, m.external_path, '*')
    for path in glob.glob(pattern):
        if 'stage_area_volume' in path:
            continue
        assert m.name in path or 'plsnt_lgr_inset' in path
def test_lak_setup(get_pleasant_mf6_with_lak):
    """Set up the LAK package from the pleasant-with-lake fixture and
    verify the written package options, dimensions, connections and
    period data."""
    m = get_pleasant_mf6_with_lak  # deepcopy(model)
    lak = m.setup_lak()
    lak.write()
    assert isinstance(lak, mf6.ModflowGwflak)
    package_filename = os.path.join(m.model_ws, lak.filename)
    assert os.path.exists(package_filename)
    # stage/area/volume table files referenced by the package should exist
    for f in lak.tables.array['tab6']:
        assert os.path.exists(f)
    options = read_mf6_block(package_filename, 'options')
    for var in [
            'boundnames', 'save_flows', 'obs6', 'surfdep', 'time_conversion',
            'length_conversion'
    ]:
        assert var in options
    # conversion factors written to the file should match the package arrays
    assert float(
        options['time_conversion'][0]) == 86400. == lak.time_conversion.array
    assert float(
        options['length_conversion'][0]) == 1. == lak.length_conversion.array
    # one table per lake; nlakeconn consistent with the connectiondata block
    assert lak.nlakes.array == len(lak.tables.array)
    assert lak.packagedata.array['nlakeconn'][0] == len(
        lak.connectiondata.array)
    # verify that there are no connections to inactive cells
    k, i, j = zip(*lak.connectiondata.array['cellid'])
    inactive = m.dis.idomain.array[k, i, j] < 1
    assert not np.any(inactive)
    assert len(lak.perioddata.array) == m.nper
    # rainfall/evaporation period data should match the source lake fluxes
    lake_fluxes = m.lake_fluxes.copy()
    lake_fluxes['rainfall'] = lake_fluxes['precipitation']
    for per in range(m.nper):
        for var in ['rainfall', 'evaporation']:
            loc = m.lak.perioddata.array[0]['laksetting'] == var
            value = m.lak.perioddata.array[per]['laksetting_data'][loc][0]
            assert np.allclose(value, lake_fluxes.loc[per, var])
def test_oc_setup(shellmound_model_with_dis, options):
    """Set up OC with parametrized options and verify the written file."""
    oc_cfg = {'head_fileout_fmt': '{}.hds', 'budget_fileout_fmt': '{}.cbc'}
    oc_cfg.update(options)
    m = shellmound_model_with_dis  # deepcopy(model)
    m.cfg['oc'] = oc_cfg
    oc = m.setup_oc()
    oc.write()
    oc_path = os.path.join(m.model_ws, oc.filename)
    assert os.path.exists(oc_path)
    assert isinstance(oc, mf6.ModflowGwfoc)
    # collapse each option's tokens into a single lower-case string
    written = {k: ' '.join(v).lower()
               for k, v in read_mf6_block(oc_path, 'options').items()}
    perioddata = read_mf6_block(oc_path, 'period')
    for key, extension in (('budget', '.cbc'), ('head', '.hds')):
        assert 'fileout' in written[key] and extension in written[key]
    assert 'save head last' in perioddata[1]
    assert 'save budget last' in perioddata[1]
# Example 10
def test_write_sfr(get_pleasant_mf6_with_sfr):
    """Verify the SFR package options written by write_input()."""
    m = get_pleasant_mf6_with_sfr
    m.write_input()
    package_path = m.sfrdata.modflow_sfr2.fn_path
    options = read_mf6_block(package_path, 'options')
    assert 'save_flows' in options
    expected = {
        'budget': ['fileout', 'pleasant_mf6.sfr.out.bin'],
        'stage': ['fileout', 'pleasant_mf6.sfr.stage.bin'],
        'obs6': ['filein', 'pleasant_mf6.sfr.obs'],
        'unit_conversion': ['86400.0'],
        'auxiliary': ['line_id'],
    }
    for key, value in expected.items():
        assert options[key] == value
# Example 11
def test_wel_setup(get_pleasant_mf6_with_dis):
    """Set up the WEL package with internal (non-external) files and
    check that a period block was written for each stress period."""
    m = get_pleasant_mf6_with_dis
    m.cfg['wel']['external_files'] = False
    wel = m.setup_wel()
    wel.write()
    assert os.path.exists(os.path.join(m.model_ws, wel.filename))
    assert isinstance(wel, mf6.ModflowGwfwel)
    assert wel.stress_period_data is not None

    # each period block in the file should have one line per record
    written = read_mf6_block(wel.filename, 'period')
    for kper, recarray in wel.stress_period_data.data.items():
        assert len(written[kper + 1]) == len(recarray)
# Example 12
def test_lak_obs_setup(get_pleasant_mf6_with_dis):
    """Set up lake observations and check that whole-lake observation
    variables reference non-integer boundnames that exist in the LAK
    packagedata block."""
    m = get_pleasant_mf6_with_dis  # deepcopy(model)
    lak = m.setup_lak()
    m.write()
    # todo: add lake obs tests
    # NOTE(review): path is relative — presumably relies on the cwd being
    # the model workspace; confirm against the fixture setup
    obsinput = read_mf6_block('{}.lak.obs'.format(m.name), 'continuous')
    boundnames = set()
    for k, v in obsinput.items():
        for line in v:
            line = line.strip().split()
            variable = line[0]
            # these variables require a boundname to be specified
            # to get the value for the whole lake
            # otherwise, specific connection is monitored
            # (if ID2 is entered, otherwise the output is 0.)
            # verify that boundname is not an integer (mf6 requirement)
            if variable in ['lak', 'wetted-area', 'conductance']:
                assert not line[-1].isdigit()
                boundnames.add(line[-1])
    # check that the boundnames exist
    packagedata = read_mf6_block(lak.filename, 'packagedata')
    for line in packagedata['packagedata']:
        line = line.strip().split()
        assert line[-1] in boundnames
# Example 13
def test_sfr_obs(get_pleasant_mf6_with_sfr):
    """Verify that SFR observation input was built from the source-data
    csv (one observation per row, obsnames taken from the configured
    column) and written to the SFR observations file."""
    m = get_pleasant_mf6_with_sfr
    m.write_input()
    # observation data should have been added from the configured csv
    obs_cfg = m.cfg['sfr']['source_data']['observations']
    obs = pd.read_csv(obs_cfg['filename'])
    assert len(m.sfrdata.observations) == len(obs)
    expected = obs[obs_cfg['obsname_column']].astype(str).tolist()
    assert m.sfrdata.observations['obsname'].tolist() == expected
    # ...and written to the observations file
    sfr_obs_filename = os.path.normpath(
        os.path.join(m.model_ws, m.sfrdata.observations_file))
    assert os.path.exists(sfr_obs_filename)
    obs_input = read_mf6_block(sfr_obs_filename, 'continuous')
    assert obs_input[sfr_obs_filename + '.output.csv'] == \
           ['# obsname obstype rno',
            '1000000 downstream-flow 22',
            '2000000 downstream-flow 25']
# Example 14
def test_sto_setup(get_pleasant_mf6_with_dis, simulate_high_k_lakes):
    """Set up the STO package with and without high-k lakes; verify the
    written external arrays, the period flags, consistency with the
    parent model, and the high-k lake storage values."""
    m = get_pleasant_mf6_with_dis  #deepcopy(model_with_grid)
    m.cfg['high_k_lakes']['simulate_high_k_lakes'] = simulate_high_k_lakes
    sto = m.setup_sto()
    sto.write()
    assert os.path.exists(os.path.join(m.model_ws, sto.filename))
    assert isinstance(sto, mf6.ModflowGwfsto)
    # each layer of sy and ss should be written to an external array file
    # that round-trips to the in-memory package array
    for var in ['sy', 'ss']:
        model_array = getattr(sto, var).array
        for k, item in enumerate(m.cfg['sto']['griddata'][var]):
            f = item['filename']
            assert os.path.exists(f)
            data = np.loadtxt(f)
            assert np.array_equal(model_array[k], data)
    # period 1 steady-state, period 2 transient
    period_data = read_mf6_block(sto.filename, 'period')
    assert period_data[1] == ['steady-state']
    assert period_data[2] == ['transient']

    # compare values to parent model
    # (inset layer -> parent layer; inset layers 0-1 both map to parent 0)
    inset_parent_layer_mapping = {0: 0, 1: 0, 2: 1, 3: 2, 4: 3}
    for var in ['ss', 'sy']:
        parent_array = m.parent.upw.__dict__[var].array
        inset_array = sto.__dict__[var].array
        # with addition of high-k lakes block,
        # ss has a different default value than parent
        if simulate_high_k_lakes and var == 'ss':
            continue
        compare_inset_parent_values(inset_array,
                                    parent_array,
                                    m.modelgrid,
                                    m.parent.modelgrid,
                                    inset_parent_layer_mapping,
                                    nodata=1.0,
                                    rtol=0.05)

    # high-k lake cells (_isbc2d == 2) only exist when simulated;
    # when present they carry the configured high-k lake sy/ss extremes
    if not simulate_high_k_lakes:
        assert not np.any(m._isbc2d == 2)
        assert sto.sy.array.max() < m.cfg['high_k_lakes']['sy']
        assert sto.ss.array.min() > m.cfg['high_k_lakes']['ss']
    else:
        assert np.any(m._isbc2d == 2)
        assert sto.sy.array.max() == m.cfg['high_k_lakes']['sy']
        assert sto.ss.array.min() == m.cfg['high_k_lakes']['ss']
# Example 15
def test_tdis_setup(get_pleasant_mf6):
    """Set up TDIS, check the period data against the model's perioddata
    table, and verify start/end date comments in the written file."""
    m = get_pleasant_mf6  #deepcopy(model)
    tdis = m.setup_tdis()
    tdis.write()
    assert os.path.exists(os.path.join(m.model_ws, tdis.filename))
    assert isinstance(tdis, mf6.ModflowTdis)
    # cast to the dtypes used by m.perioddata before comparing
    written = pd.DataFrame(tdis.perioddata.array)
    written['perlen'] = written['perlen'].astype(np.float64)
    written['nstp'] = written['nstp'].astype(np.int64)
    columns = ['perlen', 'nstp', 'tsmult']
    pd.testing.assert_frame_equal(written[columns], m.perioddata[columns])

    # check that period start/end dates were added to tdis file
    m.write_input()
    results = read_mf6_block(m.simulation.tdis.filename, 'perioddata')
    for i, line in enumerate(results['perioddata'][1:]):
        # trailing comment on each line reads "<start> to <end>"
        dates = line.split('#')[1].strip().split('to')
        assert pd.Timestamp(dates[0]) == m.perioddata.start_datetime[i]
        assert pd.Timestamp(dates[1]) == m.perioddata.end_datetime[i]
# Example 16
def test_lak_setup(get_pleasant_mf6_with_dis):
    """Set up the LAK package with internal (non-external) files and
    verify the written package options, dimensions, connections, period
    data and the connections lookup table."""
    m = get_pleasant_mf6_with_dis  # deepcopy(model)
    m.cfg['lak']['external_files'] = False
    lak = m.setup_lak()
    lak.write()
    assert isinstance(lak, mf6.ModflowGwflak)
    package_filename = os.path.join(m.model_ws, lak.filename)
    assert os.path.exists(package_filename)
    # stage/area/volume table files referenced by the package should exist
    for f in lak.tables.array['tab6']:
        assert os.path.exists(f)
    options = read_mf6_block(package_filename, 'options')
    for var in ['boundnames', 'save_flows', 'obs6', 'surfdep',
                'time_conversion', 'length_conversion']:
        assert var in options
    # conversion factors written to the file should match the package arrays
    assert float(options['time_conversion'][0]) == 86400. == lak.time_conversion.array
    assert float(options['length_conversion'][0]) == 1. == lak.length_conversion.array
    # one table per lake; nlakeconn consistent with the connectiondata block
    assert lak.nlakes.array == len(lak.tables.array)
    assert lak.packagedata.array['nlakeconn'][0] == len(lak.connectiondata.array)
    # verify that there are no connections to inactive cells
    k, i, j = zip(*lak.connectiondata.array['cellid'])
    inactive = m.dis.idomain.array[k, i, j] < 1
    assert not np.any(inactive)
    assert len(lak.perioddata.array) == m.nper
    # rainfall/evaporation period data should match the source lake fluxes
    lake_fluxes = m.lake_fluxes.copy()
    lake_fluxes['rainfall'] = lake_fluxes['precipitation']
    for per in range(m.nper):
        for var in ['rainfall', 'evaporation']:
            loc = m.lak.perioddata.array[0]['laksetting'] == var
            value = m.lak.perioddata.array[per]['laksetting_data'][loc][0]
            assert np.allclose(value, lake_fluxes.loc[per, var])

    # check the auxilliary table
    connections_lookup_file = m.cfg['lak']['output_files']['connections_lookup_file'].format(m.name)
    connections_lookup_file = os.path.join(m._tables_path, os.path.split(connections_lookup_file)[1])
    info = pd.read_csv(connections_lookup_file)
    assert not info.zone.isnull().any()
    # horizontal connections must have a cell face recorded
    assert not info.loc[info.claktype == 'horizontal', 'cellface'].isnull().any()
# Example 17
def test_wel_setup(shellmound_model_with_dis):
    """Set up the WEL package with internal files; verify that period
    blocks were written, and that total pumping by stress period matches
    the rates in the source csv files (after culling wells outside the
    grid and in invalid cells/layers)."""
    m = shellmound_model_with_dis  # deepcopy(model)
    m.cfg['wel']['external_files'] = False
    wel = m.setup_wel()
    wel.write()
    assert os.path.exists(os.path.join(m.model_ws, wel.filename))
    assert isinstance(wel, mf6.ModflowGwfwel)
    assert wel.stress_period_data is not None

    # verify that periodata blocks were written
    output = read_mf6_block(wel.filename, 'period')
    for per, ra in wel.stress_period_data.data.items():
        assert len(output[per + 1]) == len(ra)

    # check the stress_period_data against source data
    # total simulated rate per period
    sums = [
        ra['q'].sum() if ra is not None else 0
        for ra in wel.stress_period_data.array
    ]
    cellids = set()
    cellids2d = set()
    for per, ra in wel.stress_period_data.data.items():
        cellids.update(set(ra['cellid']))
        cellids2d.update(set([c[1:] for c in ra['cellid']]))

    # sum the rates from the source files
    min_thickness = m.cfg['wel']['source_data']['csvfiles'][
        'vertical_flux_distribution']['minimum_layer_thickness']
    dfs = []
    for f in m.cfg['wel']['source_data']['csvfiles']['filenames']:
        dfs.append(pd.read_csv(f))
    df = pd.concat(dfs)

    # cull wells to within model area
    l, b, r, t = m.modelgrid.bounds
    outside = (df.x.values > r) | (df.x.values < l) | (df.y.values <
                                                       b) | (df.y.values > t)
    df['outside'] = outside
    df = df.loc[~outside]
    df['start_datetime'] = pd.to_datetime(df.start_datetime)
    df['end_datetime'] = pd.to_datetime(df.end_datetime)
    from mfsetup.grid import get_ij
    i, j = get_ij(m.modelgrid, df.x.values, df.y.values)
    df['i'] = i
    df['j'] = j
    # assign each well the thickest layer at its i, j location
    thicknesses = get_layer_thicknesses(m.dis.top.array, m.dis.botm.array,
                                        m.idomain)
    b = thicknesses[:, i, j]
    b[np.isnan(b)] = 0
    df['k'] = np.argmax(b, axis=0)
    df['laythick'] = b[df['k'].values, range(b.shape[1])]
    df['idomain'] = m.idomain[df['k'], i, j]
    valid_ij = (df['idomain'] == 1) & (
        df['laythick'] > min_thickness
    )  # nwell array of valid i, j locations (with at least one valid layer)
    culled = df.loc[~valid_ij].copy()  # wells in invalid i, j locations
    df = df.loc[valid_ij].copy()  # remaining wells
    cellids_2d_2 = set(list(zip(df['i'], df['j'])))
    df.index = df.start_datetime
    # total source-data rate for each stress period, counting wells whose
    # pumping interval overlaps the period
    sums2 = []
    for i, r in m.perioddata.iterrows():
        end_datetime = r.end_datetime - pd.Timedelta(1, unit='d')
        welldata_overlaps_period = (df.start_datetime < end_datetime) & \
                                   (df.end_datetime > r.start_datetime)
        q = df.loc[welldata_overlaps_period, 'flux_m3'].sum()
        sums2.append(q)
    sums = np.array(sums)
    sums2 = np.array(sums2)
    # if this doesn't match
    # may be due to wells with invalid open intervals getting removed
    assert np.allclose(sums, sums2, rtol=0.01)