def test_add_bound_activity_up_all_modes(message_test_mp):
    """A ``bound_activity_up`` with mode 'all' caps total activity across modes."""
    scen = Scenario(message_test_mp, **SCENARIO["dantzig"]).clone()
    scen.solve(quiet=True)

    # Bound at 95 % of the unconstrained total activity
    exp = 0.95 * calculate_activity(scen).sum()
    bound = pd.DataFrame(
        dict(
            node_loc="seattle",
            technology="transport_from_seattle",
            year_act=_year,
            time="year",
            unit="cases",
            mode="all",
            value=exp,
        ),
        index=[0],
    )

    # Clone without the solution, apply the bound, and re-solve
    clone = scen.clone("foo", "baz", keep_solution=False)
    clone.check_out()
    clone.add_par("bound_activity_up", bound)
    clone.commit("foo")
    clone.solve()

    # The binding constraint is met exactly
    obs = calculate_activity(clone).sum()
    assert np.isclose(obs, exp)

    # A tighter problem can never have a lower objective value
    orig_obj = scen.var("OBJ")["lvl"]
    new_obj = clone.var("OBJ")["lvl"]
    assert new_obj >= orig_obj
def test_add_bound_activity_up(test_mp):
    """A ``bound_activity_up`` on a single mode caps that mode's activity."""
    scen = Scenario(test_mp, *msg_args)
    scen.solve()

    # Bound at half of the unconstrained total activity
    exp = 0.5 * calculate_activity(scen).sum()
    bound = pd.DataFrame(
        dict(
            node_loc='seattle',
            technology='transport_from_seattle',
            year_act=2010,
            time='year',
            unit='cases',
            mode='to_chicago',
            value=exp,
        ),
        index=[0],
    )

    # Clone without the solution, constrain one mode, and re-solve
    clone = scen.clone('foo', 'bar', keep_solution=False)
    clone.check_out()
    clone.add_par('bound_activity_up', bound)
    clone.commit('foo')
    clone.solve()

    # The constrained mode hits the bound exactly
    obs = calculate_activity(clone).loc['to_chicago']
    assert np.isclose(obs, exp)

    # Adding a constraint cannot lower the objective value
    orig_obj = scen.var('OBJ')['lvl']
    new_obj = clone.var('OBJ')['lvl']
    assert new_obj >= orig_obj
def test_add_bound_activity_up_all_modes(message_test_mp):
    """A ``bound_activity_up`` with mode 'all' caps total activity across modes."""
    scen = Scenario(message_test_mp, **SCENARIO['dantzig']).clone()
    scen.solve()

    # Bound at 95 % of the unconstrained total activity
    exp = 0.95 * calculate_activity(scen).sum()
    bound = pd.DataFrame(
        dict(
            node_loc='seattle',
            technology='transport_from_seattle',
            year_act=_year,
            time='year',
            unit='cases',
            mode='all',
            value=exp,
        ),
        index=[0],
    )

    # Clone without the solution, apply the bound, and re-solve
    clone = scen.clone('foo', 'baz', keep_solution=False)
    clone.check_out()
    clone.add_par('bound_activity_up', bound)
    clone.commit('foo')
    clone.solve()

    # Total activity hits the bound exactly
    obs = calculate_activity(clone).sum()
    assert np.isclose(obs, exp)

    # Adding a constraint cannot lower the objective value
    orig_obj = scen.var('OBJ')['lvl']
    new_obj = clone.var('OBJ')['lvl']
    assert new_obj >= orig_obj
def solve_modified(base: Scenario, new_name: str):
    """Context manager for a cloned scenario.

    At the end of the block, the modified Scenario yielded by
    :func:`solve_modified` is committed, set as default, and solved. Use in a
    ``with:`` statement to make small modifications and leave a variable in
    the current scope with the solved scenario.

    Examples
    --------
    >>> with solve_modified(base_scen, "new name") as s:
    ...     s.add_par( ... )  # Modify the scenario
    ...     # `s` is solved at the end of the block

    Yields
    ------
    .Scenario
        Cloned from `base`, with the scenario name `new_name` and no solution.
    """
    # Clone without the solution so the caller may edit freely
    scenario = base.clone(
        scenario=new_name,
        annotation=f"Cloned by solve_modified() from {repr(base.scenario)}",
        keep_solution=False,
    )
    scenario.check_out()

    # Hand control to the `with:` body
    yield scenario

    # Finalize: commit the caller's edits, promote, and solve
    scenario.commit("Commit by solve_modified() at end of 'with:' statement")
    scenario.set_as_default()
    scenario.solve()
def test_new_timeseries_long_name64plus(message_test_mp):
    """A time-series variable name longer than 64 characters can be stored."""
    scen = Scenario(message_test_mp, **SCENARIO["dantzig multi-year"])
    scen = scen.clone(keep_solution=False)
    scen.check_out(timeseries_only=True)

    # Single-row frame whose 'variable' entry exceeds 64 characters
    df = pd.DataFrame(
        {
            "region": ["India"],
            "variable": [
                "Emissions|CO2|Energy|Demand|Transportation|Aviation|"
                "Domestic|Freight|Oil"
            ],
            "unit": ["Mt CO2/yr"],
            "2012": [0.257009],
        }
    )
    scen.add_timeseries(df)
    scen.commit("importing a testing timeseries")
def test_add_cat_unique(test_mp):
    """Adding a unique 'year' category replaces any prior member."""
    scen = Scenario(test_mp, *msg_multiyear_args)
    scen2 = scen.clone(keep_solution=False)
    scen2.check_out()

    # is_unique=True means 'firstmodelyear' holds exactly one year
    scen2.add_cat('year', 'firstmodelyear', 2020, True)
    df = scen2.cat('year', 'firstmodelyear')
    npt.assert_array_equal(df, ['2020'])

    # Do not persist the test modification
    scen2.discard_changes()
def test_share_commodity_lo(test_mp):
    """A ``share_commodity_lo`` constraint is honoured and raises the objective."""
    scen = Scenario(test_mp, *msg_args)
    scen.solve()

    def calc_share(s):
        # Share of Seattle deliveries among all deliveries into New York
        a = calculate_activity(
            s, tec='transport_from_seattle').loc['to_new-york']
        b = calculate_activity(
            s, tec='transport_from_san-diego').loc['to_new-york']
        return a / (a + b)

    # Force the share all the way up to the current value
    exp = 1.0 * calc_share(scen)

    # Clone and wire up the share constraint machinery
    clone = scen.clone(scenario='share_commodity_lo', keep_solution=False)
    clone.check_out()
    clone.add_cat('technology', 'share', 'transport_from_seattle')
    clone.add_cat('technology', 'total',
                  ['transport_from_seattle', 'transport_from_san-diego'])
    clone.add_set('shares', 'test-share')

    # Numerator: deliveries by the 'share' technology set
    clone.add_set('map_shares_commodity_share', pd.DataFrame(
        dict(
            shares='test-share',
            node_share='new-york',
            node='new-york',
            type_tec='share',
            mode='all',
            commodity='cases',
            level='consumption',
        ),
        index=[0]))

    # Denominator: deliveries by the 'total' technology set
    clone.add_set('map_shares_commodity_total', pd.DataFrame(
        dict(
            shares='test-share',
            node_share='new-york',
            node='new-york',
            type_tec='total',
            mode='all',
            commodity='cases',
            level='consumption',
        ),
        index=[0]))

    # Lower bound on the share itself
    clone.add_par('share_commodity_lo', pd.DataFrame(
        dict(
            shares='test-share',
            node_share='new-york',
            year_act=2010,
            time='year',
            unit='cases',
            value=exp,
        ),
        index=[0]))

    clone.commit('foo')
    clone.solve()

    # The share matches the bound exactly
    obs = calc_share(clone)
    assert np.isclose(obs, exp)

    # Adding a constraint cannot lower the objective value
    orig_obj = scen.var('OBJ')['lvl']
    new_obj = clone.var('OBJ')['lvl']
    assert new_obj >= orig_obj
def test_add_bound_activity_up_modes(test_mp):
    """``bound_activity_up`` works both per-mode and with mode 'all'."""

    def calculate(scen):
        # Activity of transport_from_seattle, broken out by mode
        act = scen.var('ACT')
        return act.groupby(['technology', 'mode'])['lvl'].sum().loc[
            'transport_from_seattle']

    scen = Scenario(test_mp, *msg_args)
    scen.solve()

    # Shared key columns; 'mode' and 'value' are filled per sub-test
    data = pd.DataFrame(
        dict(
            node_loc='seattle',
            technology='transport_from_seattle',
            year_act=2010,
            time='year',
            unit='cases',
        ),
        index=[0])

    # --- Sub-test 1: bound a single mode at 50 % of total activity
    clone = scen.clone('foo', 'bar', keep_solution=False)
    clone.check_out()
    exp = 0.5 * calculate(scen).sum()
    data['mode'] = 'to_chicago'
    data['value'] = exp
    clone.add_par('bound_activity_up', data)
    clone.commit('foo')
    clone.solve()
    obs = calculate(clone).loc['to_chicago']
    assert np.isclose(obs, exp)

    # --- Sub-test 2: bound all modes at 95 % of total activity
    clone2 = scen.clone('foo', 'baz', keep_solution=False)
    clone2.check_out()
    exp = 0.95 * calculate(scen).sum()
    data['mode'] = 'all'
    data['value'] = exp
    clone2.add_par('bound_activity_up', data)
    clone2.commit('foo')
    clone2.solve()
    obs = calculate(clone2).sum()
    assert np.isclose(obs, exp)
def test_init(test_mp):
    """MACRO initialization leaves the scenario solvable with the known objective."""
    scen = Scenario(test_mp, *msg_args)
    scen = scen.clone('foo', 'bar')
    scen.check_out()

    # Add the MACRO structure to the cloned scenario
    macro.init(scen)
    scen.commit('foo')
    scen.solve()

    # Known optimal objective of the Dantzig canning problem
    assert np.isclose(scen.var('OBJ')['lvl'], 153.675)
def test_years_active_extend(test_mp):
    """Extending the horizon extends ``years_active`` for an existing vintage."""
    scen = Scenario(test_mp, *msg_multiyear_args)
    scen = scen.clone(keep_solution=False)
    scen.check_out()

    # Append two periods beyond the original horizon
    scen.add_set('year', ['2040', '2050'])
    scen.add_par('duration_period', '2040', 10, 'y')
    scen.add_par('duration_period', '2050', 10, 'y')

    # The 2020 vintage now remains active through 2040
    df = scen.years_active('seattle', 'canning_plant', '2020')
    npt.assert_array_equal(df, [2020, 2030, 2040])

    # Do not persist the test modification
    scen.discard_changes()
def test_rename_technology(test_mp):
    """Renaming a technology updates parameter data and keeps the model solvable."""
    scen = Scenario(test_mp, *msg_args)
    assert scen.par('output')['technology'].isin(['canning_plant']).any()

    # Rename on a clone; the old name must disappear, the new one appear
    clone = scen.clone('foo', 'bar')
    clone.rename('technology', {'canning_plant': 'foo_bar'})
    assert not clone.par('output')['technology'].isin(['canning_plant']).any()
    assert clone.par('output')['technology'].isin(['foo_bar']).any()

    # The renamed model solves to the known objective value
    clone.solve()
    assert np.isclose(clone.var('OBJ')['lvl'], 153.675)
def test_add_cat(test_mp):
    """Multiple technologies can be added to one category and read back."""
    scen = Scenario(test_mp, *msg_args)
    scen2 = scen.clone(keep_solution=False)
    scen2.check_out()

    # Categorize both transport technologies as 'trade'
    members = ['transport_from_san-diego', 'transport_from_seattle']
    scen2.add_cat('technology', 'trade', members)

    # Read-back returns exactly the members added
    df = scen2.cat('technology', 'trade')
    npt.assert_array_equal(df, members)

    # Do not persist the test modification
    scen2.discard_changes()
def test_rename_technology_no_rm(test_mp):
    """``rename(..., keep=True)`` copies data to the new name without removal."""
    scen = Scenario(test_mp, *msg_args)
    assert scen.par('output')['technology'].isin(['canning_plant']).any()

    clone = scen.clone('foo', 'bar')
    # also test if already checked out
    clone.check_out()
    clone.rename('technology', {'canning_plant': 'foo_bar'}, keep=True)

    # With keep=True both the old and the new name are present
    assert clone.par('output')['technology'].isin(['canning_plant']).any()
    assert clone.par('output')['technology'].isin(['foo_bar']).any()
def test_solve_legacy_scenario(tmp_path, test_data_path):
    """A legacy-scheme scenario re-solves to its stored objective value."""
    db_path = create_test_platform(tmp_path, test_data_path, "legacy")
    mp = Platform(backend="jdbc", driver="hsqldb", path=db_path)
    scen = Scenario(mp, model="canning problem (MESSAGE scheme)",
                    scenario="standard")

    # Objective value stored with the legacy solution
    exp = scen.var("OBJ")["lvl"]

    # solve scenario, assert that the new objective value is close to previous
    scen = scen.clone(keep_solution=False)
    scen.solve()
    assert np.isclose(exp, scen.var("OBJ")["lvl"])
def test_init(test_mp):
    """MACRO initialization does not change the optimal objective value."""
    scen = Scenario(test_mp, *msg_args)

    # Objective before adding the MACRO structure
    obs = scen.var('OBJ')['lvl']

    scen = scen.clone('foo', 'bar', keep_solution=False)
    scen.check_out()
    macro.init(scen)
    scen.commit('foo')
    scen.solve()

    # Objective after re-solving with MACRO structure present
    exp = scen.var('OBJ')['lvl']
    assert np.isclose(obs, exp)
def test_solve_legacy_scenario(tmp_path, test_data_path):
    """A legacy-scheme scenario re-solves to its stored objective value."""
    db_path = create_test_platform(tmp_path, test_data_path, 'legacy')
    mp = Platform(backend='jdbc', driver='hsqldb', path=db_path)
    scen = Scenario(mp, model='canning problem (MESSAGE scheme)',
                    scenario='standard')

    # Objective value stored with the legacy solution
    exp = scen.var('OBJ')['lvl']

    # solve scenario, assert that the new objective value is close to previous
    scen = scen.clone(keep_solution=False)
    scen.solve()
    assert np.isclose(exp, scen.var('OBJ')['lvl'])
def test_new_timeseries_long_name64plus(test_mp):
    """A time-series variable name longer than 64 characters can be stored."""
    scen = Scenario(test_mp, *msg_multiyear_args)
    scen = scen.clone(keep_solution=False)
    scen.check_out(timeseries_only=True)

    # Single-row frame whose 'variable' entry exceeds 64 characters
    df = pd.DataFrame({
        'region': ['India'],
        'variable': [
            'Emissions|CO2|Energy|Demand|Transportation|Aviation|'
            'Domestic|Freight|Oil'
        ],
        'unit': ['Mt CO2/yr'],
        '2012': [0.257009],
    })
    scen.add_timeseries(df)
    scen.commit('importing a testing timeseries')
def test_rename_technology(test_mp):
    """Renaming a technology preserves data and the optimal objective value.

    The scenario is solved once, a technology is renamed on a clone, and the
    clone is re-solved; the rename must not alter the optimum.
    """
    scen = Scenario(test_mp, *msg_args)
    scen.solve()
    assert scen.par('output')['technology'].isin(['canning_plant']).any()

    # Objective of the original model, before the rename
    exp_obj = scen.var('OBJ')['lvl']

    clone = scen.clone('foo', 'bar', keep_solution=False)
    clone.rename('technology', {'canning_plant': 'foo_bar'})

    # The old name is gone; the new name carries the data
    assert not clone.par('output')['technology'].isin(['canning_plant']).any()
    assert clone.par('output')['technology'].isin(['foo_bar']).any()

    clone.solve()
    obs_obj = clone.var('OBJ')['lvl']
    # Compare solver-produced floats with a tolerance: exact `==` is fragile
    # and inconsistent with the other tests in this file, which use np.isclose.
    assert np.isclose(obs_obj, exp_obj)
def test_init(message_test_mp):
    """``MACRO.initialize`` adds the MACRO items and keeps the model solvable."""
    scen = Scenario(message_test_mp, **SCENARIO["dantzig"])
    scen = scen.clone("foo", "bar")
    scen.check_out()

    MACRO.initialize(scen)
    scen.commit("foo")
    scen.solve(quiet=True)

    # Known optimal objective of the Dantzig canning problem
    assert np.isclose(scen.var("OBJ")["lvl"], 153.675)

    # One representative item of each kind added by MACRO
    assert "mapping_macro_sector" in scen.set_list()
    assert "aeei" in scen.par_list()
    assert "DEMAND" in scen.var_list()
    assert "COST_ACCOUNTING_NODAL" in scen.equ_list()
def test_init(message_test_mp):
    """``MACRO.initialize`` adds the MACRO items and keeps the model solvable."""
    scen = Scenario(message_test_mp, **SCENARIO['dantzig'])
    scen = scen.clone('foo', 'bar')
    scen.check_out()

    MACRO.initialize(scen)
    scen.commit('foo')
    scen.solve()

    # Known optimal objective of the Dantzig canning problem
    assert np.isclose(scen.var('OBJ')['lvl'], 153.675)

    # One representative item of each kind added by MACRO
    assert 'mapping_macro_sector' in scen.set_list()
    assert 'aeei' in scen.par_list()
    assert 'DEMAND' in scen.var_list()
    assert 'COST_ACCOUNTING_NODAL' in scen.equ_list()
def test_excel_read_write(message_test_mp, tmp_path):
    """A scenario round-trips through Excel, including custom items."""
    # Path to temporary file
    tmp_path /= 'excel_read_write.xlsx'
    # Convert to string to ensure this can be handled
    fname = str(tmp_path)

    scen1 = Scenario(message_test_mp, **SCENARIO['dantzig'])
    scen1 = scen1.clone(keep_solution=False)
    scen1.check_out()

    # Add a custom set and a parameter indexed by it
    scen1.init_set('new_set')
    scen1.add_set('new_set', 'member')
    scen1.init_par('new_par', idx_sets=['new_set'])
    scen1.add_par('new_par', 'member', 2, '-')
    scen1.commit('new set and parameter added.')

    # Writing to Excel without solving
    scen1.to_excel(fname)

    # Writing to Excel when scenario has a solution
    scen1.solve()
    scen1.to_excel(fname)

    scen2 = Scenario(message_test_mp, model='foo', scenario='bar',
                     version='new')

    # Fails without init_items=True
    with pytest.raises(ValueError, match="no set 'new_set'"):
        scen2.read_excel(fname)

    # Succeeds with init_items=True
    scen2.read_excel(fname, init_items=True, commit_steps=True)

    # Standard parameter data survives the round trip
    pdt.assert_frame_equal(scen1.par('input'), scen2.par('input'))

    # Custom items survive the round trip
    assert scen2.has_par('new_par')
    assert float(scen2.par('new_par')['value']) == 2

    scen2.commit('foo')  # must be checked in
    scen2.solve()
    assert np.isclose(scen2.var('OBJ')['lvl'], scen1.var('OBJ')['lvl'])
def test_excel_read_write(message_test_mp, tmp_path):
    """A scenario round-trips through Excel, including custom items."""
    # Path to temporary file
    tmp_path /= "excel_read_write.xlsx"
    # Convert to string to ensure this can be handled
    fname = str(tmp_path)

    scen1 = Scenario(message_test_mp, **SCENARIO["dantzig"])
    scen1 = scen1.clone(keep_solution=False)
    scen1.check_out()

    # Add a custom set and a parameter indexed by it
    scen1.init_set("new_set")
    scen1.add_set("new_set", "member")
    scen1.init_par("new_par", idx_sets=["new_set"])
    scen1.add_par("new_par", "member", 2, "-")
    scen1.commit("new set and parameter added.")

    # Writing to Excel without solving
    scen1.to_excel(fname)

    # Writing to Excel when scenario has a solution
    scen1.solve()
    scen1.to_excel(fname)

    scen2 = Scenario(message_test_mp, model="foo", scenario="bar",
                     version="new")

    # Fails without init_items=True
    with pytest.raises(ValueError, match="no set 'new_set'"):
        scen2.read_excel(fname)

    # Succeeds with init_items=True
    scen2.read_excel(fname, init_items=True, commit_steps=True)

    # Standard parameter data survives the round trip
    pdt.assert_frame_equal(scen1.par("input"), scen2.par("input"))

    # Custom items survive the round trip
    assert scen2.has_par("new_par")
    assert float(scen2.par("new_par")["value"]) == 2

    scen2.solve()
    assert np.isclose(scen2.var("OBJ")["lvl"], scen1.var("OBJ")["lvl"])
def test_add_share_mode_lo(message_test_mp):
    """A ``share_mode_lo`` constraint is honoured and raises the objective."""
    scen = Scenario(message_test_mp, **SCENARIO["dantzig"]).clone()
    scen.solve(quiet=True)

    def calc_share(s):
        # Fraction of San Diego transport going to New York
        a = calculate_activity(
            s, tec="transport_from_san-diego").loc["to_new-york"]
        b = calculate_activity(s, tec="transport_from_san-diego").sum()
        return a / b

    # Require 5 % more than the unconstrained share
    exp = 1.05 * calc_share(scen)

    # Clone and add the share lower bound
    clone = scen.clone("foo", "baz", keep_solution=False)
    clone.check_out()
    clone.add_set("shares", "test-share")
    clone.add_par(
        "share_mode_lo",
        pd.DataFrame(
            dict(
                shares="test-share",
                node_share="san-diego",
                technology="transport_from_san-diego",
                mode="to_new-york",
                year_act=_year,
                time="year",
                unit="cases",
                value=exp,
            ),
            index=[0],
        ),
    )
    clone.commit("foo")
    clone.solve()

    # The share matches the bound exactly
    obs = calc_share(clone)
    assert np.isclose(obs, exp)

    # Adding a constraint cannot lower the objective value
    orig_obj = scen.var("OBJ")["lvl"]
    new_obj = clone.var("OBJ")["lvl"]
    assert new_obj >= orig_obj
def test_new_timeseries_long_name64(message_test_mp):
    """A time-series variable name of exactly 64 characters can be stored."""
    scen = Scenario(message_test_mp, **SCENARIO['dantzig multi-year'])
    scen = scen.clone(keep_solution=False)
    scen.check_out(timeseries_only=True)

    # Single-row frame whose 'variable' entry is exactly 64 characters
    df = pd.DataFrame({
        'region': ['India'],
        'variable': [
            'Emissions|CO2|Energy|Demand|Transportation|Aviation|'
            'Domestic|Fre'
        ],
        'unit': ['Mt CO2/yr'],
        '2012': [0.257009],
    })
    scen.add_timeseries(df)
    scen.commit('importing a testing timeseries')
def test_add_share_mode_lo(message_test_mp):
    """A ``share_mode_lo`` constraint is honoured and raises the objective."""
    scen = Scenario(message_test_mp, **SCENARIO['dantzig']).clone()
    scen.solve()

    def calc_share(s):
        # Fraction of San Diego transport going to New York
        a = calculate_activity(
            s, tec='transport_from_san-diego').loc['to_new-york']
        b = calculate_activity(s, tec='transport_from_san-diego').sum()
        return a / b

    # Require 5 % more than the unconstrained share
    exp = 1.05 * calc_share(scen)

    # Clone and add the share lower bound
    clone = scen.clone('foo', 'baz', keep_solution=False)
    clone.check_out()
    clone.add_set('shares', 'test-share')
    clone.add_par(
        'share_mode_lo',
        pd.DataFrame(
            dict(
                shares='test-share',
                node_share='san-diego',
                technology='transport_from_san-diego',
                mode='to_new-york',
                year_act=_year,
                time='year',
                unit='cases',
                value=exp,
            ),
            index=[0]))
    clone.commit('foo')
    clone.solve()

    # The share matches the bound exactly
    obs = calc_share(clone)
    assert np.isclose(obs, exp)

    # Adding a constraint cannot lower the objective value
    orig_obj = scen.var('OBJ')['lvl']
    new_obj = clone.var('OBJ')['lvl']
    assert new_obj >= orig_obj
def test_add_share_mode_up(test_mp):
    """A ``share_mode_up`` constraint is honoured and raises the objective."""
    scen = Scenario(test_mp, *msg_args).clone()
    scen.solve()

    def calc_share(s):
        # Fraction of Seattle transport going to Chicago
        a = calculate_activity(s,
                               tec='transport_from_seattle').loc['to_chicago']
        b = calculate_activity(s, tec='transport_from_seattle').sum()
        return a / b

    # Cap the share at 95 % of its unconstrained value
    exp = 0.95 * calc_share(scen)

    # Clone and add the share upper bound
    clone = scen.clone(scenario='share_mode_up', keep_solution=False)
    clone.check_out()
    clone.add_set('shares', 'test-share')
    clone.add_par(
        'share_mode_up',
        pd.DataFrame(
            dict(
                shares='test-share',
                node_share='seattle',
                technology='transport_from_seattle',
                mode='to_chicago',
                year_act=2010,
                time='year',
                unit='cases',
                value=exp,
            ),
            index=[0]))
    clone.commit('foo')
    clone.solve()

    # The share matches the bound exactly
    obs = calc_share(clone)
    assert np.isclose(obs, exp)

    # Adding a constraint cannot lower the objective value
    orig_obj = scen.var('OBJ')['lvl']
    new_obj = clone.var('OBJ')['lvl']
    assert new_obj >= orig_obj
def test_clone(tmpdir):
    """Cloning across platforms copies a scenario between two databases."""
    # Two local platforms
    mp1 = ixmp.Platform(driver="hsqldb", path=tmpdir / "mp1")
    mp2 = ixmp.Platform(driver="hsqldb", path=tmpdir / "mp2")

    # A minimal scenario
    scen1 = Scenario(mp1, model="model", scenario="scenario", version="new")
    scen1.add_spatial_sets({"country": "Austria"})
    scen1.add_set("technology", "bar")
    scen1.add_horizon(year=[2010, 2020])
    scen1.commit("add minimal sets for testing")

    assert len(mp1.scenario_list(default=False)) == 1

    # Clone
    scen2 = scen1.clone(platform=mp2)

    # Return type of ixmp.Scenario.clone is message_ix.Scenario
    assert isinstance(scen2, Scenario)

    # Close and re-open both databases
    mp1.close_db()  # TODO this should be done automatically on del
    mp2.close_db()  # TODO this should be done automatically on del
    del mp1, mp2
    mp1 = ixmp.Platform(driver="hsqldb", path=tmpdir / "mp1")
    mp2 = ixmp.Platform(driver="hsqldb", path=tmpdir / "mp2")

    # Same scenarios present in each database
    assert all(
        mp1.scenario_list(default=False) == mp2.scenario_list(default=False))

    # Load both scenarios
    scen1 = Scenario(mp1, "model", "scenario")
    scen2 = Scenario(mp2, "model", "scenario")

    # Contents are identical
    assert all(scen1.set("node") == scen2.set("node"))
    assert all(scen1.set("year") == scen2.set("year"))
def test_clone(tmpdir):
    """Cloning across platforms copies a scenario between two databases."""
    # Two local platforms
    mp1 = ixmp.Platform(tmpdir / 'mp1', dbtype='HSQLDB')
    mp2 = ixmp.Platform(tmpdir / 'mp2', dbtype='HSQLDB')

    # A minimal scenario
    scen1 = Scenario(mp1, model='model', scenario='scenario', version='new')
    scen1.add_spatial_sets({'country': 'Austria'})
    scen1.add_set('technology', 'bar')
    scen1.add_horizon({'year': [2010, 2020]})
    scen1.commit('add minimal sets for testing')

    assert len(mp1.scenario_list(default=False)) == 1

    # Clone
    scen2 = scen1.clone(platform=mp2)

    # Return type of ixmp.Scenario.clone is message_ix.Scenario
    assert isinstance(scen2, Scenario)

    # Close and re-open both databases
    mp1.close_db()  # TODO this should be done automatically on del
    mp2.close_db()  # TODO this should be done automatically on del
    del mp1, mp2
    mp1 = ixmp.Platform(tmpdir / 'mp1', dbtype='HSQLDB')
    mp2 = ixmp.Platform(tmpdir / 'mp2', dbtype='HSQLDB')

    # Same scenarios present in each database
    assert all(
        mp1.scenario_list(default=False) == mp2.scenario_list(default=False))

    # Load both scenarios
    scen1 = Scenario(mp1, 'model', 'scenario')
    scen2 = Scenario(mp2, 'model', 'scenario')

    # Contents are identical
    assert all(scen1.set('node') == scen2.set('node'))
    assert all(scen1.set('year') == scen2.set('year'))
def test_add_cat_unique(message_test_mp):
    """Adding a unique 'year' category replaces any prior member."""
    scen = Scenario(message_test_mp, **SCENARIO["dantzig multi-year"])
    scen2 = scen.clone(keep_solution=False)
    scen2.check_out()

    # is_unique=True means 'firstmodelyear' holds exactly one year
    scen2.add_cat("year", "firstmodelyear", 1963, True)
    assert [1963] == scen2.cat("year", "firstmodelyear")
def test_commodity_share_up(message_test_mp):
    """Original solution
    -----------------
    +----------------+--------------------------+-------------+-------+-------+
    | lvl            | technology               | mode        | node_loc | mrg |
    +================+==========================+=============+=======+=======+
    |  350 | canning_plant            | production  | seattle   | 0.000 |
    |   50 | transport_from_seattle   | to_new-york | seattle   | 0.000 |
    |  300 | transport_from_seattle   | to_chicago  | seattle   | 0.000 |
    |    0 | transport_from_seattle   | to_topeka   | seattle   | 0.036 |
    |  600 | canning_plant            | production  | san-diego | 0.000 |
    |  275 | transport_from_san-diego | to_new-york | san-diego | 0.000 |
    |    0 | transport_from_san-diego | to_chicago  | san-diego | 0.009 |
    |  275 | transport_from_san-diego | to_topeka   | san-diego | 0.000 |

    Constraint test
    ---------------
    Seattle canning_plant production (original: 350) is limited to 50% of all
    transport_from_san-diego (original: 550). Expected outcome: some increase
    of transport_from_san-diego with some decrease of production in seattle.
    """

    def calc_share(s):
        # Seattle canning-plant production relative to San Diego transport
        numer = s.var(
            "ACT",
            filters={
                "technology": ["canning_plant"],
                "node_loc": ["seattle"],
            },
        )["lvl"][0]
        denom = calculate_activity(s, tec="transport_from_san-diego").sum()
        return numer / denom

    def add_data(s, map_df):
        # Common constraint setup shared by both sub-tests
        s.add_cat("technology", "share", "canning_plant")
        s.add_cat("technology", "total", "transport_from_san-diego")
        s.add_set("shares", "test-share")
        # Numerator mapping: Seattle canning-plant production
        s.add_set(
            "map_shares_commodity_share",
            pd.DataFrame(
                dict(
                    shares="test-share",
                    node_share="seattle",
                    node="seattle",
                    type_tec="share",
                    mode="production",
                    commodity="cases",
                    level="supply",
                ),
                index=[0],
            ),
        )
        # Denominator mapping is sub-test-specific
        s.add_set("map_shares_commodity_total", map_df)
        # Cap the share at 50 %
        s.add_par(
            "share_commodity_up",
            pd.DataFrame(
                dict(
                    shares="test-share",
                    node_share="seattle",
                    year_act=_year,
                    time="year",
                    unit="%",
                    value=0.5,
                ),
                index=[0],
            ),
        )

    # Reference solution without the share constraint
    scen = Scenario(message_test_mp, **SCENARIO["dantzig"]).clone()
    scen.solve(quiet=True)
    exp = 0.5

    # The unconstrained share must exceed the bound, or the test is vacuous
    orig = calc_share(scen)
    assert orig > exp

    # --- Sub-test 1: denominator modes listed explicitly
    map_df = pd.DataFrame(
        dict(
            shares="test-share",
            node_share="seattle",
            node="san-diego",
            type_tec="total",
            mode=["to_new-york", "to_chicago", "to_topeka"],
            commodity="cases",
            level="supply",
        ))
    clone = scen.clone(scenario="share_mode_list", keep_solution=False)
    clone.check_out()
    add_data(clone, map_df)
    clone.commit("foo")
    clone.solve()

    # Constrained share must respect the bound
    obs = calc_share(clone)
    assert obs <= exp

    # Adding a constraint cannot lower the objective value
    orig_obj = scen.var("OBJ")["lvl"]
    new_obj = clone.var("OBJ")["lvl"]
    assert new_obj >= orig_obj

    # --- Sub-test 2: denominator with mode == 'all'
    map_df2 = pd.DataFrame(
        dict(
            shares="test-share",
            node_share="seattle",
            node="san-diego",
            type_tec="total",
            mode="all",
            commodity="cases",
            level="supply",
        ),
        index=[0],
    )
    clone2 = scen.clone(scenario="share_all_modes", keep_solution=False)
    clone2.check_out()
    add_data(clone2, map_df2)
    clone2.commit("foo")
    clone2.solve()

    # Constrained share must respect the bound...
    obs2 = calc_share(clone2)
    assert obs2 <= exp
    # ...and be identical to the explicit-mode formulation
    assert obs == obs2

    # Adding a constraint cannot lower the objective value
    orig_obj = scen.var("OBJ")["lvl"]
    new_obj = clone2.var("OBJ")["lvl"]
    assert new_obj >= orig_obj