Example No. 1
def test_run_clone(tmpdir):
    # this test is designed to cover the full functionality of the GAMS API
    # - initialize a new ixmp platform instance
    # - create a new scenario based on Dantzig's tutorial transport model
    # - solve the model and read back the solution from the output
    # - perform tests on the objective value and the timeseries data
    mp = Platform(tmpdir, dbtype='HSQLDB')
    scen = make_dantzig(mp, solve=True)
    assert np.isclose(scen.var('OBJ')['lvl'], 153.675)
    assert scen.firstmodelyear == 1963
    pdt.assert_frame_equal(scen.timeseries(iamc=True), TS_DF)

    # cloning with `keep_solution=True` keeps all timeseries and the solution
    # (same behaviour as `ixmp.Scenario`)
    scen2 = scen.clone(keep_solution=True)
    assert np.isclose(scen2.var('OBJ')['lvl'], 153.675)
    assert scen2.firstmodelyear == 1963
    pdt.assert_frame_equal(scen2.timeseries(iamc=True), TS_DF)

    # cloning with `keep_solution=False` drops the solution and only keeps
    # timeseries set as `meta=True` or prior to the first model year
    # (DIFFERENT behaviour from `ixmp.Scenario`)
    scen3 = scen.clone(keep_solution=False)
    assert np.isnan(scen3.var('OBJ')['lvl'])
    assert scen3.firstmodelyear == 1963
    pdt.assert_frame_equal(scen3.timeseries(iamc=True), TS_DF_CLEARED)
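Example No. 1 above uses the older positional Platform(path, dbtype=...) signature; Examples No. 4 to 6 below use the newer keyword form. A minimal sketch of the same clone behaviour against the newer constructor, assuming the same tmpdir fixture and make_dantzig helper as above:

# Sketch only: newer keyword-based ixmp constructor (cf. Example No. 4).
mp = Platform(driver='hsqldb', path=tmpdir / 'db')
scen = make_dantzig(mp, solve=True)

# keep_solution=True preserves the solution and all timeseries;
# keep_solution=False drops the solution and post-first-model-year timeseries.
clone_kept = scen.clone(keep_solution=True)
clone_cleared = scen.clone(keep_solution=False)
assert np.isclose(clone_kept.var('OBJ')['lvl'], 153.675)
assert np.isnan(clone_cleared.var('OBJ')['lvl'])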
Example No. 2
def test_legacy_mp(tmp_path_factory, test_data_path):
    """Path to a database properties file referring to a test database."""
    # adapting `ixmp.testing:test_mp()`
    db_path = Path(str(tmp_path_factory.mktemp('test_mp_props')))
    db_name = 'message_ix_legacy'
    props = create_local_testdb(db_path, test_data_path / 'testdb', db_name)
    mp = Platform(props)

    yield mp
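
Because this function yields a Platform, it is used as a pytest fixture in the original test suite (the @pytest.fixture decorator is not shown in this excerpt). A hedged sketch of a test consuming it; the test name and the units() check are illustrative assumptions:

def test_legacy_platform_lists_units(test_legacy_mp):
    # pytest injects the Platform yielded by the fixture above.
    units = test_legacy_mp.units()  # ixmp.Platform.units() lists registered units
    assert isinstance(units, list)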
Example No. 3
def session_context(pytestconfig, tmp_env):
    """A Context connected to a temporary, in-memory database.

    Uses the :func:`.tmp_env` fixture from ixmp.
    """
    ctx = Context.only()

    # Temporary, empty local directory for local data
    session_tmp_dir = Path(pytestconfig._tmp_path_factory.mktemp("data"))

    # Set the cache path according to whether pytest --local-cache was given. If True,
    # pick up the existing setting from the user environment. If False, use a pytest-
    # managed cache directory that persists across test sessions.
    ctx.cache_path = (
        ctx.local_data.joinpath("cache") if pytestconfig.option.local_cache
        # TODO use pytestconfig.cache.mkdir() when pytest >= 6.3 is available
        else Path(pytestconfig.cache.makedir("cache")))

    # Other local data in the temporary directory for this session only
    ctx.local_data = session_tmp_dir

    # If message_data is not installed, use a temporary path for private_data_path()
    message_data_path = util.MESSAGE_DATA_PATH
    if util.MESSAGE_DATA_PATH is None:
        util.MESSAGE_DATA_PATH = session_tmp_dir.joinpath("message_data")

        # Create some subdirectories
        util.MESSAGE_DATA_PATH.joinpath("data", "tests").mkdir(parents=True)

    platform_name = "message-ix-models"

    # Add a platform connected to an in-memory database
    # NB cannot call Config.add_platform() here because it does not support supplying a
    #    URL for a HyperSQL database.
    # TODO add that feature upstream.
    ixmp_config.values["platform"][platform_name] = {
        "class": "jdbc",
        "driver": "hsqldb",
        "url": f"jdbc:hsqldb:mem://{platform_name}",
        "jvmargs": pytestconfig.option.jvmargs,
    }

    # Launch Platform and connect to testdb (reconnect if closed)
    mp = Platform(name=platform_name)
    mp.open_db()

    ctx.platform_info["name"] = platform_name

    try:
        yield ctx
    finally:
        ctx.close_db()
        ixmp_config.remove_platform(platform_name)

        # Restore prior value
        util.MESSAGE_DATA_PATH = message_data_path
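
A hedged sketch of how a test might consume this fixture: it reads the platform name stored on the Context and connects to the same in-memory database. The test name is an illustrative assumption:

def test_uses_session_context(session_context):
    # The fixture registered an in-memory HyperSQL platform under this name.
    name = session_context.platform_info["name"]
    assert name == "message-ix-models"

    # Connect to the same database; scenario_list() returns a pandas.DataFrame.
    mp = Platform(name=name)
    assert mp.scenario_list() is not None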
Example No. 4
def test_multi_db_run(tmpdir):
    # create a new instance of the transport problem and solve it
    mp1 = Platform(driver="hsqldb", path=tmpdir / "mp1")
    scen1 = make_dantzig(mp1, solve=True)

    mp2 = Platform(driver="hsqldb", path=tmpdir / "mp2")
    # add another unit and region to make sure that the mapping is correct during clone
    mp2.add_unit("wrong_unit")
    mp2.add_region("wrong_region", "country")

    # check that cloning across platforms requires copying the full solution
    dest = dict(platform=mp2)
    pytest.raises(NotImplementedError,
                  scen1.clone,
                  keep_solution=False,
                  **dest)
    pytest.raises(NotImplementedError,
                  scen1.clone,
                  shift_first_model_year=1964,
                  **dest)

    # clone solved model across platforms (with default settings)
    scen1.clone(platform=mp2, keep_solution=True)

    # close the db to ensure that data and solution of the clone are saved
    mp2.close_db()
    del mp2

    # reopen the connection to the second platform and reload scenario
    _mp2 = Platform(driver="hsqldb", path=tmpdir / "mp2")
    scen2 = Scenario(_mp2, **SCENARIO["dantzig"])
    assert_multi_db(mp1, _mp2)

    # check that sets, variables and parameters were copied correctly
    npt.assert_array_equal(scen1.set("node"), scen2.set("node"))
    assert scen2.firstmodelyear == 1963
    assert_frame_equal(scen1.par("var_cost"), scen2.par("var_cost"))
    assert np.isclose(scen2.var("OBJ")["lvl"], 153.675)
    assert_frame_equal(scen1.var("ACT"), scen2.var("ACT"))

    # check that custom unit, region and timeseries are migrated correctly
    assert_frame_equal(scen2.timeseries(iamc=True), TS_DF)
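
The assert_multi_db helper referenced above is defined elsewhere in the test suite. A hedged, illustrative reconstruction of the kind of check it performs (this is not the actual helper):

def assert_multi_db_sketch(mp1, mp2):
    # The extra unit added to mp2 should exist only there, i.e. cloning the
    # scenario did not leak the destination platform's units back to the source.
    assert "wrong_unit" not in mp1.units()
    assert "wrong_unit" in mp2.units()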
Example No. 5
def test_solve_legacy_scenario(tmp_path, test_data_path):
    db_path = create_test_platform(tmp_path, test_data_path, 'legacy')
    mp = Platform(backend='jdbc', driver='hsqldb', path=db_path)
    scen = Scenario(mp,
                    model='canning problem (MESSAGE scheme)',
                    scenario='standard')
    exp = scen.var('OBJ')['lvl']

    # solve scenario, assert that the new objective value is close to previous
    scen.remove_solution()
    scen.solve()
    assert np.isclose(exp, scen.var('OBJ')['lvl'])
Example No. 6
def test_solve_legacy_scenario(tmp_path, test_data_path):
    db_path = create_test_platform(tmp_path, test_data_path, "legacy")
    mp = Platform(backend="jdbc", driver="hsqldb", path=db_path)
    scen = Scenario(mp,
                    model="canning problem (MESSAGE scheme)",
                    scenario="standard")
    exp = scen.var("OBJ")["lvl"]

    # solve scenario, assert that the new objective value is close to previous
    scen = scen.clone(keep_solution=False)
    scen.solve()
    assert np.isclose(exp, scen.var("OBJ")["lvl"])
Example No. 7
def test_multi_db_run(tmpdir):
    # create a new instance of the transport problem and solve it
    mp1 = Platform(tmpdir / 'mp1', dbtype='HSQLDB')
    scen1 = make_dantzig(mp1, solve=True)

    mp2 = Platform(tmpdir / 'mp2', dbtype='HSQLDB')
    # add another unit and region to make sure that the mapping is correct during clone
    mp2.add_unit('wrong_unit')
    mp2.add_region('wrong_region', 'country')

    # check that cloning across platforms requires copying the full solution
    dest = dict(platform=mp2)
    pytest.raises(ValueError, scen1.clone, keep_solution=False, **dest)
    pytest.raises(ValueError, scen1.clone, shift_first_model_year=1964, **dest)

    # clone solved model across platforms (with default settings)
    scen1.clone(platform=mp2, keep_solution=True)

    # close the db to ensure that data and solution of the clone are saved
    mp2.close_db()
    del mp2

    # reopen the connection to the second platform and reload scenario
    _mp2 = Platform(tmpdir / 'mp2', dbtype='HSQLDB')
    scen2 = Scenario(_mp2, **models['dantzig'])
    assert_multi_db(mp1, _mp2)

    # check that sets, variables and parameters were copied correctly
    npt.assert_array_equal(scen1.set('node'), scen2.set('node'))
    assert scen2.firstmodelyear == 1963
    pdt.assert_frame_equal(scen1.par('var_cost'), scen2.par('var_cost'))
    assert np.isclose(scen2.var('OBJ')['lvl'], 153.675)
    pdt.assert_frame_equal(scen1.var('ACT'), scen2.var('ACT'))

    # check that custom unit, region and timeseries are migrated correctly
    pdt.assert_frame_equal(scen2.timeseries(iamc=True), TS_DF)
Example No. 8
def test_add_year_cli(message_ix_cli, base_scen_mp):
    scen_ref, test_mp = base_scen_mp

    # Information about the base Scenario
    platform_name = test_mp.name
    model = scen_ref.model
    scenario = scen_ref.scenario

    cmd = [
        "--platform",
        platform_name,
        "--model",
        model,
        "--scenario",
        scenario,
        "add-years",
        "--years_new",
        repr(YEARS_NEW),
        "--model_new",
        "add_year",
        "--scen_new",
        "standard",
    ]

    # Delete the objects so that the database connection is closed
    del test_mp, scen_ref

    r = message_ix_cli(*cmd)
    print(r.output, r.exception)
    assert r.exit_code == 0

    # Re-load the base Scenario
    mp = Platform(name=platform_name)
    scen_ref = Scenario(mp, model=model, scenario=scenario)

    # Load the created Scenario
    scen_new = Scenario(mp, model="add_year", scenario="standard")

    assert_function(scen_ref, scen_new, YEARS_NEW, yr_test=2025)

    # Same, except with --dry-run
    r = message_ix_cli(*cmd, "--dry-run")
    assert r.exit_code == 0

    # Bad usage: not giving the base scenario info
    r = message_ix_cli(*cmd[6:], "--dry-run")
    assert r.exit_code == 2
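
The message_ix_cli fixture wraps a click test runner; a hedged sketch of invoking the same subcommand without the fixture. The entry-point import path (message_ix.cli.main) is an assumption about where the click group lives:

from click.testing import CliRunner

from message_ix.cli import main  # assumed location of the message-ix click entry point

def run_add_years_dry_run(cmd):
    # `cmd` is the option list built in the test above.
    runner = CliRunner()
    result = runner.invoke(main, cmd + ["--dry-run"])
    assert result.exit_code == 0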
Example No. 9
def _platform_fixture(request, tmp_env, test_data_path):
    """Helper for :func:`test_mp` and other fixtures."""
    # Long, unique name for the platform.
    # Remove '/' so that the name can be used in URL tests.
    platform_name = request.node.nodeid.replace("/", " ")

    # Add a platform
    ixmp_config.add_platform(
        platform_name, "jdbc", "hsqldb", url=f"jdbc:hsqldb:mem:{platform_name}"
    )

    # Launch Platform
    mp = Platform(name=platform_name)
    yield mp

    # Teardown: don't show log messages when destroying the platform, even if
    # the test using the fixture modified the log level
    mp._backend.set_log_level(logging.CRITICAL)
    del mp

    # Remove from config
    ixmp_config.remove_platform(platform_name)
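
In ixmp's test utilities, a private helper like this is wrapped by the public fixtures via yield from. A hedged sketch of such a wrapper (the fixture name and scope are assumptions):

import pytest

@pytest.fixture(scope="function")
def test_mp(request, tmp_env, test_data_path):
    # Delegate to the helper above; `yield from` forwards both setup and teardown.
    yield from _platform_fixture(request, tmp_env, test_data_path)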
postprocess_path = '{}/post-processing/reporting/'.format(msg_data_path)
print(postprocess_path)
if postprocess_path not in sys.path:
    sys.path.append(postprocess_path)
from iamc_report_india import report as reporting

# Enter name of the input file that should be read
fname = 'wb_baseline.xlsx'
# Choose whether or not data entered into the data structure should be displayed (True or False)
verbose = False
# Choose whether or not data input errors are shown (True or False)
disp_error = False
# Choose whether to write plotted data to xlsx file
output_xlsx = True

mp = Platform(dbtype='HSQLDB')
im = xlsx_core.init_model(mp, fname, verbose, disp_error)
mpa_gen = False
soft_constraints = True
price_carbon = False
meta, tecs, dems, resources, mpa_data = im.read_input()
# Create scenario
scenario, model_nm, scen_nm = im.create_scen()
# Setup scenario metadata
horizon, vintage_years, firstyear = im.add_metadata()
# Process input data
# Import class add_par from xlsx_core
ap = xlsx_core.add_par(scenario, horizon, vintage_years, firstyear, disp_error)
# Process demand data
im.demand_input_data(ap)
# Process fossil resource data