Example #1
def test_co2_emissions_only(package):
    df = zero_emissions.timeseries()

    time = zero_emissions["time"]
    emms_fossil_co2 = np.linspace(0, 30, len(time))
    df.loc[
        df.index.get_level_values("variable")
        == "Emissions|CO2|MAGICC Fossil and Industrial",
        :,
    ] = emms_fossil_co2

    scen = MAGICCData(df)
    scen.metadata["header"] = "Test CO2 emissions only file"
    results = package.run(
        scen,
        endyear=max(scen["time"]).year,
        rf_total_constantafteryr=5000,
        rf_total_runmodus="CO2",
        co2_switchfromconc2emis_year=min(scen["time"]).year - 1,
        out_emissions=1,
        only=[
            "Surface Temperature", "Emissions|CO2|MAGICC Fossil and Industrial"
        ],
    )

    output_co2 = (results.filter(variable="Em*CO2*Fossil*",
                                 region="World").timeseries().values.squeeze())
    assert not (output_co2 == 0).all()
    np.testing.assert_allclose(output_co2, emms_fossil_co2, rtol=0.0005)

    temperature_global = (results.filter(
        variable="Surface Temperature",
        region="World").timeseries().values.squeeze())
    assert (temperature_global[1:] - temperature_global[:-1] >= 0).all()
Example #2
File: cli.py (project: smutch/netcdf-scm)
def _write_mag_file(  # pylint:disable=too-many-arguments
        openscmdf, metadata, header, outfile_dir, symlink_dir, fnames, force):
    out_file = os.path.join(outfile_dir, fnames[0])
    out_file = "{}.MAG".format(os.path.splitext(out_file)[0])

    if _skip_file(out_file, force, symlink_dir):
        return

    writer = MAGICCData(openscmdf)
    writer["todo"] = "SET"
    ts_times = writer.timeseries().columns
    time_steps = ts_times[1:] - ts_times[:-1]
    step_upper = np.timedelta64(32, "D")  # pylint:disable=too-many-function-args
    step_lower = np.timedelta64(28, "D")  # pylint:disable=too-many-function-args
    if any((time_steps > step_upper) | (time_steps < step_lower)):
        raise ValueError("Please raise an issue at "
                         "github.com/znicholls/netcdf-scm/issues "
                         "to discuss how to handle non-monthly data wrangling")

    writer.metadata = metadata
    writer.metadata["timeseriestype"] = "MONTHLY"
    writer.metadata["header"] = header

    logger.info("Writing file to %s", out_file)
    writer.write(out_file, magicc_version=7)

    symlink_file = os.path.join(symlink_dir, os.path.basename(out_file))
    logger.info("Making symlink to %s", symlink_file)
    os.symlink(out_file, symlink_file)
Example #3
def writing_base_mag():
    tregions = (
        ["World"]
        + ["World|{}".format(r) for r in ["Northern Hemisphere", "Southern Hemisphere"]]
        + ["World|{}".format(r) for r in ["Land", "Ocean"]]
    )

    writing_base_mag = MAGICCData(
        data=np.arange(27 * len(tregions)).reshape(27, len(tregions)),
        index=[
            dt.datetime(2099, 1, 16, 12, 0),
            dt.datetime(2099, 2, 15, 0, 0),
            dt.datetime(2099, 3, 16, 12, 0),
            dt.datetime(2099, 4, 16, 0, 0),
            dt.datetime(2099, 5, 16, 12, 0),
            dt.datetime(2099, 6, 16, 0, 0),
            dt.datetime(2099, 7, 16, 12, 0),
            dt.datetime(2099, 8, 16, 12, 0),
            dt.datetime(2099, 9, 16, 0, 0),
            dt.datetime(2099, 10, 16, 12, 0),
            dt.datetime(2099, 11, 16, 0, 0),
            dt.datetime(2099, 12, 16, 12, 0),
            dt.datetime(2100, 1, 16, 12, 0),
            dt.datetime(2100, 2, 15, 0, 0),
            dt.datetime(2100, 3, 16, 12, 0),
            dt.datetime(2100, 4, 16, 0, 0),
            dt.datetime(2100, 5, 16, 12, 0),
            dt.datetime(2100, 6, 16, 0, 0),
            dt.datetime(2100, 7, 16, 12, 0),
            dt.datetime(2100, 8, 16, 12, 0),
            dt.datetime(2100, 9, 16, 0, 0),
            dt.datetime(2100, 10, 16, 12, 0),
            dt.datetime(2100, 11, 16, 0, 0),
            dt.datetime(2100, 12, 16, 12, 0),
            dt.datetime(2101, 1, 16, 12, 0),
            dt.datetime(2101, 2, 15, 0, 0),
            dt.datetime(2101, 3, 16, 12, 0),
        ],
        columns={
            "region": tregions,
            "variable": "NPP",
            "model": "unspecified",
            "scenario": "mag test",
            "unit": "gC/yr",
            "todo": "SET",
        },
    )

    writing_base_mag.metadata = {
        "header": "Test mag file",
        "timeseriestype": "MONTHLY",
        "other info": "checking time point handling",
    }

    yield writing_base_mag
Example #4
def test_pymagicc_writing_has_an_effect(
    package,
    test_filename,
    relevant_config,
    outputs_to_check,
    time_check_min,
    time_check_max,
):
    if (package.version == 6) and test_filename.endswith("SCEN7"):
        # maybe this should raise an error instead of skipping
        pytest.skip("MAGICC6 cannot run SCEN7 files")
    if ("SRES" in test_filename) and (package.version == 7):
        # maybe this should raise an error instead of skipping
        pytest.skip("MAGICC7 cannot run SRES SCEN files")
    if ("SCEN" in test_filename) and (package.version == 7):
        # special undocumented flags!!!
        relevant_config["fgas_adjstfutremis2past_0no1scale"] = 0
        relevant_config["mhalo_adjstfutremis2past_0no1scale"] = 0

    for key, value in relevant_config.items():
        if value == "test_filename":
            relevant_config[key] = test_filename

    package.set_config(**relevant_config)
    initial_results = package.run()

    ttweak_factor = 0.9

    mdata = MAGICCData(
        join(package.run_dir, test_filename),
        columns={
            "model": ["unspecified"],
            "scenario": ["unspecified"]
        },
    )
    mdata._data *= ttweak_factor
    mdata.write(join(package.run_dir, test_filename), package.version)

    tweaked_results = package.run()

    for output_to_check in outputs_to_check:
        result = (tweaked_results.filter(
            variable=output_to_check,
            year=range(time_check_min,
                       time_check_max + 1)).timeseries().values)
        expected = (ttweak_factor * initial_results.filter(
            variable=output_to_check,
            year=range(time_check_min,
                       time_check_max + 1)).timeseries().values)
        abstol = np.max([result, expected]) * 10**-3
        np.testing.assert_allclose(result, expected, rtol=1e-5, atol=abstol)
Example #5
def valid_tcr_ecs_diagnosis_results():
    startyear = 1700
    endyear = 4000
    spin_up_time = 50
    rising_time = 70
    tcr_yr = startyear + spin_up_time + rising_time
    ecs_yr = endyear
    fake_PI_conc = 278.0
    eqm_time = endyear - startyear - spin_up_time - rising_time

    fake_time = np.arange(startyear, endyear + 1)
    fake_concs = np.concatenate((
        fake_PI_conc * np.ones(spin_up_time),
        fake_PI_conc * 1.01**(np.arange(rising_time + 1)),
        fake_PI_conc * 1.01**(rising_time) * np.ones(eqm_time),
    ))
    fake_rf = 2.0 * np.log(fake_concs / fake_PI_conc)
    fake_temp = np.log(fake_rf + 1.0) + fake_time / fake_time[1400]
    fake_regions = ["World"] * len(fake_time)

    mock_co2_conc = pd.DataFrame({
        "time": fake_time,
        "unit": ["ppm"] * len(fake_time),
        "variable": ["Atmospheric Concentrations|CO2"] * len(fake_time),
        "value": fake_concs,
        "region": fake_regions,
        "model": ["N/A"] * len(fake_time),
        "scenario": ["1%/yr_co2"] * len(fake_time),
    })
    mock_rf = pd.DataFrame({
        "time": fake_time,
        "unit": ["W / m^2"] * len(fake_time),
        "variable": ["Radiative Forcing"] * len(fake_time),
        "value": fake_rf,
        "region": fake_regions,
        "model": ["N/A"] * len(fake_time),
        "scenario": ["1%/yr_co2"] * len(fake_time),
    })
    mock_temp = pd.DataFrame({
        "time": fake_time,
        "unit": ["K"] * len(fake_time),
        "variable": ["Surface Temperature"] * len(fake_time),
        "value": fake_temp,
        "region": fake_regions,
        "model": ["N/A"] * len(fake_time),
        "scenario": ["1%/yr_co2"] * len(fake_time),
    })
    mock_results = MAGICCData(pd.concat([mock_co2_conc, mock_rf, mock_temp]))

    yield {
        "mock_results": mock_results,
        "tcr_time": datetime(tcr_yr, 1, 1),
        "ecs_time": datetime(ecs_yr, 1, 1),
    }
Example #6
def assert_bad_tcr_ecs_diagnosis_values_caught(base_data,
                                               method_to_run,
                                               regexp_to_match,
                                               *args,
                                               test_target="other"):
    test_time = base_data["time"].values
    times_to_break = [
        test_time[3],
        test_time[15],
        test_time[115],
        test_time[-100],
        test_time[-1],
    ]
    if test_target != "temperature":
        times_to_break.append(test_time[0])
    for time_to_break in times_to_break:
        broken_data = base_data.timeseries()
        col_to_adjust = broken_data.columns == time_to_break
        if test_target == "temperature":
            broken_data.iloc[:, col_to_adjust] -= 0.1
        else:
            broken_data.iloc[:, col_to_adjust] *= 1.01
            broken_data.iloc[:, col_to_adjust] += 0.01
            broken_data.iloc[-1, col_to_adjust] += 0.1
        with pytest.raises(ValueError, match=regexp_to_match):
            method_to_run(MAGICCData(broken_data), *args)
Example #7
def read_scen_file(
        filepath,
        columns={
            "model": ["unspecified"],
            "scenario": ["unspecified"],
            "climate_model": ["unspecified"],
        },
        **kwargs):
    """
    Read a MAGICC .SCEN file.

    Parameters
    ----------
    filepath : str
        Filepath of the .SCEN file to read

    columns : dict
        Passed to ``__init__`` method of MAGICCData. See
        ``MAGICCData.__init__`` for details.

    kwargs
        Passed to ``__init__`` method of MAGICCData. See
        ``MAGICCData.__init__`` for details.

    Returns
    -------
    :obj:`pymagicc.io.MAGICCData`
        ``MAGICCData`` object containing the data and metadata.
    """
    mdata = MAGICCData(filepath, columns=columns, **kwargs)

    return mdata
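As a usage sketch (the file name, column values and import path below are illustrative assumptions, not taken from the examples above), the function can be called like this to attach model and scenario metadata while reading:

# Hypothetical usage of read_scen_file; the path and metadata are placeholders.
from pymagicc import read_scen_file  # assumed import location

scen = read_scen_file(
    "RCP26.SCEN",  # hypothetical filepath
    columns={
        "model": ["IMAGE"],
        "scenario": ["RCP26"],
        "climate_model": ["MAGICC6"],
    },
)
print(scen["variable"].unique())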
Example #8
def test_external_forcing_only_run(package):
    time = zero_emissions["time"]

    forcing_external = 2.0 * np.arange(0, len(time)) / len(time)
    forcing_ext = MAGICCData(
        forcing_external,
        columns={
            "index": time,
            "scenario": ["idealised"],
            "model": ["unspecified"],
            "climate_model": ["unspecified"],
            "variable": ["Radiative Forcing|Extra"],
            "unit": ["W / m^2"],
            "todo": ["SET"],
            "region": ["World"],
        },
    )
    forcing_ext_filename = "EXTERNAL_RF.IN"
    forcing_ext.metadata = {
        "header": "External radiative forcing file for testing"
    }
    forcing_ext.write(join(package.run_dir, forcing_ext_filename),
                      package.version)

    results = package.run(
        rf_extra_read=1,
        file_extra_rf=forcing_ext_filename,
        rf_total_runmodus="QEXTRA",
        endyear=max(time).year,
        rf_initialization_method="ZEROSTARTSHIFT",  # this is the default, but set it explicitly just in case
        rf_total_constantafteryr=5000,
    )

    # MAGICC's handling of the final year means the last result is simply held constant
    # from the previous year rather than being calculated properly, so drop it here.
    # TODO: document this behaviour
    validation_output = (results.filter(
        variable="Radiative Forcing",
        region="World").timeseries().values.squeeze()[:-1])
    validation_input = forcing_external[:-1]
    np.testing.assert_allclose(validation_input, validation_output, rtol=1e-5)
    temperature_global = (results.filter(
        variable="Surface Temperature",
        region="World").timeseries().values.squeeze())
    assert (temperature_global[1:] - temperature_global[:-1] >= 0).all()
Example #9
File: cli.py (project: smutch/netcdf-scm)
def _write_magicc_input_files(  # pylint:disable=too-many-arguments,too-many-locals
    openscmdf,
    time_id,
    outfile_dir,
    symlink_dir,
    force,
    metadata,
    header,
    timeseriestype,
):
    try:
        var_to_write = openscmdf["variable"].unique()[0]
        variable_abbreviations = {
            "filename": var_to_write,
            "magicc_name": _MAGICC_VARIABLE_MAP[var_to_write][0],
            "magicc_internal_name": _MAGICC_VARIABLE_MAP[var_to_write][1],
        }
    except KeyError:
        raise KeyError(
            "I don't know which MAGICC variable to use for input `{}`".format(
                var_to_write))

    region_filters = {
        "FOURBOX": [
            "World|Northern Hemisphere|Land",
            "World|Southern Hemisphere|Land",
            "World|Northern Hemisphere|Ocean",
            "World|Southern Hemisphere|Ocean",
        ],
        "GLOBAL": ["World"],
    }
    for region_key, regions_to_keep in region_filters.items():
        out_file = os.path.join(
            outfile_dir,
            (("{}_{}_{}_{}_{}_{}_{}.IN").format(
                variable_abbreviations["filename"],
                openscmdf["scenario"].unique()[0],
                openscmdf["climate_model"].unique()[0],
                openscmdf["member_id"].unique()[0],
                time_id,
                region_key,
                variable_abbreviations["magicc_internal_name"],
            ).upper()),
        )
        symlink_file = os.path.join(symlink_dir, os.path.basename(out_file))

        if _skip_file(out_file, force, symlink_dir):
            return

        writer = MAGICCData(openscmdf).filter(region=regions_to_keep)
        writer["todo"] = "SET"
        writer["variable"] = variable_abbreviations["magicc_name"]
        writer.metadata = metadata
        writer.metadata["header"] = header
        writer.metadata["timeseriestype"] = timeseriestype

        logger.info("Writing file to %s", out_file)
        writer.write(out_file, magicc_version=7)
        logger.info("Making symlink to %s", symlink_file)
        os.symlink(out_file, symlink_file)
Example #10
def test_run_rewritten_scen_file(package, temp_dir):
    starting_scen = join(MAGICC6_DIR, "RCP26.SCEN")
    written_scen = join(package.run_dir, "RCP26.SCEN7")

    cols = {
        "model": ["IMAGE"],
        "scenario": ["RCP26"],
        "climate_model": ["MAGICC6"]
    }
    mdata_initial = MAGICCData(starting_scen, columns=cols)

    mdata_initial.write(written_scen, magicc_version=7)

    mdata_written = MAGICCData(written_scen, columns=cols)
    results = package.run(mdata_written, only=["Surface Temperature"])

    assert len(results["variable"].unique()) == 1
    assert "Surface Temperature" in results["variable"].values
Example #11
    def _do_comparison(res, expected, update=False):
        """Run test that crunched files are unchanged

        Parameters
        ----------
        res : str
            Directory written as part of the test
        expected : str
            Directory against which the comparison should be done
        update : bool
            If True, don't perform the test and instead simply
            overwrite the ``expected`` with ``res``

        Raises
        ------
        AssertionError
            If ``update`` is ``False`` and ``res`` and ``expected``
            are not identical.
        """
        paths_to_walk = [expected, res] if not update else [res]
        for p in paths_to_walk:
            for dirpath, _, filenames in walk(p):
                if filenames:
                    if update:
                        path_to_check = dirpath.replace(res, expected)
                        if not path.exists(path_to_check):
                            makedirs(path_to_check)

                    for f in filenames:
                        base_f = join(dirpath, f)
                        comparison_p = expected if p == res else res
                        comparison_f = base_f.replace(p, comparison_p)
                        assert base_f != comparison_f
                        if update:
                            print("Updating {}".format(comparison_f))
                            shutil.copy(base_f, comparison_f)
                        else:
                            base_scmdf = MAGICCData(base_f)
                            comparison_scmdf = MAGICCData(comparison_f)
                            assert_scmdata_frames_allclose(
                                base_scmdf, comparison_scmdf)

        if update:
            pytest.skip("Updated {}".format(expected))
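For context, a minimal sketch of how a comparison helper like this might be invoked from a regression test; the test name, fixture and directory are hypothetical, not taken from the real test suite:

# Hypothetical invocation of the _do_comparison helper defined above.
def test_crunched_files_unchanged(tmpdir):
    res_dir = str(tmpdir)  # directory the test writes its output into
    expected_dir = "tests/expected-output"  # hypothetical baseline directory
    # ... produce the crunched files in res_dir here ...
    _do_comparison(res_dir, expected_dir, update=False)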
Example #12
def test_can_read_all_files_in_magicc_dir(dir_to_check):
    cannot_read = []
    ignored = []
    for file_to_read in listdir(dir_to_check):
        try:
            if file_to_read.endswith(
                (".exe", ".mod", ".mat", ".m", ".BINOUT")):
                ignored.append(file_to_read)
            elif file_to_read.endswith(".CFG"):
                read_cfg_file(join(dir_to_check, file_to_read))
            elif file_to_read.endswith("PARAMETERS.OUT"):
                read_cfg_file(join(dir_to_check, file_to_read))
            else:
                mdata = MAGICCData()
                mdata.read(dir_to_check, file_to_read)
        except Exception:  # record, rather than fail on, files that cannot be read
            cannot_read.append(file_to_read)

    print_summary(cannot_read, ignored, dir_to_check)
Example #13
def writing_base_emissions():
    no_cols = 5
    yield MAGICCData(
        np.arange(0, 2 * no_cols).reshape((2, no_cols)),
        index=np.arange(1995, 1997),
        columns={
            "region": ["region {}".format(i) for i in range(no_cols)],
            "scenario": ["test"],
            "model": ["unspecified"],
            "variable": ["variable"],
            "unit": ["unit"],
            "todo": ["SET"],
        },
    )
Example #14
def test_run_with_magiccdata(package, temp_dir):
    tmodel = "IMAGE"
    tscenario = "RCP26"
    scen = MAGICCData(
        join(MAGICC6_DIR, "RCP26.SCEN"),
        columns={
            "model": [tmodel],
            "scenario": [tscenario]
        },
    )

    results = package.run(scen, only=["Surface Temperature"])

    assert len(results["variable"].unique()) == 1
    assert "Surface Temperature" in results["variable"].values

    assert (results["climate_model"] == "MAGICC{}".format(
        package.version)).all()
    assert (results["model"] == tmodel).all()
    assert (results["scenario"] == tscenario).all()
Example #15
File: cli.py (project: smutch/netcdf-scm)
def _write_mag_file_with_operation(  # pylint:disable=too-many-arguments
        openscmdf, metadata, header, outfile_dir, symlink_dir, fnames, force,
        out_format):  # pylint:disable=too-many-locals
    if len(fnames) > 1:
        raise AssertionError(
            "more than one file to wrangle?"
        )  # pragma: no cover # emergency valve

    ts = openscmdf.timeseries()

    src_time_points = ts.columns
    original_years = ts.columns.map(lambda x: x.year).unique()

    time_id = "{}-{}".format(src_time_points[0].year, src_time_points[-1].year)
    regex_search = r"{:04d}\d*-{:04d}\d*".format(src_time_points[0].year,
                                                 src_time_points[-1].year)
    old_time_id = re.search(regex_search, fnames[0]).group(0)

    out_file = os.path.join(outfile_dir,
                            fnames[0].replace(old_time_id, time_id))
    out_file = "{}.MAG".format(os.path.splitext(out_file)[0])

    if _skip_file(out_file, force, symlink_dir):
        return

    writer = MAGICCData(_do_timeseriestype_operation(
        openscmdf, out_format)).filter(year=original_years)

    writer["todo"] = "SET"
    writer.metadata = metadata
    writer.metadata["timeseriestype"] = (out_format.replace(
        "mag-files-", "").replace("-", "_").upper())

    writer.metadata["header"] = header

    logger.info("Writing file to %s", out_file)
    writer.write(out_file, magicc_version=7)

    symlink_file = os.path.join(symlink_dir, os.path.basename(out_file))
    logger.info("Making symlink to %s", symlink_file)
    os.symlink(out_file, symlink_file)
Example #16
"""A simple script that can be used to read and write a file to see the effects of the formatting without having to always stop and debug tests
"""

import os
from os.path import join, expanduser

from pymagicc.io import MAGICCData

here = os.path.dirname(os.path.abspath(__file__))
fpath = join(here, "..", "pymagicc", "MAGICC6", "run")
fname = "RCP26.SCEN"

mi_writer = MAGICCData()
mi_writer.read(filepath=fpath, filename=fname)

mi_writer.write(join(expanduser("~"), fname))
Example #17
def test_wrangling_in_file_operations(tmpdir, test_cmip6_crunch_output, caplog,
                                      out_format_in_file):
    runner = CliRunner()

    input_dir = join(test_cmip6_crunch_output,
                     "CMIP/IPSL/IPSL-CM6A-LR/piControl")
    output_dir = str(tmpdir)

    caplog.clear()
    with caplog.at_level("INFO"):
        result_raw = runner.invoke(
            wrangle_netcdf_scm_ncs,
            [
                input_dir,
                output_dir,
                "test",
                "--drs",
                "CMIP6Output",
                "--number-workers",
                1,
                "--out-format",
                "magicc-input-files",
                "--regexp",
                ".*tas.*",
            ],
        )
    assert result_raw.exit_code == 0, result_raw.stdout

    # a GLOBAL file is also written, but only the FOURBOX file is checked here
    expected_file_raw = join(
        output_dir,
        "CMIP6/CMIP/IPSL/IPSL-CM6A-LR/piControl/r1i1p1f1/Amon/tas/gr/v20181123/TAS_PICONTROL_IPSL-CM6A-LR_R1I1P1F1_284001-285912_FOURBOX_SURFACE_TEMP.IN",
    )

    res_raw = MAGICCData(expected_file_raw)
    res_raw_resampled = _get_expected_wrangled_ts(res_raw, out_format_in_file)

    caplog.clear()
    with caplog.at_level("INFO"):
        result = runner.invoke(
            wrangle_netcdf_scm_ncs,
            [
                input_dir,
                output_dir,
                "test",
                "--drs",
                "CMIP6Output",
                "--out-format",
                out_format_in_file,
                "--regexp",
                ".*tas.*",
                "--number-workers",
                1,
            ],
        )
    assert result.exit_code == 0, result.stdout

    os.listdir(
        join(
            output_dir,
            "CMIP6/CMIP/IPSL/IPSL-CM6A-LR/piControl/r1i1p1f1/Amon/tas/gr/v20181123/",
        ))
    expected_file = join(
        output_dir,
        "CMIP6/CMIP/IPSL/IPSL-CM6A-LR/piControl/r1i1p1f1/Amon/tas/gr/v20181123/TAS_PICONTROL_IPSL-CM6A-LR_R1I1P1F1_2840-2859_FOURBOX_SURFACE_TEMP.IN",
    )

    res = MAGICCData(expected_file)

    np.testing.assert_allclose(res_raw_resampled,
                               res.timeseries(),
                               rtol=2 * 1e-3)
    with open(expected_file) as f:
        content = f.read()

    assert ("timeseriestype: {}".format(
        out_format_in_file.replace("magicc-input-files-",
                                   "").replace("-", "_").upper()) in content)
Example #18
def test_wrangling_mag_file_operations(tmpdir, test_cmip6_crunch_output,
                                       caplog, out_format_mag):
    runner = CliRunner()

    input_dir = join(test_cmip6_crunch_output, "ScenarioMIP/IPSL/IPSL-CM6A-LR")
    output_dir = str(tmpdir)

    caplog.clear()
    with caplog.at_level("INFO"):
        result_raw = runner.invoke(
            wrangle_netcdf_scm_ncs,
            [
                input_dir,
                output_dir,
                "test",
                "--drs",
                "CMIP6Output",
                "--number-workers",
                1,
            ],
        )
    assert result_raw.exit_code == 0

    expected_file_raw = join(
        output_dir,
        "CMIP6/ScenarioMIP/IPSL/IPSL-CM6A-LR/ssp126/r1i1p1f1/Lmon/cSoilFast/gr/v20190121/netcdf-scm_cSoilFast_Lmon_IPSL-CM6A-LR_ssp126_r1i1p1f1_gr_202501-204012.MAG",
    )

    res_raw = MAGICCData(expected_file_raw)
    res_raw_resampled = _get_expected_wrangled_ts(res_raw, out_format_mag)

    caplog.clear()
    with caplog.at_level("INFO"):
        result = runner.invoke(
            wrangle_netcdf_scm_ncs,
            [
                input_dir,
                output_dir,
                "test",
                "--drs",
                "CMIP6Output",
                "--out-format",
                out_format_mag,
                "--number-workers",
                1,
            ],
        )
    assert result.exit_code == 0

    expected_file = join(
        output_dir,
        "CMIP6/ScenarioMIP/IPSL/IPSL-CM6A-LR/ssp126/r1i1p1f1/Lmon/cSoilFast/gr/v20190121/netcdf-scm_cSoilFast_Lmon_IPSL-CM6A-LR_ssp126_r1i1p1f1_gr_2025-2040.MAG",
    )

    res = MAGICCData(expected_file)

    np.testing.assert_allclose(res_raw_resampled,
                               res.timeseries(),
                               rtol=2 * 1e-3)
    with open(expected_file) as f:
        content = f.read()

    assert ("THISFILE_TIMESERIESTYPE = '{}'".format(
        out_format_mag.replace("mag-files-", "").replace("-", "_").upper())
            in content)
Example #19
def test_wrangling_units_specs_area_sum(tmpdir, test_cmip6_crunch_output,
                                        caplog):
    target_unit = "Gt / yr"
    target_units = pd.DataFrame([["fgco2", target_unit], ["tos", "K"]],
                                columns=["variable", "unit"])
    target_units_csv = join(tmpdir, "target_units.csv")
    target_units.to_csv(target_units_csv, index=False)

    runner = CliRunner()

    input_dir = join(test_cmip6_crunch_output, "CMIP/CCCma")
    output_dir = str(tmpdir)

    result_raw = runner.invoke(
        wrangle_netcdf_scm_ncs,
        [
            input_dir, output_dir, "test", "--drs", "CMIP6Output",
            "--number-workers", 1
        ],
    )

    expected_file = join(
        output_dir,
        "CMIP6/CMIP/CCCma/CanESM5/piControl/r1i1p1f1/Omon/fgco2/gn/v20190429/netcdf-scm_fgco2_Omon_CanESM5_piControl_r1i1p1f1_gn_600101-600103.MAG",
    )
    assert result_raw.exit_code == 0
    res_raw = MAGICCData(expected_file)

    caplog.clear()
    with caplog.at_level("INFO"):
        result = runner.invoke(
            wrangle_netcdf_scm_ncs,
            [
                input_dir,
                output_dir,
                "test",
                "--drs",
                "CMIP6Output",
                "--number-workers",
                1,
                "--target-units-specs",
                target_units_csv,
                "--force",
            ],
        )

    assert result.exit_code == 0
    assert (
        "Converting units of fgco2 from kg m^-2 s^-1 to {}".format(target_unit)
        in caplog.messages)
    res = MAGICCData(expected_file)

    assert sorted(res["region"].tolist()) == sorted(res_raw["region"].tolist())
    for region, df in res.timeseries().groupby("region"):
        for k, v in res.metadata.items():
            if "{} (".format(SCMCube._convert_region_to_area_key(region)) in k:
                unit = k.split("(")[-1].split(")")[0]
                assert unit == "m**2", "assumed unit for test has changed..."
                # area conversion x mass conversion x time conversion
                conv_factor = float(v) * 10**-12 * 3.155695e07
                break

        np.testing.assert_allclose(df.values,
                                   res_raw.filter(region=region).values *
                                   conv_factor,
                                   rtol=1e-5)
Example #20
def test_wrangling_units_specs(tmpdir, test_cmip6_crunch_output, target_unit,
                               conv_factor, caplog):
    target_units = pd.DataFrame([["fgco2", target_unit], ["tos", "K"]],
                                columns=["variable", "unit"])
    target_units_csv = join(tmpdir, "target_units.csv")
    target_units.to_csv(target_units_csv, index=False)

    runner = CliRunner()

    input_dir = join(test_cmip6_crunch_output, "CMIP/CCCma")
    output_dir = str(tmpdir)

    caplog.clear()
    with caplog.at_level("INFO"):
        result_raw = runner.invoke(
            wrangle_netcdf_scm_ncs,
            [
                input_dir,
                output_dir,
                "test",
                "--drs",
                "CMIP6Output",
                "--number-workers",
                1,
            ],
        )
    assert result_raw.exit_code == 0
    assert "Converting units" not in caplog.messages

    expected_file = join(
        output_dir,
        "CMIP6/CMIP/CCCma/CanESM5/piControl/r1i1p1f1/Omon/fgco2/gn/v20190429/netcdf-scm_fgco2_Omon_CanESM5_piControl_r1i1p1f1_gn_600101-600103.MAG",
    )

    res_raw = MAGICCData(expected_file)
    caplog.clear()
    with caplog.at_level("INFO"):
        result = runner.invoke(
            wrangle_netcdf_scm_ncs,
            [
                input_dir,
                output_dir,
                "test",
                "--drs",
                "CMIP6Output",
                "--number-workers",
                1,
                "--target-units-specs",
                target_units_csv,
                "--force",
            ],
        )
    assert result.exit_code == 0
    assert (
        "Converting units of fgco2 from kg m^-2 s^-1 to {}".format(target_unit)
        in caplog.messages)

    res = MAGICCData(expected_file)

    np.testing.assert_allclose(res_raw.timeseries() * conv_factor,
                               res.timeseries(),
                               rtol=1e-5)
Example #21
def test_pymagicc_writing_has_an_effect(
    package,
    test_filename,
    relevant_config,
    outputs_to_check,
    time_check_min,
    time_check_max,
):
    if ("SCEN" in test_filename) and (package.version == 7):
        # special undocumented flags!!!
        relevant_config["fgas_adjstfutremis2past_0no1scale"] = 0
        relevant_config["mhalo_adjstfutremis2past_0no1scale"] = 0

    iter_dict = copy.deepcopy(relevant_config)
    for key, value in iter_dict.items():
        if value == "test_filename":
            relevant_config[key] = test_filename
        # Handle adjustment to `.prn` handling in MAGICC
        if key == "file_mhalo_emis" and package.version == 7:
            relevant_config["mhalo_prnfile_emis"] = relevant_config.pop(key)
            relevant_config["mhalo_take_prnfile"] = 1
        if key == "file_mhalo_conc" and package.version == 7:
            relevant_config["mhalo_prnfile_conc"] = relevant_config.pop(key)
            relevant_config["mhalo_take_prnfile"] = 1

    package.set_config(conflict="ignore", **relevant_config)

    if (package.version == 6) and test_filename.endswith("SCEN7"):
        error_msg = re.compile("MAGICC6 cannot run SCEN7 files")
        with pytest.raises(ValueError, match=error_msg):
            package.run(only=outputs_to_check)
        return

    if ("SRES" in test_filename) and (package.version == 7):
        # MAGICC7 cannot run SRES SCEN files
        with pytest.raises(CalledProcessError):
            package.run(only=outputs_to_check)
        return

    if ".prn" in test_filename and package.version == 7:
        # MAGICC7's prn handling is not working
        with pytest.raises(CalledProcessError):
            package.run(only=outputs_to_check)
        return

    initial_results = package.run(only=outputs_to_check)

    ttweak_factor = 0.9

    mdata = MAGICCData(
        join(package.run_dir, test_filename),
        columns={
            "model": ["unspecified"],
            "scenario": ["unspecified"]
        },
    )
    mdata._data *= ttweak_factor
    mdata.write(join(package.run_dir, test_filename), package.version)

    tweaked_results = package.run(only=outputs_to_check)

    for output_to_check in outputs_to_check:
        result = (tweaked_results.filter(
            variable=output_to_check,
            year=range(time_check_min,
                       time_check_max + 1)).timeseries().values)
        initial = (initial_results.filter(
            variable=output_to_check,
            year=range(time_check_min,
                       time_check_max + 1)).timeseries().values)
        expected = ttweak_factor * initial

        abstol = np.max([result, expected]) * 10**-3
        np.testing.assert_allclose(result, expected, rtol=1e-5, atol=abstol)
Example #22
def test_co2_emms_other_rf_run(package, emms_co2_level):
    package.set_zero_config()

    df = zero_emissions.timeseries()

    time = zero_emissions["time"]
    emms_fossil_co2 = np.zeros(len(time))
    emms_fossil_co2[20:] = emms_co2_level

    df.loc[
        df.index.get_level_values("variable")
        == "Emissions|CO2|MAGICC Fossil and Industrial",
        :,
    ] = emms_fossil_co2

    scen = MAGICCData(df)

    forcing_external = 2.0 * np.arange(0, len(time)) / len(time)
    forcing_ext = MAGICCData(
        forcing_external,
        columns={
            "index": time,
            "scenario": ["idealised"],
            "model": ["unspecified"],
            "climate_model": ["unspecified"],
            "variable": ["Radiative Forcing|Extra"],
            "unit": ["W / m^2"],
            "todo": ["SET"],
            "region": ["World"],
        },
    )
    forcing_ext.metadata = {
        "header": "External radiative forcing file for testing"
    }
    forcing_ext_filename = "EXTERNAL_RF.IN"
    forcing_ext.write(join(package.run_dir, forcing_ext_filename),
                      package.version)

    # TODO: fix endyear so it takes from scenario input by default
    results = package.run(
        scen,
        endyear=max(time).year,
        rf_extra_read=1,  # fix writing of 'True'
        file_extra_rf=forcing_ext_filename,
        rf_total_runmodus="all",
        rf_initialization_method="ZEROSTARTSHIFT",
        rf_total_constantafteryr=5000,
    )

    np.testing.assert_allclose(
        results.filter(variable="Em*CO2*Fossil*",
                       region="World").timeseries().values.squeeze(),
        emms_fossil_co2,
    )
    # CO2-temperature feedbacks cause some CO2 outgassing and hence extra CO2 forcing,
    # so the radiative forcing values do not match exactly; numerical precision adds to this.
    ext_rf_output_vals = (results.filter(
        variable="Radiative Forcing",
        region="World").timeseries().values.squeeze())
    zero_rows = (forcing_external == 0) & (ext_rf_output_vals == 0)

    greater_equal_rows = ext_rf_output_vals >= forcing_external
    close_rows_denominator = forcing_external.copy()  # copy so the input array is not mutated
    close_rows_denominator[zero_rows] = 10**-10  # avoid divide by zero
    close_rows = (np.abs(ext_rf_output_vals - forcing_external) /
                  close_rows_denominator <= 10**-3)
    matching_rows = greater_equal_rows | close_rows
    assert matching_rows.all()
Example #23
)
rcp60 = read_scen_file(
    join(_magicc6_included_distribution_path, "RCP60.SCEN"),
    columns={
        "model": ["AIM"],
        "scenario": ["RCP60"]
    },
)
rcp85 = read_scen_file(
    join(_magicc6_included_distribution_path, "RCP85.SCEN"),
    columns={
        "model": ["MESSAGE"],
        "scenario": ["RCP85"]
    },
)

rcps = deepcopy(rcp26)
for rcp in [rcp45, rcp60, rcp85]:
    rcps = rcps.append(rcp)

zero_emissions = MAGICCData(
    join(dirname(abspath(__file__)), "RCP3PD_EMISSIONS.DAT"),
    columns={
        "scenario": ["idealised"],
        "model": ["unspecified"],
        "climate_model": ["unspecified"],
    },
).filter(region="World")

zero_emissions = zero_emissions * 0.0