Example #1
    def test_auto_export(self):
        cero = pd.DataFrame.from_dict({"A": [1], "B": [2], "C": [3]},
                                      orient='index',
                                      dtype=pd.np.float32)
        cero.sort_index(inplace=True)
        cero.columns = pd.DatetimeIndex(data=pd.to_datetime([2018], format="%Y"))

        fc = FromCERO(cfg.d_td + "test_procedure_autoexport.yaml")
        fc.exec_procedures(cero)

        df1 = pd.read_csv("auto_csv_export.csv", index_col=0)
        test_list = [1, 2, 3]
        df1_vals = [x[0] for x in df1.values.tolist()]
        self.assertTrue(all([np.isclose(x, y) for (x, y) in zip(test_list, df1_vals)]))
        test_list = ["A", "B", "C"]
        self.assertTrue(all([x == y for (x, y) in zip(test_list, df1.index.tolist())]))

        df1 = pd.read_excel("auto_xlsx_export.xlsx", index_col=0)
        test_list = [1, 2, 3]
        df1_vals = [x[0] for x in df1.values.tolist()]
        self.assertTrue(all([np.isclose(x, y) for (x, y) in zip(test_list, df1_vals)]))
        test_list = ["A", "B", "C"]
        self.assertTrue(all([x == y for (x, y) in zip(test_list, df1.index.tolist())]))

        os.remove("auto_csv_export.csv")
        os.remove("auto_xlsx_export.xlsx")
Example #2
    def test_replace_har_header_in_file(self):

        dd = os.path.join(os.path.dirname(__file__), "data", "")
        shutil.copy2(dd + "test_Forenew7.shk", "test_Forenew7.shk")

        # CERO path
        df = DataTools.get_test_data(dd +
                                     "test_replace_har_header_in_file.pickle")
        fc = FromCERO(dd + "test_replace_har_header_in_file.yaml")
        fc.exec_procedures(df)

        test_har = DataTools.get_test_data("test_Forenew7.shk")
        tn = test_har.getHeaderArrayNames()

        gn = harpy.HarFileObj.loadFromDisk(
            filename="Forenew7.shk").getHeaderArrayNames()

        try:
            self.assertTrue(all([x == y for x, y in zip(tn, gn)]))
        except AssertionError as e:
            print("Test data headers: ", tn)
            print("Generated data headers: ", gn)
            raise e

        os.remove("test_Forenew7.shk")  # Tidy up
        os.remove("Forenew7.shk")  # Tidy up
Example #3
    def test_plotoutput(self):

        try:
            import seaborn
        except ImportError:
            raise unittest.SkipTest("PyQt4 not installed, and therefore ConCERO's plotting capabilities cannot be used.")

        nf = "AssociateProfessionals.png"

        # CERO path
        png = DataTools.get_test_data(TestPlotOutput._dd + "test_plotoutput.png")

        cero = CERO.read_xlsx(TestPlotOutput._dd + "test_plotoutput.xlsx")
        fc = FromCERO(TestPlotOutput._dd + "test_plotoutput.yaml")
        fc.exec_procedures(cero)
        plt = DataTools.get_test_data(nf)

        # These lines have been commented out because figures are very hard to compare accurately -
        # defaults seem to differ depending on operating system.
        # try:
        #     self.assertEqual(plt, png)
        # except AssertionError as e:
        #     raise e

        # Tidy up
        os.remove(os.path.relpath(nf))
Example #4
    def test_load_set(self):

        set = ["A", "B", "C"]
        ret_set = FromCERO._load_set(set)
        test_set = ["A", "B", "C"]

        self.assertEqual(ret_set, test_set)

        set = ["A,D", "B,E", "C,F"]
        ret_set = FromCERO._load_set(set)
        test_set = [("A", "D"), ("B", "E"), ("C", "F")]

        self.assertEqual(ret_set, test_set)

        set = ["A", ("B", "C")]
        ret_set = FromCERO._load_set(set)
        test_set = ["A", ("B", "C")]

        self.assertEqual(ret_set, test_set)

        set = ["A", ("B", "C"), "A", ("B", "C")]
        ret_set = FromCERO._load_set(set)
        test_set = ["A", ("B", "C"), "A", ("B", "C")]

        self.assertEqual(ret_set, test_set)
Example #5
    def test_cero2gallme(self):
        '''Tests CERO2GALLME conversion process. Tests procedure but not output to file.
        '''

        dd = os.path.dirname(__file__) + os.sep + "data" + os.sep

        df = DataTools.get_test_data(dd +
                                     r'test_cero_to_gallme_initialdata.pickle')

        c2g = FromCERO(dd + r'test_cero_to_gallme.yaml')
        c2g.exec_procedures(df)

        ser_a = df.loc[(("L_OUTPUT", "Electricity", "CAF"),),].iloc[0] + \
                df.loc[(("L_OUTPUT", "Electricity", "NAF"),),].iloc[0] + \
                df.loc[(("L_OUTPUT", "Electricity", "OSA"),),].iloc[0] + \
                df.loc[(("L_OUTPUT", "Electricity", "ZAF"),),].iloc[0]
        ser_b = df.loc[(("qo", "Electricity", "CAF"),),].iloc[0] + \
                df.loc[(("qo", "Electricity", "NAF"),),].iloc[0] + \
                df.loc[(("qo", "Electricity", "OSA"),),].iloc[0] + \
                df.loc[(("qo", "Electricity", "ZAF"),),].iloc[0]

        ser_b = ser_a * ser_b
        self.assertTrue(
            np.allclose(ser_b.values,
                        c2g.output_procedures['DemandYearly']["Value"].values))

        os.remove("gallme_input_data.gdx")  # Tidy up
Example #6
    def run(self) -> None:
        """
        Execute a scenario run.
        """

        self.cero = CERO.create_empty()

        ceros = [in_conf.create_cero() for in_conf in self["input_conf"]]
        if ceros:
            self.cero = CERO.combine_ceros(ceros)
            print("Successfully loaded scenario inputs as CERO.")

        FromCERO.dataframe_out(self.cero,
                               (self.get_name() + "_%03d_step_%02d.xlsx" %
                                (self["run_no"], 0)), "xlsx")

        for idx, model in enumerate(self["models"]):
            m_cero = model.run(self.cero)
            print(
                "Completed run of model (%s) at %s." %
                (model["name"], dt.datetime.now().strftime('%Y-%m-%d %H:%M')))

            # If output_conf is not defined for a model, then None is returned...
            if m_cero is None:
                continue

            if not CERO.is_cero(m_cero):
                raise TypeError(
                    "Object returned from model run is *not* of CERO format.")

            if model.get("export_mod_xlsx", self.get("export_mod_xlsx", True)):
                # By default, export model outputs automatically to xlsx files
                model_out_file = (self.get_name() + "_%03d_%s.xlsx" %
                                  (self["run_no"], model["name"]))
                print("Exporting output of %s to %s." %
                      (model["name"], model_out_file))
                m_cero.to_excel(model_out_file)

            self.cero = CERO.combine_ceros([self.cero, m_cero])

            if self.get("export_int_xlsx", True):
                # If true (default), export the intermediate steps to xlsx files
                isfn = (self.get_name() + "_%03d_step_%02d.xlsx" %
                        (self["run_no"], idx + 1))
                print("Exporting updated CERO to %s." % (isfn))
                self.cero.to_excel(isfn)

        for out_conf in self["output_conf"]:
            out_conf.exec_procedures(self.cero)

        print("Completed generation of scenario outputs.")
Example #7
    def is_valid(self, raise_exception=True):
        """
        Checks the validity of ``self`` as a ``Model`` object. Passing this check does not guarantee that runtime issues will not occur.
        :param bool raise_exception: If True, an exception is raised on check failure (as opposed to returning `False`).
        :return bool: Returns `True` if ``self`` is a valid ``Model``.
        """
        req_keys = ["name", "cmds", "input_conf", "output_conf"]

        if not all([k in self for k in req_keys]):

            msg = (
                "All models must have all of the keys: %s. Attempted to create model"
                + " with at least one of these keys missing.") % req_keys

            Model._logger.error(msg)
            if raise_exception:
                raise TypeError(msg)
            print(msg)
            return False

        for ic in self["input_conf"]:
            if not FromCERO.check_config(
                    ic, raise_exception=raise_exception, runtime=False):
                return False

        for oc in self["output_conf"]:
            if not ToCERO.check_config(
                    oc, raise_exception=raise_exception, runtime=False):
                return False

        return True
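For reference, a minimal sketch of a definition that passes this static check: all four required keys are present, and the empty input_conf/output_conf lists mean no FromCERO/ToCERO configuration checks are triggered. The Model constructor and module path are assumed from Example #21; the name and command are placeholders.

from concero.model import Model  # module path assumed

model = Model({"name": "demo_model",
               "cmds": ["echo running-the-model"],
               "input_conf": [],
               "output_conf": []})

assert model.is_valid()  # static check only; run_checks() covers runtime readiness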
Example #8
    def test_local_libfuncs(self):

        shutil.copy2(TestFromCERO_Procedure._dd + "test_local_libfuncs.py", os.getcwd())

        cero = pd.DataFrame.from_dict({"A": [1], "B": [2], "C": [3]},
                                      orient='index',
                                      dtype=pd.np.float32)
        cero.sort_index(inplace=True)
        cero.columns = pd.DatetimeIndex(data=pd.to_datetime([2018], format="%Y"))

        test_df = pd.DataFrame.from_dict({"A": [2], "B": [4], "C": [6]},
                                      orient='index',
                                      dtype=pd.np.float32)
        test_df.sort_index(inplace=True)
        test_df.columns = pd.DatetimeIndex(data=pd.to_datetime([2018], format="%Y"))

        proc = FromCERO._Procedure({"libfuncs": "test_local_libfuncs.py",
                             "ref_dir": ".",
                             "name": "test_set",
                             "inputs": ["A", "B", "C"],
                             "operations": [{"func": "test_local_recursive_op"}],
                             "file": "test_local_libfuncs.csv"})

        proc.exec_ops(cero)

        tc = ToCERO({"files": [{"file": os.path.join(os.path.abspath("."), "test_local_libfuncs.csv")}]})
        df = tc.create_cero()

        self.assertTrue(df.equals(test_df))

        os.remove("test_local_libfuncs.py")
        os.remove("test_local_libfuncs.csv")
Example #9
    def is_valid(self, raise_exception=True) -> bool:
        """ Performs static checks on ``self`` to ensure it is a valid Scenario object."""

        req_keys = ["name", "models", "input_conf", "output_conf"]

        if not all([k in self.keys() for k in req_keys]):
            raise TypeError(
                ("Not all required key-value pairs have been defined. " +
                 "It is necessary to define all of %s.") % req_keys)

        if not isinstance(self["models"], list):
            raise TypeError(
                "Scenario property \'models\' must be defined as a list.")

        for model in self["models"]:
            if not issubclass(type(model), Model):
                raise TypeError("Object '%s' is of type '%s', not 'Model'." %
                                (model, type(model)))

            if not model.check_config(raise_exception=raise_exception,
                                      runtime=False):
                return False

        for ic in self["input_conf"]:
            if not ToCERO.check_config(
                    ic, raise_exception=raise_exception, runtime=False):
                return False

        for oc in self["output_conf"]:
            if not FromCERO.check_config(
                    oc, raise_exception=raise_exception, runtime=False):
                return False

        return True
Example #10
    def test_load_set_inputs(self):

        cero = pd.DataFrame.from_dict({"A": [1, 2, 3, 4, 5],
                                       "B": [6, 4, 5, 6, 7],
                                       "C": [4, 5, 8, 7, 8],
                                       "D": [9, 10, 12, 11, 2]},
                                      orient="index",
                                      dtype=pd.np.float32)

        cero.columns = pd.DatetimeIndex(pd.to_datetime([2017, 2018, 2019, 2020, 2021], format="%Y"))
        cero.sort_index(inplace=True)

        proc = FromCERO._Procedure({"name": "test_proc",
                             "sets": {"a_set": ["A", "B", "C", "D"]},
                             "inputs": ["a_set"],
                             "operations": [{"func": "noop",
                                             "arrays": ["a_set"]}],
                             "file": "test_load_set_inputs.csv",
                             })
        proc.exec_ops(cero)

        tc = ToCERO({"files": [{"file": os.path.join(os.path.abspath("."), "test_load_set_inputs.csv")}]})
        df = tc.create_cero()

        self.assertTrue(df.equals(cero))

        os.remove("test_load_set_inputs.csv")
Example #11
    def test_load2(self):

        cero = pd.DataFrame.from_dict({"A": [1], "B": [2], "C": [3], "D": [4], "E": [5], "F": [6], }, orient='index',
                                      dtype=pd.np.float32)
        cero.sort_index(inplace=True)
        cero.columns = pd.DatetimeIndex(data=pd.to_datetime([2018], format="%Y"))

        fc = FromCERO(cfg.d_td + "test_fromcero_procedureload2.yaml")
        self.assertEqual(fc["procedures"][0]["name"], "Unnamed_proc_0")
        fc.exec_procedures(cero)

        df1 = pd.read_csv("procedureload2.csv", index_col=0)
        test_list = [1, 2, 3]
        df1_vals = [x[0] for x in df1.values.tolist()]
        self.assertTrue(all([np.isclose(x, y) for (x, y) in zip(test_list, df1_vals)]))
        test_list = ["A", "B", "C"]
        self.assertTrue(all([x == y for (x, y) in zip(test_list, df1.index.tolist())]))

        os.remove("procedureload2.csv")
Example #12
    def test_exec_ops(self):

        cero = pd.DataFrame.from_dict({"A": [1], "B": [2], "C": [3]}, orient='index',
                                      dtype=pd.np.float32)
        cero.sort_index(inplace=True)
        cero.columns = pd.DatetimeIndex(data=pd.to_datetime([2018], format="%Y"))

        proc = FromCERO._Procedure({"name": "test_output_cero",
                                    "file": "test_output_cero5.csv",
                                    "inputs": ["A", "B", "C"],
                                    "operations": [{"func": "merge"}],
                                    "ref_dir": "."})
Example #13
    def test_sets_and_mapping(self):

        cero = pd.DataFrame.from_dict(
            {
                "A": [1],
                "B": [2],
                "C": [3],
                "D": [4],
                "E": [5],
                "F": [6],
            },
            orient='index',
            dtype=pd.np.float32)
        cero.sort_index(inplace=True)
        cero.columns = pd.DatetimeIndex(
            data=pd.to_datetime([2018], format="%Y"))
        self.assertTrue(CERO.is_cero(cero))

        fc = FromCERO(TestFromCERO._dd + "test_fromcero_mapping.yaml")
        fc.exec_procedures(cero)

        df1 = pd.read_csv("test_fromcero_mapping1.csv", index_col=0)
        test_list = [1, 2, 3]
        df1_vals = [x[0] for x in df1.values.tolist()]
        self.assertTrue(
            all([np.isclose(x, y) for (x, y) in zip(test_list, df1_vals)]))
        test_list = ["A", "B", "C"]
        self.assertTrue(
            all([x == y for (x, y) in zip(test_list, df1.index.tolist())]))

        df2 = pd.read_csv("test_fromcero_mapping2.csv", index_col=0)
        test_list = [4, 5, 6]
        df2_vals = [x[0] for x in df2.values.tolist()]
        self.assertTrue(all([x == y for (x, y) in zip(test_list, df2_vals)]))
        test_list = ["G", "H", "I"]
        self.assertTrue(
            all([x == y for (x, y) in zip(test_list, df2.index.tolist())]))

        os.remove("test_fromcero_mapping1.csv")
        os.remove("test_fromcero_mapping2.csv")
Example #14
    def test_cero2har(self):

        output_har = r"test_har_out.har"
        shutil.copy2(TestCERO2HAR._dd + "Mdatnew7.har", output_har)

        # CERO path
        pickled_cero = TestCERO2HAR._dd + r'test_cero_to_har_initdata.pickle'

        df = DataTools.get_test_data(pickled_cero)
        h2c = FromCERO(TestCERO2HAR._dd + r'test_cero_to_har.yaml')
        h2c.exec_procedures(df)

        hf = harpy.HarFileObj.loadFromDisk(output_har)
        header = hf.getHeaderArrayObj(ha_name="MAR1")
        self.assertTrue(np.isclose(header["array"][0, 0, 0, 0, 0, 0], 2.44571))
        self.assertTrue(np.isclose(header["array"][0, 0, 0, 1, 0, 0],
                                   0.637938))
        self.assertTrue(np.isclose(header["array"][0, 0, 2, 0, 0, 0],
                                   0.381556))

        # Tidy up
        os.remove(output_har)
Example #15
    def test_cero2luto(self):
        '''Tests CERO2LUTO conversion process.
        '''

        dd = os.path.join(os.path.dirname(__file__), "data", "")

        df = DataTools.get_test_data(dd +
                                     "test_cero_to_luto_initialdata.pickle")
        c2l = FromCERO(dd + r'test_cero_to_luto.yaml')
        c2l.exec_procedures(df)

        for procedure in c2l["procedures"]:
            if isinstance(procedure, dict):
                gen_output_file = procedure.get('output_file',
                                                procedure.get('name'))
                output_file = procedure['name']
            elif isinstance(procedure, str):
                gen_output_file = procedure
                output_file = procedure
            gen = np.load(gen_output_file + '.npy')[0]
            old = np.load(dd + 'test_' + output_file + '.npy')
            self.assertTrue(all(np.isclose(gen, old)))
            os.remove(gen_output_file + '.npy')
Example #16
    def run_checks(self, raise_exception=True):
        """
        Performs runtime checks on ``self`` to ensure it is a valid Model object. Failure of runtime checks indicates that the model is not ready to run.

        :param bool raise_exception: If True, an exception is raised on check failure (as opposed to returning `False`).
        :return bool:
        """

        for ic in self["input_conf"]:
            if not FromCERO.check_config(
                    ic, raise_exception=raise_exception, runtime=True):
                return False

        return True
Example #17
    def test_cero2austimes(self):
        '''Tests CERO2AusTIMES conversion process.
        '''
        dd = os.path.dirname(__file__) + os.sep + "data" + os.sep

        df = DataTools.get_test_data(dd + r'test_cero_to_luto_initialdata.pickle') # Uses same data as LUTO
        c2a = FromCERO(df, conf_file=(dd + r'test_cero_to_austimes.yaml'))

        for series in c2a.series:
            if isinstance(series, dict):
                gen_output_file = series.get('output_file', series.get('name'))
                output_file = series['name']
            elif isinstance(series, str):
                gen_output_file = series
                output_file = series
            gen = np.load(gen_output_file + '.npy')
            old = np.load('test_' + output_file + '.npy')
            self.assertTrue(all(np.isclose(gen, old)))
Example #18
    def test_is_valid(self):

        proc = {"operations": "bad_ops_format"}

        with self.assertRaises(KeyError):
            FromCERO._Procedure.is_valid(proc)
        self.assertFalse(FromCERO._Procedure.is_valid(proc, raise_exception=False))

        proc = {"operations": "bad_ops_format", "name": "test_proc"}

        with self.assertRaises(TypeError):
            FromCERO._Procedure.is_valid(proc)
        self.assertFalse(FromCERO._Procedure.is_valid(proc, raise_exception=False))

        proc = {"operations": ["bad_op_type"], "name": "no_libfuncs_defined"}

        with self.assertRaises(TypeError):
            FromCERO._Procedure.is_valid(proc)
        self.assertFalse(FromCERO._Procedure.is_valid(proc, raise_exception=False))

        proc = {"operations": ["bad_op_type"], "name": "bad_libfuncs_type", "libfuncs": True}

        with self.assertRaises(TypeError):
            FromCERO._Procedure.is_valid(proc)
        self.assertFalse(FromCERO._Procedure.is_valid(proc, raise_exception=False))

        proc = {"operations": ["bad_op_type"], "name": "test_proc", "libfuncs": [libfuncs]}

        with self.assertRaises(TypeError):
            FromCERO._Procedure.is_valid(proc)
        self.assertFalse(FromCERO._Procedure.is_valid(proc, raise_exception=False))

        proc = {"operations": [{"func": "replace_har_header_in_file"}], "name": "test_proc", "libfuncs": [libfuncs]}

        self.assertTrue(FromCERO._Procedure.is_valid(proc))

        # Test accidental passing of FromCERO object
        proc = {"procedures": [{"operations": [{"func": "replace_har_header_in_file"}], "name": "test_proc", "libfuncs": [libfuncs]}]}
        with self.assertRaises(FromCERO._Procedure.InvalidProcedure):
            FromCERO._Procedure.is_valid(FromCERO._Procedure(proc))
        self.assertFalse(FromCERO._Procedure.is_valid(proc, raise_exception=False))
Example #19
    def test_load_sets_from_file(self):
        cero = pd.DataFrame.from_dict({"A": [1], "B": [2], "C": [3]}, orient='index',
                                      dtype=pd.np.float32)
        cero.sort_index(inplace=True)
        cero.columns = pd.DatetimeIndex(data=pd.to_datetime([2018], format="%Y"))

        proc = FromCERO._Procedure({"sets": {"a_set": cfg.d_td + "test_set.yaml"},
                                    "ref_dir": ".",
                                    "name": "test_set",
                                    "inputs": ["a_set"],
                                    "file": "test_sets.csv"})
        proc.exec_ops(cero)
        new_df = pd.read_csv("test_sets.csv", index_col=0)

        test_labels = ["A", "B"]
        test_vals = [[1], [2]]

        self.assertTrue(new_df.index.tolist() == test_labels)
        self.assertTrue(new_df.values.tolist() == test_vals)

        os.remove("test_sets.csv")
Example #20
    def test_output_cero(self):
        """
        Tests that the behaviour of the "outputs" argument is correct.
        """

        cero = pd.DataFrame.from_dict({"A": [1], "B": [2], "C": [3]}, orient='index',
                                      dtype=pd.np.float32)
        cero.sort_index(inplace=True)
        cero.columns = pd.DatetimeIndex(data=pd.to_datetime([2018], format="%Y"))

        proc = FromCERO._Procedure({"name": "test_output_cero",
                                    "file": "test_output_cero.csv",
                                    "inputs": ["A", "B", "C"],
                                    "ref_dir": ".",
                                    "outputs": ["A"]})

        """Because single item in outputs, error may be raised (but shouldn't) on attempting to export a Pandas.Series object instead of a Pandas.DataFrame object."""
        proc.exec_ops(cero)

        tc = ToCERO({"files": [{"file": os.path.join(os.path.abspath("."),"test_output_cero.csv")}]})
        df = tc.create_cero()

        self.assertTrue(cero.loc[["A"]].equals(df))

        # Another test...
        proc = FromCERO._Procedure({"name": "test_output_cero",
                                    "file": "test_output_cero2.csv",
                                    "inputs": ["A", "B", "C"],
                                    "ref_dir": ".",
                                    "outputs": True})

        """Because single item in outputs, error may be raised (but shouldn't) on attempting to export a Pandas.Series object instead of a Pandas.DataFrame object."""
        proc.exec_ops(cero)
        tc = ToCERO({"files": [{"file": os.path.join(os.path.abspath("."), "test_output_cero2.csv")}]})
        df = tc.create_cero()
        self.assertTrue(cero.equals(df))

        # Another test...
        proc = FromCERO._Procedure({"name": "test_output_cero",
                                    "file": "test_output_cero3.csv",
                                    "inputs": ["A", "B", "C"],
                                    "ref_dir": ".",
                                    "outputs": None})

        """Because single item in outputs, error may be raised (but shouldn't) on attempting to export a Pandas.Series object instead of a Pandas.DataFrame object."""
        proc.exec_ops(cero)
        self.assertFalse(os.path.isfile("test_output_cero3.csv"))

        # Another test...
        proc = FromCERO._Procedure({"name": "test_output_cero",
                                    "file": "test_output_cero4.csv",
                                    "inputs": ["A", "B", "C"],
                                    "ref_dir": ".",
                                    "outputs": False})

        """Because single item in outputs, error may be raised (but shouldn't) on attempting to export a Pandas.Series object instead of a Pandas.DataFrame object."""
        proc.exec_ops(cero)
        self.assertFalse(os.path.isfile("test_output_cero4.csv"))


        # Another test...
        proc = FromCERO._Procedure({"name": "test_output_cero",
                                    "file": "test_output_cero5.csv",
                                    "inputs": ["A", "B", "C"],
                                    "ref_dir": "."})

        """Because single item in outputs, error may be raised (but shouldn't) on attempting to export a Pandas.Series object instead of a Pandas.DataFrame object."""
        proc.exec_ops(cero)
        tc = ToCERO({"files": [{"file": os.path.join(os.path.abspath("."), "test_output_cero2.csv")}]})
        df = tc.create_cero()
        self.assertTrue(cero.equals(df))

        os.remove("test_output_cero.csv")
        os.remove("test_output_cero2.csv")
        os.remove("test_output_cero5.csv")
Example #21
    def __init__(self, model: dict, *args, parent: "Model" = None, **kwargs):
        """

        :param model: A `dict` containing ``Model`` options.
        :param args: Passed to superclass (`dict`) at initialisation.
        :param "Model" parent: If provided, inherits all key-value pairs from ``parent``.
        :param kwargs: Passed to superclass (`dict`) at initialisation.
        """

        defaults = {
            "name": "default_model_name",
            "cmds": [],
            "input_conf": [],
            "output_conf": [],
            "search_paths": [],
            "wd": None
        }
        defaults.update(model)

        if parent is None:
            parent = {}

        defaults.update(parent)

        super().__init__(defaults, *args, **kwargs)

        if self["name"] == "default_model_name":
            Model._logger.warning(
                "Model not named - default name '%s' assigned." % self["name"])

        # Command string processing
        if isinstance(self["cmds"], str):
            self["cmds"] = [self["cmds"]]

        if not self["cmds"]:
            Model._logger.info("No commands specified for model '%s'." %
                               defaults["name"])

        if not self["search_paths"]:
            self["search_paths"].append(os.path.abspath("."))

        if isinstance(self["input_conf"], str):
            self["input_conf"] = [self["input_conf"]]
        if isinstance(self["output_conf"], str):
            self["output_conf"] = [self["output_conf"]]

        # Locate and load configuration files...
        for idx, input_conf in enumerate(self["input_conf"]):
            self["input_conf"][idx] = self.find_file(input_conf)
            par_dict = {
                "ref_dir":
                os.path.abspath(os.path.dirname(self["input_conf"][idx]))
            }
            self["input_conf"][idx] = FromCERO(self["input_conf"][idx],
                                               parent=par_dict)

        # Locate and load configuration files...
        for idx, output_conf in enumerate(self["output_conf"]):
            self["output_conf"][idx] = self.find_file(output_conf)
            par_dict = {
                "search_paths":
                os.path.abspath(os.path.dirname(self["output_conf"][idx]))
            }
            self["output_conf"][idx] = ToCERO(self["output_conf"][idx],
                                              parent=par_dict)
Example #22
    def __init__(self, sc_def: dict, *args, parent: dict = None, **kwargs):
        """
        :param sc_def: A scenario definition object.
        :param args: Passed to the superclass (dict) as positional arguments at initialisation.
        :param kwargs: Passed to the superclass (dict) as keyword arguments at initialisation.
        """

        defaults = {
            "name": None,
            "run_no": None,
            "search_paths": [],
            "ref_dir": None,
            "models": [],
            "input_conf": [],
            "output_conf": []
        }

        if parent is None:
            parent = {}

        defaults.update(parent)

        try:
            assert isinstance(sc_def, dict)
        except AssertionError:
            raise TypeError(
                "Scenario definition provided in incorrect format - type %s instead of dict."
                % type(sc_def))

        defaults.update(sc_def)

        sc_def = defaults
        super().__init__(sc_def, *args, **kwargs)

        if not self.get("name"):
            self["name"] = "scenario_unnamed"
            self._logger.warn(
                "Scenario name has not been specified - scenario named '%s'." %
                self["name"])

        if not issubclass(type(self.get("run_no")), int):
            self["run_no"] = 1
            self._logger.info(
                "Scenario run_no (run number) has not been specified (or is not of integer type) - defaults to %s."
                % self["run_no"])

        if isinstance(self["search_paths"], str):
            self["search_paths"] = [os.path.abspath(self["search_paths"])]
        elif not self["search_paths"]:
            self["search_paths"].append(os.path.abspath("."))

        if self["ref_dir"] is None:
            self["ref_dir"] = os.path.abspath(".")

        model_parent = {
            "search_paths": self["search_paths"],
            "ref_dir": self["ref_dir"]
        }
        self["models"] = [
            Model(m, parent=model_parent) for m in self.get("models")
        ]

        if isinstance(self["input_conf"], str):
            self["input_conf"] = [self["input_conf"]]
        if isinstance(self["output_conf"], str):
            self["output_conf"] = [self["output_conf"]]

        # Load ToCERO conf
        par_dict = {"search_paths": self["search_paths"]}
        for idx, ic in enumerate(self["input_conf"]):
            self["input_conf"][idx] = self.find_file(ic)
            self["input_conf"][idx] = ToCERO(self["input_conf"][idx],
                                             parent=par_dict)

        # Load FromCERO conf
        par_dict = {"ref_dir": self["ref_dir"]}
        for idx, oc in enumerate(self["output_conf"]):
            self["output_conf"][idx] = self.find_file(oc)
            self["output_conf"][idx] = FromCERO(self["output_conf"][idx],
                                                parent=par_dict)

        self.is_valid()  # Check Scenario is valid
Example #23
    def test_export_to_gdx(self):

        import gdxpds

        cero = pd.DataFrame.from_dict({"A": [1], "B": [2], "C": [3], "D": [4], "E": [5], "F": [6], }, orient='index',
                                      dtype=pd.np.float32)
        cero.sort_index(inplace=True)
        cero.columns = pd.DatetimeIndex(data=pd.to_datetime([2018], format="%Y"))

        proc = FromCERO._Procedure({"file": "gdx_export.gdx",
                                    "output_kwargs": {"id": "test_gdx"}})
        proc.exec_ops(cero)

        # Read gdx
        dfs = gdxpds.to_dataframes("gdx_export.gdx")

        self.assertEqual(len(dfs), 2)
        self.assertTrue("test_gdx" in dfs)
        df1 = dfs["test_gdx"]
        df1.columns = ["Col1", "Values"]
        df1.set_index("Col1", inplace=True)

        test_list = [1, 2, 3, 4, 5, 6]
        df1_vals = df1.values.tolist()
        self.assertTrue(all([np.isclose(x, y) for (x, y) in zip(test_list, df1_vals)]))
        test_list = ["A", "B", "C", "D", "E", "F"]
        self.assertTrue(all([x == y for (x, y) in zip(test_list, df1.index.tolist())]))

        os.remove("gdx_export.gdx")

        # Test 2

        # Setup test dataframe
        df = pd.DataFrame(data=[[1, 2, 3],
                                [6, 4, 5],
                                [4, 5, 8],
                                [9, 10, 12]], dtype=pd.np.float32)
        df.columns = pd.DatetimeIndex(pd.to_datetime([2017, 2018, 2019], format="%Y"))
        df.index = pd.Index([("a_redundant_identifier", "solar"),
                             ("a_redundant_identifier", "wind"),
                             ("a_redundant_identifier", "oil"),
                             ("a_redundant_identifier", "gas")], tupleize_cols=False)
        df.sort_index(inplace=True)

        # Export dataframe
        proc = FromCERO._Procedure({"file": "gdx_file.gdx",
                                    "output_kwargs": {"id": "fuel_export",
                                                      "index_col": 1}})
        proc.exec_ops(df)

        # Read in created file
        dfs = gdxpds.to_dataframes("gdx_file.gdx")

        # Dissect the created file
        self.assertEqual(len(dfs), 3) # One more dimension than previous test, given variability by year
        self.assertTrue("fuel_export" in dfs)
        df1 = dfs["fuel_export"]
        df1.columns = ["Col1", "Year", "Values"]
        df1.set_index(["Col1"], inplace=True)
        df1 = df1.pivot(columns="Year", values="Values")
        df1 = df1.astype(int)

        # Perform tests...

        test_list = [[9, 10, 12], [4, 5, 8], [1, 2, 3], [6, 4, 5]]
        df1_vals = df1.values.tolist()
        self.assertTrue(test_list == df1_vals)

        test_list = ["gas", "oil", "solar", "wind"]
        self.assertTrue(test_list == df1.index.tolist())

        os.remove("gdx_file.gdx")
Example #24
    def test_rename(self):

        cero = pd.DataFrame.from_dict({"A": [1, 2, 3, 4, 5],
                                       "B": [6, 4, 5, 6, 7],
                                       "C": [4, 5, 8, 7, 8],
                                       "D": [9, 10, 12, 11, 2]},
                                      orient="index",
                                      dtype=pd.np.float32)

        cero.columns = pd.DatetimeIndex(pd.to_datetime([2017, 2018, 2019, 2020, 2021], format="%Y"))
        cero.sort_index(inplace=True)

        proc = FromCERO._Procedure({"name": "test_proc",
                                    "inputs": ["A", "B", "C", "D"]})
        proc.inputs = cero.copy()
        df = proc._exec_op({"func": "noop"})

        self.assertTrue(df.equals(cero))

        test_df = pd.DataFrame.from_dict({"Z": [6, 4, 5, 6, 7]},
                                            orient="index",
                                            dtype=pd.np.float32)
        test_df.columns = pd.DatetimeIndex(pd.to_datetime([2017, 2018, 2019, 2020, 2021], format="%Y"))
        test_df.sort_index(inplace=True)

        proc = FromCERO._Procedure({"name": "test_proc",
                                    "inputs": ["B"]})
        proc._set_inputs(cero)
        df_new = proc._exec_op({"func": "noop", "rename": "Z"})

        self.assertTrue(df.equals(cero))  # Check cero hasn't been modified
        self.assertTrue(df_new.equals(test_df))

        # Another test...
        test_df = pd.DataFrame.from_dict({"Z": [6, 4, 5, 6, 7],
                                          "Y": [4, 5, 8, 7, 8]},
                                         orient="index",
                                         dtype=pd.np.float32)
        test_df.columns = pd.DatetimeIndex(pd.to_datetime([2017, 2018, 2019, 2020, 2021], format="%Y"))
        test_df.sort_index(inplace=True)

        proc = FromCERO._Procedure({"name": "test_proc",
                                    "inputs": ["B", "C"]})
        proc._set_inputs(cero)
        df_new = proc._exec_op({"func": "noop", "rename": ["Z", "Y"]})

        self.assertTrue(df.equals(cero))  # Check cero hasn't been modified
        self.assertTrue(df_new.equals(test_df.loc[df_new.index.tolist()]))

        # Another test...
        test_df = pd.DataFrame.from_dict({"X": [6, 4, 5, 6, 7],
                                          "Z": [4, 5, 8, 7, 8]},
                                         orient="index",
                                         dtype=pd.np.float32)
        test_df.columns = pd.DatetimeIndex(pd.to_datetime([2017, 2018, 2019, 2020, 2021], format="%Y"))
        test_df.sort_index(inplace=True)

        proc = FromCERO._Procedure({"name": "test_proc",
                                    "inputs": ["B", "C"]})
        proc._set_inputs(cero)
        df_new = proc._exec_op({"func": "noop", "rename": {"C":"Z", "B":"X"}})

        self.assertTrue(df.equals(cero))  # Check cero hasn't been modified
        self.assertTrue(df_new.equals(test_df.loc[df_new.index.tolist()]))
Example #25
    def test_stitch_time(self):

        init = pd.DataFrame.from_dict({"A": [1], "B": [2], "C": [3],
                                       }, orient='index',
                                      dtype=pd.np.float32)
        init.sort_index(inplace=True)
        init.columns = pd.DatetimeIndex(data=pd.to_datetime([2018], format="%Y"))

        cero = pd.DataFrame.from_dict({"D": [100, 200], "E": [50, 0], "F": [-50, 200]},
                                      orient='index',
                                      dtype=pd.np.float32)
        cero.sort_index(inplace=True)
        cero.columns = pd.DatetimeIndex(data=pd.to_datetime([2019, 2020], format="%Y"))

        cero = CERO.combine_ceros([init, cero])

        test_df = pd.DataFrame.from_dict({"A": [1, 2, 6], "B": [2, 3, 3], "C": [3, 1.5, 4.5],
                                          "D": [pd.np.nan, 100, 200], "E": [pd.np.nan, 50, 0], "F": [pd.np.nan, -50, 200]
                                          },
                                      orient='index',
                                      dtype=pd.np.float32)
        test_df.sort_index(inplace=True)
        test_df.columns = pd.DatetimeIndex(data=pd.to_datetime([2018, 2019, 2020], format="%Y"))

        proc = FromCERO._Procedure({"name": "test_stitch_time",
                                    "file": "test_stitch_time.csv",
                                    "sets": {"a_set": ["A", "B", "C"],
                                             "b_set": ["D", "E", "F"]},
                                    "inputs": ["a_set", "b_set"],
                                    "operations": [{"func": "noop",
                                                    "rename": {"b_set": "a_set"}},
                                                   {"func": "pc_change",
                                                    "arrays": ["a_set"],
                                                    "init_cols": [2018]}],
                                    "ref_dir": "."})
        proc.exec_ops(cero)

        tc = ToCERO({"files": [{"file": os.path.join(os.path.abspath("."), "test_stitch_time.csv")}]})
        df = tc.create_cero()

        self.assertTrue(df.equals(test_df))

        os.remove("test_stitch_time.csv")

        proc = FromCERO._Procedure({"name": "test_stitch_time",
                                    "file": "test_stitch_time2.csv",
                                    "sets": {"a_set": ["A", "B", "C"],
                                             "b_set": ["D", "E", "F"]},
                                    "inputs": ["a_set", "b_set"],
                                    "operations": [{"func": "noop",
                                                    "rename": {"b_set": "a_set"}},
                                                   {"func": "pc_change",
                                                    "arrays": ["a_set"],
                                                    "init_cols": 2018}],
                                    "ref_dir": "."})
        proc.exec_ops(cero)

        tc = ToCERO({"files": [{"file": os.path.join(os.path.abspath("."), "test_stitch_time2.csv")}]})
        df = tc.create_cero()

        self.assertTrue(df.equals(test_df))

        os.remove("test_stitch_time2.csv")

        out_file = "test_stitch_time3.csv"
        proc = FromCERO._Procedure({"name": "test_stitch_time",
                                    "file": out_file,
                                    "sets": {"a_set": ["A", "B", "C"],
                                             "b_set": ["D", "E", "F"]},
                                    "inputs": ["a_set", "b_set"],
                                    "operations": [{"func": "noop",
                                                    "rename": {"b_set": "a_set"}},
                                                   {"func": "pc_change",
                                                    "arrays": ["a_set"],
                                                    "init_icols": 0}],
                                    "ref_dir": "."})
        proc.exec_ops(cero)

        tc = ToCERO({"files": [{"file": os.path.join(os.path.abspath("."), out_file)}]})
        df = tc.create_cero()

        self.assertTrue(df.equals(test_df))

        os.remove(out_file)

        out_file = "test_stitch_time4.csv"
        proc = FromCERO._Procedure({"name": "test_stitch_time",
                                    "file": out_file,
                                    "sets": {"a_set": ["A", "B", "C"],
                                             "b_set": ["D", "E", "F"]},
                                    "inputs": ["a_set", "b_set"],
                                    "operations": [{"func": "noop",
                                                    "rename": {"b_set": "a_set"}},
                                                   {"func": "pc_change",
                                                    "arrays": ["a_set"],
                                                    "init_icols": [0]}],
                                    "ref_dir": "."})
        proc.exec_ops(cero)

        tc = ToCERO({"files": [{"file": os.path.join(os.path.abspath("."), out_file)}]})
        df = tc.create_cero()

        self.assertTrue(df.equals(test_df))

        os.remove(out_file)

        out_file = "test_stitch_time5.csv"
        proc = FromCERO._Procedure({"name": "test_stitch_time",
                                    "file": out_file,
                                    "sets": {"a_set": ["A", "B", "C"],
                                             "b_set": ["D", "E", "F"]},
                                    "inputs": ["a_set", "b_set"],
                                    "operations": [{"func": "noop",
                                                    "rename": {"b_set": "a_set"}},
                                                   {"func": "pc_change",
                                                    "arrays": ["a_set"],
                                                    "init_icols": [-3]}],
                                    "ref_dir": "."})
        proc.exec_ops(cero)

        tc = ToCERO({"files": [{"file": os.path.join(os.path.abspath("."), out_file)}]})
        df = tc.create_cero()

        self.assertTrue(df.equals(test_df))

        os.remove(out_file)
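The expected values in test_df above are consistent with treating the b_set rows as year-on-year percentage changes that are applied cumulatively to the 2018 values of a_set; the init_cols/init_icols variants only change how that initial column is selected. A quick arithmetic check of that reading:

# Row "A" starts at 1.0 in 2018; row "D" supplies the changes 100 then 200 (percent).
a_2019 = 1.0 * (1 + 100 / 100)      # +100%  -> 2.0
a_2020 = a_2019 * (1 + 200 / 100)   # +200%  -> 6.0
assert (a_2019, a_2020) == (2.0, 6.0)           # matches test_df row "A": [1, 2, 6]

# Row "C" starts at 3.0; row "F" supplies -50 then 200 (percent).
c_2019 = 3.0 * (1 - 50 / 100)       # -50%   -> 1.5
c_2020 = c_2019 * (1 + 200 / 100)   # +200%  -> 4.5
assert (c_2019, c_2020) == (1.5, 4.5)           # matches test_df row "C": [3, 1.5, 4.5]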
Example #26
    def test_sets_and_mapping2(self):

        cero = pd.DataFrame.from_dict(
            {
                ("A", "1"): [1],
                ("A", "2"): [2],
                ("A", "3"): [3],
                ("B", "1"): [4],
                ("B", "2"): [5],
                ("B", "3"): [6],
                ("C", "1"): [7],
                ("C", "2"): [8],
                ("C", "3"): [9],
            },
            orient='index',
            dtype=pd.np.float32)
        cero.sort_index(inplace=True)
        cero.columns = pd.DatetimeIndex(
            data=pd.to_datetime([2018], format="%Y"))
        self.assertTrue(CERO.is_cero(cero))

        fc = FromCERO(TestFromCERO._dd + "test_fromcero_mapping2.yaml")
        fc.exec_procedures(cero)

        tc = ToCERO({
            "files": [{
                "file": "test_fromcero_complexmapping1.xlsx",
                "sheet": "CERO",
                "index_col": [0, 1]
            }]
        })
        df1 = tc.create_cero()
        test_list = list(range(1, 10))
        df1_vals = [x[0] for x in df1.values.tolist()]
        self.assertTrue(
            all([np.isclose(x, y) for (x, y) in zip(test_list, df1_vals)]))
        test_list = [("G", "1"), ("G", "2"), ("G", "3"), ("H", "1"),
                     ("H", "2"), ("H", "3"), ("I", "1"), ("I", "2"),
                     ("I", "3")]
        self.assertTrue(
            all([x == y for (x, y) in zip(test_list, df1.index.tolist())]))

        tc = ToCERO({
            "files": [{
                "file": "test_fromcero_complexmapping2.xlsx",
                "sheet": "CERO",
                "index_col": [0, 1]
            }]
        })
        df1 = tc.create_cero()
        test_list = list(range(1, 10))
        df1_vals = [x[0] for x in df1.values.tolist()]
        self.assertTrue(
            all([np.isclose(x, y) for (x, y) in zip(test_list, df1_vals)]))
        test_list = [("A", "G"), ("A", "H"), ("A", "I"), ("B", "G"),
                     ("B", "H"), ("B", "I"), ("C", "G"), ("C", "H"),
                     ("C", "I")]
        self.assertTrue(
            all([x == y for (x, y) in zip(test_list, df1.index.tolist())]))

        os.remove("test_fromcero_complexmapping1.xlsx")
        os.remove("test_fromcero_complexmapping2.xlsx")