Example 1
    def check_validity(self, backend):
        super(GridStateFromFileWithForecasts, self).check_validity(backend)
        at_least_one = False

        if self.load_p_forecast is not None:
            if self.load_p_forecast.shape[1] != backend.n_load:
                raise IncorrectNumberOfLoads("for the active part. It should be {} but is in fact {}"
                                             "".format(backend.n_load, self.load_p_forecast.shape[1]))
            at_least_one = True

        if self.load_q_forecast is not None:
            if self.load_q_forecast.shape[1] != backend.n_load:
                raise IncorrectNumberOfLoads("for the reactive part. It should be {} but is in fact {}"
                                             "".format(backend.n_load, self.load_q_forecast.shape[1]))
            at_least_one = True

        if self.prod_p_forecast is not None:
            if self.prod_p_forecast.shape[1] != backend.n_gen:
                raise IncorrectNumberOfGenerators("for the active part. It should be {} but is in fact {}"
                                                  "".format(backend.n_gen, self.prod_p_forecast.shape[1]))
            at_least_one = True

        if self.prod_v_forecast is not None:
            if self.prod_v_forecast.shape[1] != backend.n_gen:
                raise IncorrectNumberOfGenerators("for the voltage part. It should be {} but is in fact {}"
                                                  "".format(backend.n_gen, self.prod_v_forecast.shape[1]))
            at_least_one = True

        if self.maintenance_forecast is not None:
            if self.maintenance_forecast.shape[1] != backend.n_line:
                raise IncorrectNumberOfLines("for the maintenance. It should be {} but is in fact {}"
                                             "".format(backend.n_line, self.maintenance_forecast.shape[1]))
            at_least_one = True

        if not at_least_one:
            raise ChronicsError("You used a class that reads forecasted data, yet there is no forecasted data in "
                                "\"{}\". Please fall back to using class \"GridStateFromFile\" instead of "
                                "\"{}\"".format(self.path, type(self)))

        for name_arr, arr in zip(["load_q", "load_p", "prod_v", "prod_p", "maintenance"],
                                 [self.load_q_forecast, self.load_p_forecast, self.prod_v_forecast,
                                  self.prod_p_forecast, self.maintenance_forecast]):
            if arr is not None:
                if self.chunk_size is None:
                    if arr.shape[0] < self.n_:
                        raise EnvError("Array for forecast {}_forecast has fewer rows than load_p. "
                                       "The chronics cannot be loaded properly.".format(name_arr))
Example 2
    def _assert_correct(self, dict_convert, order_backend):
        len_backend = len(order_backend)
        len_dict_keys = len(dict_convert)
        vals = set(dict_convert.values())
        len_dict_values = len(vals)

        if len_dict_keys != len_backend:
            err_msg = "Conversion mismatch between backend data {} elements and converter data {} (keys)"
            raise IncorrectNumberOfElements(
                err_msg.format(len_backend, len_dict_keys))
        if len_dict_values != len_backend:
            err_msg = "Conversion mismatch between backend data {} elements and converter data {} (values)"
            raise IncorrectNumberOfElements(
                err_msg.format(len_backend, len_dict_values))

        for el in order_backend:
            if el not in vals:
                raise ChronicsError(
                    "Impossible to find element \"{}\" in the original converter data"
                    .format(el))
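
In other words, _assert_correct requires the conversion dictionary to be a bijection onto the backend names: as many keys as backend elements, as many distinct values, and every backend name covered. A toy illustration (all names invented for the example):

    # a mapping whose values collapse fails the second and third checks
    order_backend = ["load_0", "load_1"]
    bad_mapping = {"l0": "load_0", "l1": "load_0"}

    vals = set(bad_mapping.values())
    print(len(bad_mapping) == len(order_backend))   # True: key count matches
    print(len(vals) == len(order_backend))          # False: only one distinct value
    print(all(el in vals for el in order_backend))  # False: "load_1" is not covered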
Example 3
    def initialize(self, order_backend_loads, order_backend_prods, order_backend_lines, order_backend_subs,
                   names_chronics_to_backend=None):
        self.n_gen = len(order_backend_prods)
        self.n_load = len(order_backend_loads)
        self.n_line = len(order_backend_lines)

        self.names_chronics_to_backend = copy.deepcopy(names_chronics_to_backend)
        if self.names_chronics_to_backend is None:
            self.names_chronics_to_backend = {}
        if not "loads" in self.names_chronics_to_backend:
            self.names_chronics_to_backend["loads"] = {k: k for k in order_backend_loads}
        else:
            self._assert_correct(self.names_chronics_to_backend["loads"], order_backend_loads)
        if not "prods" in self.names_chronics_to_backend:
            self.names_chronics_to_backend["prods"] = {k: k for k in order_backend_prods}
        else:
            self._assert_correct(self.names_chronics_to_backend["prods"], order_backend_prods)
        if not "lines" in self.names_chronics_to_backend:
            self.names_chronics_to_backend["lines"] = {k: k for k in order_backend_lines}
        else:
            self._assert_correct(self.names_chronics_to_backend["lines"], order_backend_lines)
        if not "subs" in self.names_chronics_to_backend:
            self.names_chronics_to_backend["subs"] = {k: k for k in order_backend_subs}
        else:
            self._assert_correct(self.names_chronics_to_backend["subs"], order_backend_subs)

        read_compressed = ".csv"
        if not os.path.exists(os.path.join(self.path, "_N_loads_p.csv")):
            # try to read compressed data
            if os.path.exists(os.path.join(self.path, "_N_loads_p.csv.bz2")):
                read_compressed = ".csv.bz2"
            elif os.path.exists(os.path.join(self.path, "_N_loads_p.zip")):
                read_compressed = ".zip"
            elif os.path.exists(os.path.join(self.path, "_N_loads_p.csv.gzip")):
                read_compressed = ".csv.gzip"
            elif os.path.exists(os.path.join(self.path, "_N_loads_p.csv.xz")):
                read_compressed = ".csv.xz"
            else:
                raise RuntimeError(
                    "GridStateFromFile: unable to locate the data files that should be at \"{}\"".format(self.path))
        load_p = pd.read_csv(os.path.join(self.path, "_N_loads_p{}".format(read_compressed)), sep=self.sep)
        load_q = pd.read_csv(os.path.join(self.path, "_N_loads_q{}".format(read_compressed)), sep=self.sep)
        prod_p = pd.read_csv(os.path.join(self.path, "_N_prods_p{}".format(read_compressed)), sep=self.sep)
        prod_v = pd.read_csv(os.path.join(self.path, "_N_prods_v{}".format(read_compressed)), sep=self.sep)
        hazards = pd.read_csv(os.path.join(self.path, "hazards{}".format(read_compressed)), sep=self.sep)
        maintenance = pd.read_csv(os.path.join(self.path, "maintenance{}".format(read_compressed)), sep=self.sep)

        order_backend_loads = {el: i for i, el in enumerate(order_backend_loads)}
        order_backend_prods = {el: i for i, el in enumerate(order_backend_prods)}
        order_backend_lines = {el: i for i, el in enumerate(order_backend_lines)}

        order_chronics_load_p = np.array([order_backend_loads[self.names_chronics_to_backend["loads"][el]]
                                          for el in load_p.columns]).astype(dt_int)
        order_backend_load_q = np.array([order_backend_loads[self.names_chronics_to_backend["loads"][el]]
                                         for el in load_q.columns]).astype(dt_int)
        order_backend_prod_p = np.array([order_backend_prods[self.names_chronics_to_backend["prods"][el]]
                                         for el in prod_p.columns]).astype(dt_int)
        order_backend_prod_v = np.array([order_backend_prods[self.names_chronics_to_backend["prods"][el]]
                                         for el in prod_v.columns]).astype(dt_int)
        order_backend_hazards = np.array([order_backend_lines[self.names_chronics_to_backend["lines"][el]]
                                          for el in hazards.columns]).astype(dt_int)
        order_backend_maintenance = np.array([order_backend_lines[self.names_chronics_to_backend["lines"][el]]
                                              for el in maintenance.columns]).astype(dt_int)

        self.load_p = copy.deepcopy(load_p.values[:, np.argsort(order_chronics_load_p)])
        self.load_q = copy.deepcopy(load_q.values[:, np.argsort(order_backend_load_q)])
        self.prod_p = copy.deepcopy(prod_p.values[:, np.argsort(order_backend_prod_p)])
        self.prod_v = copy.deepcopy(prod_v.values[:, np.argsort(order_backend_prod_v)])
        self.hazards = copy.deepcopy(hazards.values[:, np.argsort(order_backend_hazards)])
        self.maintenance = copy.deepcopy(maintenance.values[:, np.argsort(order_backend_maintenance)])

        # date and time
        datetimes_ = pd.read_csv(os.path.join(self.path, "_N_datetimes{}".format(read_compressed)), sep=self.sep)
        self.start_datetime = datetime.strptime(datetimes_.iloc[0, 0], "%Y-%b-%d")

        # there are maintenance and hazards only if the value in the file is not 0.
        self.maintenance = self.maintenance != 0.
        self.hazards = self.hazards != 0.

        self.curr_iter = 0
        if self.max_iter == -1:
            # if the number of maximum time step is not set yet, we set it to be the number of
            # data in the chronics (number of rows of the files) -1.
            # the -1 is present because the initial grid state doesn't count as a "time step" but is read
            # from these data.
            self.max_iter = self.load_p.shape[0]-1

        load_p = pd.read_csv(os.path.join(self.path, "_N_loads_p_planned{}".format(read_compressed)), sep=self.sep)
        load_q = pd.read_csv(os.path.join(self.path, "_N_loads_q_planned{}".format(read_compressed)), sep=self.sep)
        prod_p = pd.read_csv(os.path.join(self.path, "_N_prods_p_planned{}".format(read_compressed)), sep=self.sep)
        prod_v = pd.read_csv(os.path.join(self.path, "_N_prods_v_planned{}".format(read_compressed)), sep=self.sep)
        maintenance = pd.read_csv(os.path.join(self.path, "maintenance{}".format(read_compressed)),
                                  sep=self.sep)

        # the name-to-index mappings (order_backend_loads, order_backend_prods,
        # order_backend_lines) built above are reused for the forecast data

        order_chronics_load_p = np.array([order_backend_loads[self.names_chronics_to_backend["loads"][el]]
                                          for el in load_p.columns]).astype(dt_int)
        order_backend_load_q = np.array([order_backend_loads[self.names_chronics_to_backend["loads"][el]]
                                         for el in load_q.columns]).astype(dt_int)
        order_backend_prod_p = np.array([order_backend_prods[self.names_chronics_to_backend["prods"][el]]
                                         for el in prod_p.columns]).astype(dt_int)
        order_backend_prod_v = np.array([order_backend_prods[self.names_chronics_to_backend["prods"][el]]
                                         for el in prod_v.columns]).astype(dt_int)
        order_backend_maintenance = np.array([order_backend_lines[self.names_chronics_to_backend["lines"][el]]
                                              for el in maintenance.columns]).astype(dt_int)

        self.load_p_forecast = copy.deepcopy(load_p.values[:, np.argsort(order_chronics_load_p)])
        self.load_q_forecast = copy.deepcopy(load_q.values[:, np.argsort(order_backend_load_q)])
        self.prod_p_forecast = copy.deepcopy(prod_p.values[:, np.argsort(order_backend_prod_p)])
        self.prod_v_forecast = copy.deepcopy(prod_v.values[:, np.argsort(order_backend_prod_v)])
        self.maintenance_forecast = copy.deepcopy(maintenance.values[:, np.argsort(order_backend_maintenance)])

        # compute, for each powerline, the time until the next maintenance and the duration of maintenance and hazards
        self.maintenance_time = np.zeros(shape=(self.load_p.shape[0], self.n_line), dtype=dt_int) - 1
        self.maintenance_duration = np.zeros(shape=(self.load_p.shape[0], self.n_line), dtype=dt_int)
        self.hazard_duration = np.zeros(shape=(self.load_p.shape[0], self.n_line), dtype=dt_int)
        for line_id in range(self.n_line):
            self.maintenance_time[:, line_id] = self.get_maintenance_time_1d(self.maintenance[:, line_id])
            self.maintenance_duration[:, line_id] = self.get_maintenance_duration_1d(self.maintenance[:, line_id])
            self.hazard_duration[:, line_id] = self.get_maintenance_duration_1d(self.hazards[:, line_id])

        # a maintenance is forecast only if the value in the file is not 0.
        self.maintenance_forecast = self.maintenance_forecast != 0.

        self.curr_iter = 0
        if self.maintenance is not None:
            n_ = self.maintenance.shape[0]
        elif self.hazards is not None:
            n_ = self.hazards.shape[0]
        else:
            n_ = None
            for fn in ["prod_p", "load_p", "prod_v", "load_q"]:
                ext_ = self._get_fileext(fn)
                if ext_ is not None:
                    n_ = self._file_len(os.path.join(self.path, "{}{}".format(fn, ext_)), ext_)
                    break
            if n_ is None:
                raise ChronicsError("No files are found in directory \"{}\". If you don't want to load any chronics,"
                                    " use  \"ChangeNothing\" and not \"{}\" to load chronics."
                                    "".format(self.path, type(self)))
        self.n_ = n_  # total number of rows in the chronics files
        self.tmp_max_index = load_p.shape[0]
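
The column reordering above relies on a small numpy idiom: build an array order that maps each csv column to its backend index, then np.argsort(order) gives the permutation that puts the columns in backend order. A self-contained sketch (data invented for the example):

    import numpy as np

    order = np.array([2, 0, 1])        # csv column i belongs at backend slot order[i]
    data = np.array([[20., 0., 10.]])  # one row, columns in csv order

    reordered = data[:, np.argsort(order)]
    print(reordered)  # [[ 0. 10. 20.]] -- columns now in backend order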
Example 4
    def split_and_save(self, datetime_beg, datetime_end, path_out):
        """
        You can use this function to save the values of the chronics in a format that will be loadable
        by :class:`GridStateFromFile`

        Notes
        -----
        Prefer using :func:`Multifolder.split_and_save`, which handles multiple chronics.
        A hypothetical call is sketched after this example.

        Parameters
        ----------
        datetime_beg: ``str``
            Time stamp of the beginning of the data you want to save (time stamp in "%Y-%m-%d %H:%M"
            format)

        datetime_end: ``str``
            Time stamp of the end of the data you want to save (time stamp in "%Y-%m-%d %H:%M"
            format)

        path_out: ``str``
            Location where to save the data

        """
        # work on a copy of myself
        tmp = copy.deepcopy(self)
        datetime_beg = self._convert_datetime(datetime_beg)
        datetime_end = self._convert_datetime(datetime_end)

        nb_rows = datetime_end - datetime_beg
        nb_rows = nb_rows.total_seconds()
        nb_rows = int(nb_rows / self.time_interval.total_seconds()) + 1
        if nb_rows <= 0:
            raise ChronicsError(
                "Invalid time step to be extracted. Make sure \"datetime_beg\" is lower than "
                "\"datetime_end\" {} - {}".format(datetime_beg, datetime_end))

        # prepare folder
        if not os.path.exists(path_out):
            os.mkdir(path_out)

        # skip until datetime_beg starts
        curr_dt = tmp.current_datetime
        if curr_dt > datetime_beg:
            warnings.warn(
                "split_and_save: you ask for a beginning of the extraction of the chronics after the "
                "current datetime of it. If they ever existed, the data in the chronics prior to {}"
                "will be ignored".format(curr_dt))
        # in the chronics we load the first row to initialize the data, so here we stop just a bit before that
        datetime_start = datetime_beg - self.time_interval
        while curr_dt < datetime_start:
            curr_dt, *_ = tmp.load_next()
        real_init_dt = curr_dt
        arrays = self._init_res_split(nb_rows)
        i = 0
        while curr_dt < datetime_end:
            self._update_res_split(i, tmp, *arrays)
            curr_dt, *_ = tmp.load_next()
            i += 1
        if i < nb_rows:
            warnings.warn(
                "split_and_save: chronics goes up to {} but you want to split it up to {}. Results "
                "has been troncated".format(curr_dt, datetime_end))

        arrays = self._clean_arrays(i, *arrays)
        nms = self._get_name_arrays_for_saving()
        orders_columns = self._get_colorder_arrays_for_saving()
        for el, nm, colnames in zip(arrays, nms, orders_columns):
            nm = "{}{}".format(nm, ".csv.bz2")
            self._save_array(el, path_out, nm, colnames)

        with open(os.path.join(path_out, "start_datetime.info"), "w") as f:
            f.write("{:%Y-%m-%d %H:%M}\n".format(real_init_dt))

        tmp_for_time_delta = datetime(
            year=2018, month=1, day=1, hour=0, minute=0,
            second=0) + self.time_interval
        with open(os.path.join(path_out, "time_interval.info"), "w") as f:
            f.write("{:%H:%M}\n".format(tmp_for_time_delta))
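
A hypothetical call, assuming chronics is an already-initialized GridStateFromFile instance (the timestamps and the output path are illustrative only):

    chronics.split_and_save(datetime_beg="2019-01-01 00:00",
                            datetime_end="2019-01-07 23:55",
                            path_out="/tmp/extracted_chronics")
    # path_out now holds one *.csv.bz2 file per array, plus the
    # "start_datetime.info" and "time_interval.info" files written above.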
Example 5
    def check_validity(self, backend):
        at_least_one = False
        if self.load_p is not None:
            if self.load_p.shape[1] != backend.n_load:
                msg_err = "for the active part. It should be {} but is in fact {}"
                raise IncorrectNumberOfLoads(
                    msg_err.format(backend.n_load, self.load_p.shape[1]))
            at_least_one = True

        if self.load_q is not None:
            if self.load_q.shape[1] != backend.n_load:
                msg_err = "for the reactive part. It should be {} but is in fact {}"
                raise IncorrectNumberOfLoads(
                    msg_err.format(backend.n_load, self.load_q.shape[1]))
            at_least_one = True
        if self.prod_p is not None:
            if self.prod_p.shape[1] != backend.n_gen:
                msg_err = "for the active part. It should be {} but is in fact {}"
                raise IncorrectNumberOfGenerators(
                    msg_err.format(backend.n_gen, self.prod_p.shape[1]))
            at_least_one = True

        if self.prod_v is not None:
            if self.prod_v.shape[1] != backend.n_gen:
                msg_err = "for the voltage part. It should be {} but is in fact {}"
                raise IncorrectNumberOfGenerators(
                    msg_err.format(backend.n_gen, self.prod_v.shape[1]))
            at_least_one = True

        if self.hazards is not None:
            if self.hazards.shape[1] != backend.n_line:
                msg_err = "for the outage. It should be {} but is in fact {}"
                raise IncorrectNumberOfLines(
                    msg_err.format(backend.n_line, self.hazards.shape[1]))
            at_least_one = True

        if self.maintenance is not None:
            if self.maintenance.shape[1] != backend.n_line:
                msg_err = "for the maintenance. It should be {} but is in fact {}"
                raise IncorrectNumberOfLines(
                    msg_err.format(backend.n_line, self.maintenance.shape[1]))
            at_least_one = True

        if self.maintenance_time is not None:
            if self.maintenance_time.shape[1] != backend.n_line:
                msg_err = "for the maintenance times. It should be {} but is in fact {}"
                raise IncorrectNumberOfLines(
                    msg_err.format(backend.n_line,
                                   self.maintenance_time.shape[1]))
            at_least_one = True

        if self.maintenance_duration is not None:
            if self.maintenance_duration.shape[1] != backend.n_line:
                msg_err = "for the maintenance durations. It should be {} but is in fact {}"
                raise IncorrectNumberOfLines(
                    msg_err.format(backend.n_line,
                                   self.maintenance_duration.shape[1]))
            at_least_one = True

        if self.hazard_duration is not None:
            if self.hazard_duration.shape[1] != backend.n_line:
                msg_err = "for the hazard durations. It should be {} but is in fact {}"
                raise IncorrectNumberOfLines(
                    msg_err.format(backend.n_line,
                                   self.hazard_duration.shape[1]))
            at_least_one = True

        if not at_least_one:
            raise ChronicsError(
                "No files are found in directory \"{}\". If you don't want to load any chronics, use "
                "\"ChangeNothing\" and not \"{}\" to load chronics."
                "".format(self.path, type(self)))

        for name_arr, arr in zip([
                "load_q", "load_p", "prod_v", "prod_p", "maintenance",
                "hazards", "maintenance time", "maintenance duration",
                "hazard duration"
        ], [
                self.load_q, self.load_p, self.prod_v, self.prod_p,
                self.maintenance, self.hazards, self.maintenance_time,
                self.maintenance_duration, self.hazard_duration
        ]):
            if arr is not None:
                if self.chunk_size is None:
                    if arr.shape[0] != self.n_:
                        msg_err = "Array {} has not the same number of rows than the maintenance. " \
                                  "The chronics cannot be loaded properly."
                        raise EnvError(msg_err.format(name_arr))

        if self.max_iter > 0:
            if self.max_iter > self.n_:
                msg_err = "Files count {} rows and you ask this episode to last at {} timestep."
                raise InsufficientData(msg_err.format(self.n_, self.max_iter))
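
The final guard simply compares the requested episode length with the number of rows available; a toy version of it (numbers invented):

    n_rows = 288     # rows available in the chronics files (illustrative)
    max_iter = 500   # requested episode length

    if max_iter > n_rows:  # mirrors the InsufficientData raised above
        raise ValueError("The files contain {} rows, but you ask this episode "
                         "to last {} time steps.".format(n_rows, max_iter))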
Example 6
    def initialize(self,
                   order_backend_loads,
                   order_backend_prods,
                   order_backend_lines,
                   order_backend_subs,
                   names_chronics_to_backend=None):
        """
        INTERNAL

        .. warning:: /!\\\\ Internal, do not use unless you know what you are doing /!\\\\

            Called at the creation of the environment.

        In this function, the numpy arrays are read from the csv files using the pandas.DataFrame engine.

        In order to be valid, the folder located at :attr:`GridStateFromFile.path` can contain:

          - a file named "load_p.csv" used to initialize :attr:`GridStateFromFile.load_p`
          - a file named "load_q.csv" used to initialize :attr:`GridStateFromFile.load_q`
          - a file named "prod_p.csv" used to initialize :attr:`GridStateFromFile.prod_p`
          - a file named "prod_v.csv" used to initialize :attr:`GridStateFromFile.prod_v`
          - a file named "hazards.csv" used to initialize :attr:`GridStateFromFile.hazards`
          - a file named "maintenance.csv" used to initialize :attr:`GridStateFromFile.maintenance`

        All these csv must use the same separator, specified by :attr:`GridStateFromFile.sep`.
        If one of these files is missing, it is equivalent to the "change nothing" class.

        If a file named "start_datetime.info" is present, then it will be used to initialized
        :attr:`GridStateFromFile.start_datetime`. If this file exists, it should count only one row, with the
        initial datetime in the "%Y-%m-%d %H:%M" format.

        If a file named "time_interval.info" is present, then it will be used to initialized the
        :attr:`GridStateFromFile.time_interval` attribute.  If this file exists, it should count only one row, with the
        initial datetime in the "%H:%M" format. Only timedelta composed of hours and minutes are supported (time delta
        cannot go above 23 hours 55 minutes and cannot be smaller than 0 hour 1 minutes)

        The first row of each csv gives the name of the object each column refers to. Either this name is
        present in the :class:`grid2op.Backend`, in which case no modification is performed, or it is not
        found in the backend, in which case the "names_chronics_to_backend" parameter must specify how to
        map it. See the help of :func:`GridValue.initialize` for more information about this dictionary.

        All files should have the same number of rows.

        Parameters
        ----------
        See the help of :func:`GridValue.initialize` for details about the parameters.

        """
        self.n_gen = len(order_backend_prods)
        self.n_load = len(order_backend_loads)
        self.n_line = len(order_backend_lines)

        self._order_backend_loads = order_backend_loads
        self._order_backend_prods = order_backend_prods
        self._order_backend_lines = order_backend_lines

        self.names_chronics_to_backend = copy.deepcopy(
            names_chronics_to_backend)
        if self.names_chronics_to_backend is None:
            self.names_chronics_to_backend = {}
        if not "loads" in self.names_chronics_to_backend:
            self.names_chronics_to_backend["loads"] = {
                k: k
                for k in order_backend_loads
            }
        else:
            self._assert_correct(self.names_chronics_to_backend["loads"],
                                 order_backend_loads)
        if not "prods" in self.names_chronics_to_backend:
            self.names_chronics_to_backend["prods"] = {
                k: k
                for k in order_backend_prods
            }
        else:
            self._assert_correct(self.names_chronics_to_backend["prods"],
                                 order_backend_prods)
        if not "lines" in self.names_chronics_to_backend:
            self.names_chronics_to_backend["lines"] = {
                k: k
                for k in order_backend_lines
            }
        else:
            self._assert_correct(self.names_chronics_to_backend["lines"],
                                 order_backend_lines)
        if not "subs" in self.names_chronics_to_backend:
            self.names_chronics_to_backend["subs"] = {
                k: k
                for k in order_backend_subs
            }
        else:
            self._assert_correct(self.names_chronics_to_backend["subs"],
                                 order_backend_subs)

        self._init_date_time()

        # read the data
        load_p_iter = self._get_data("load_p")
        load_q_iter = self._get_data("load_q")
        prod_p_iter = self._get_data("prod_p")
        prod_v_iter = self._get_data("prod_v")
        read_compressed = self._get_fileext("hazards")
        nrows = None
        if self.max_iter > 0:
            nrows = self.max_iter + 1

        if read_compressed is not None:
            hazards = pd.read_csv(os.path.join(
                self.path, "hazards{}".format(read_compressed)),
                                  sep=self.sep,
                                  nrows=nrows)
        else:
            hazards = None

        read_compressed = self._get_fileext("maintenance")
        if read_compressed is not None:
            maintenance = pd.read_csv(os.path.join(
                self.path, "maintenance{}".format(read_compressed)),
                                      sep=self.sep,
                                      nrows=nrows)
        else:
            maintenance = None

        # put the proper name in order
        order_backend_loads = {
            el: i
            for i, el in enumerate(order_backend_loads)
        }
        order_backend_prods = {
            el: i
            for i, el in enumerate(order_backend_prods)
        }
        order_backend_lines = {
            el: i
            for i, el in enumerate(order_backend_lines)
        }

        if self.chunk_size is None:
            load_p = load_p_iter
            load_q = load_q_iter
            prod_p = prod_p_iter
            prod_v = prod_v_iter
            if load_p is not None:
                self.tmp_max_index = load_p.shape[0]
            elif load_q is not None:
                self.tmp_max_index = load_q.shape[0]
            elif prod_p is not None:
                self.tmp_max_index = prod_p.shape[0]
            elif prod_v is not None:
                self.tmp_max_index = prod_v.shape[0]
            else:
                raise ChronicsError(
                    "No files are found in directory \"{}\". If you don't want to load any chronics,"
                    " use  \"ChangeNothing\" and not \"{}\" to load chronics."
                    "".format(self.path, type(self)))

        else:
            self._data_chunk = {
                "load_p": load_p_iter,
                "load_q": load_q_iter,
                "prod_p": prod_p_iter,
                "prod_v": prod_v_iter
            }
            load_p, load_q, prod_p, prod_v = self._get_next_chunk()

        # get the chronics in order
        order_chronics_load_p, order_backend_load_q, \
        order_backend_prod_p, order_backend_prod_v, \
        order_backend_hazards, order_backend_maintenance \
            = self._get_orders(load_p, load_q, prod_p, prod_v, hazards, maintenance,
                               order_backend_loads, order_backend_prods, order_backend_lines)

        # now "sort" the columns of each chunk of data
        self._order_load_p = np.argsort(order_chronics_load_p)
        self._order_load_q = np.argsort(order_backend_load_q)
        self._order_prod_p = np.argsort(order_backend_prod_p)
        self._order_prod_v = np.argsort(order_backend_prod_v)
        self._order_hazards = np.argsort(order_backend_hazards)
        self._order_maintenance = np.argsort(order_backend_maintenance)

        # retrieve total number of rows
        if maintenance is not None:
            n_ = maintenance.shape[0]
        elif hazards is not None:
            n_ = hazards.shape[0]
        else:
            n_ = None
            for fn in ["prod_p", "load_p", "prod_v", "load_q"]:
                ext_ = self._get_fileext(fn)
                if ext_ is not None:
                    n_ = self._file_len(
                        os.path.join(self.path, "{}{}".format(fn, ext_)), ext_)
                    break
            if n_ is None:
                raise ChronicsError(
                    "No files are found in directory \"{}\". If you don't want to load any chronics,"
                    " use  \"ChangeNothing\" and not \"{}\" to load chronics."
                    "".format(self.path, type(self)))
        self.n_ = n_  # total number of rows in the chronics files

        if self.max_iter > 0:
            self.n_ = self.max_iter + 1
        else:
            # if the number of maximum time step is not set yet, we set it to be the number of
            # data in the chronics (number of rows of the files) -1.
            # the -1 is present because the initial grid state doesn't count as a "time step" but is read
            # from these data.
            self.max_iter = self.n_ - 1

        self._init_attrs(load_p,
                         load_q,
                         prod_p,
                         prod_v,
                         hazards=hazards,
                         maintenance=maintenance)

        self.curr_iter = 0
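
When chunk_size is set, _get_data presumably relies on the chunked mode of pandas, where read_csv with a chunksize argument returns an iterator of DataFrame chunks instead of a single frame. A minimal sketch of that mode (the file name and chunk size are illustrative):

    import pandas as pd

    # with chunksize set, read_csv returns an iterator over DataFrame chunks
    reader = pd.read_csv("load_p.csv", sep=";", chunksize=100)
    first_chunk = next(reader)  # the first 100 rows as a DataFrame
    print(first_chunk.shape)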
Example 7
    def check_validity(self, backend):
        """
        A call to this method ensures that the data that will be sent to the current :class:`grid2op.Environment`
        can be properly understood by its :class:`grid2op.Backend`.
        This method checks that the dimensions of all vectors are consistent. (A hypothetical invocation is
        sketched after this example.)

        Parameters
        ----------
        backend: :class:`grid2op.Backend.Backend`
            The backend used by the :class:`grid2op.Environment.Environment`

        Returns
        -------
        ``None``
        """
        at_least_one = False
        if self.load_p is not None:
            if self.load_p.shape[1] != backend.n_load:
                msg_err = "for the active part. It should be {} but is in fact {}"
                raise IncorrectNumberOfLoads(
                    msg_err.format(backend.n_load, self.load_p.shape[1]))
            at_least_one = True

        if self.load_q is not None:
            if self.load_q.shape[1] != backend.n_load:
                msg_err = "for the reactive part. It should be {} but is in fact {}"
                raise IncorrectNumberOfLoads(
                    msg_err.format(backend.n_load, self.load_q.shape[1]))
            at_least_one = True
        if self.prod_p is not None:
            if self.prod_p.shape[1] != backend.n_gen:
                msg_err = "for the active part. It should be {} but is in fact {}"
                raise IncorrectNumberOfGenerators(
                    msg_err.format(backend.n_gen, self.prod_p.shape[1]))
            at_least_one = True

        if self.prod_v is not None:
            if self.prod_v.shape[1] != backend.n_gen:
                msg_err = "for the voltage part. It should be {} but is in fact {}"
                raise IncorrectNumberOfGenerators(
                    msg_err.format(backend.n_gen, self.prod_v.shape[1]))
            at_least_one = True

        if self.hazards is not None:
            if self.hazards.shape[1] != backend.n_line:
                msg_err = "for the outage. It should be {} but is in fact {}"
                raise IncorrectNumberOfLines(
                    msg_err.format(backend.n_line, self.hazards.shape[1]))
            at_least_one = True

        if self.maintenance is not None:
            if self.maintenance.shape[1] != backend.n_line:
                msg_err = "for the maintenance. It should be {} but is in fact {}"
                raise IncorrectNumberOfLines(
                    msg_err.format(backend.n_line, self.maintenance.shape[1]))
            at_least_one = True

        if self.maintenance_time is not None:
            if self.maintenance_time.shape[1] != backend.n_line:
                msg_err = "for the maintenance times. It should be {} but is in fact {}"
                raise IncorrectNumberOfLines(
                    msg_err.format(backend.n_line,
                                   self.maintenance_time.shape[1]))
            at_least_one = True

        if self.maintenance_duration is not None:
            if self.maintenance_duration.shape[1] != backend.n_line:
                msg_err = "for the maintenance durations. It should be {} but is in fact {}"
                raise IncorrectNumberOfLines(
                    msg_err.format(backend.n_line,
                                   self.maintenance_duration.shape[1]))
            at_least_one = True

        if self.hazard_duration is not None:
            if self.hazard_duration.shape[1] != backend.n_line:
                msg_err = "for the hazard durations. It should be {} but is in fact {}"
                raise IncorrectNumberOfLines(
                    msg_err.format(backend.n_line,
                                   self.hazard_duration.shape[1]))
            at_least_one = True

        if not at_least_one:
            raise ChronicsError(
                "No files are found in directory \"{}\". If you don't want to load any chronics, use "
                "\"ChangeNothing\" and not \"{}\" to load chronics."
                "".format(self.path, type(self)))

        for name_arr, arr in zip([
                "load_q", "load_p", "prod_v", "prod_p", "maintenance",
                "hazards", "maintenance time", "maintenance duration",
                "hazard duration"
        ], [
                self.load_q, self.load_p, self.prod_v, self.prod_p,
                self.maintenance, self.hazards, self.maintenance_time,
                self.maintenance_duration, self.hazard_duration
        ]):
            if arr is not None:
                if self.chunk_size is None:
                    if arr.shape[0] != self.n_:
                        msg_err = "Array {} has not the same number of rows of load_p. The chronics cannot be loaded properly."
                        raise EnvError(msg_err.format(name_arr))

        if self.max_iter > 0:
            if self.max_iter > self.n_:
                msg_err = "Files count {} rows and you ask this episode to last at {} timestep."
                raise InsufficientData(msg_err.format(self.n_, self.max_iter))
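
A hypothetical invocation, assuming env is an already-created grid2op environment (real_data is assumed to be the GridStateFromFile instance held by the chronics handler; check_validity normally runs automatically at environment creation):

    data = env.chronics_handler.real_data  # the underlying GridStateFromFile
    data.check_validity(env.backend)       # raises on any dimension mismatch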