def load_next(self):
    """
    Advance the chronics reader by one time step and return the data for it.

    Returns
    -------
    tuple
        ``(current_datetime, res, maintenance_time, maintenance_duration,
        hazard_duration, prod_v)`` where ``res`` is a dictionary holding the
        injections under key "injection" (when any of load_p / load_q / prod_p
        is available) and optionally the "maintenance" and "hazards" vectors.
        ``prod_v`` is returned separately (it is not part of "injection").

    Raises
    ------
    StopIteration
        When there is no more data to read, or when the maximum number of
        iterations has been reached.
    """
    self.current_index += 1

    # lazily load the next chunk from disk once the in-memory one is exhausted;
    # _load_next_chunk_in_memory raises StopIteration at the end of the data
    # (removed a redundant "except StopIteration as e: raise e" wrapper)
    if not self._data_in_memory():
        self._load_next_chunk_in_memory()

    if self.current_index >= self.tmp_max_index:
        raise StopIteration

    # max_iter <= 0 means "no limit on the number of iterations"
    if 0 < self.max_iter < self.curr_iter:
        raise StopIteration

    res = {}
    dict_ = {}
    prod_v = None
    if self.load_p is not None:
        dict_["load_p"] = 1.0 * self.load_p[self.current_index, :]
    if self.load_q is not None:
        dict_["load_q"] = 1.0 * self.load_q[self.current_index, :]
    if self.prod_p is not None:
        dict_["prod_p"] = 1.0 * self.prod_p[self.current_index, :]
    if self.prod_v is not None:
        # voltage setpoints are returned separately, not in the "injection" dict
        prod_v = 1.0 * self.prod_v[self.current_index, :]
    if dict_:
        res["injection"] = dict_

    if self.maintenance is not None:
        res["maintenance"] = self.maintenance[self.current_index, :]
    if self.hazards is not None:
        res["hazards"] = self.hazards[self.current_index, :]

    self.current_datetime += self.time_interval
    self.curr_iter += 1

    if self.maintenance_time is not None:
        maintenance_time = dt_int(
            1 * self.maintenance_time[self.current_index, :])
        maintenance_duration = dt_int(
            1 * self.maintenance_duration[self.current_index, :])
    else:
        # no maintenance data: -1 encodes "no maintenance planned"
        maintenance_time = np.full(self.n_line, fill_value=-1, dtype=dt_int)
        maintenance_duration = np.full(self.n_line, fill_value=0, dtype=dt_int)

    if self.hazard_duration is not None:
        hazard_duration = 1 * self.hazard_duration[self.current_index, :]
    else:
        hazard_duration = np.full(self.n_line, fill_value=-1, dtype=dt_int)

    return (self.current_datetime, res, maintenance_time,
            maintenance_duration, hazard_duration, prod_v)
def __init__(self):
    """Create an empty alarm-budget tracker; the environment fills in the configuration later."""
    # budget configuration (set by the environment)
    self._max_budget = None
    self._budget_per_ts = None
    self._alarm_cost = None
    self._current_budget = None
    self._init_budget = None
    # alarm history; -1 encodes "never happened so far"
    self._time_last_alarm_raised = dt_int(-1)
    self._time_last_successful_alarm_raised = dt_int(-1)
    self._last_alarm_raised = None
    self._last_successful_alarm_raised = None
    self._all_successful_alarms = []
def reset(self):
    """
    Reset the alarm budget to its initial state.

    Called by the environment each time a scenario is over.
    """
    # restore the budget to its starting value
    self._current_budget = self._init_budget
    # -1 encodes "no alarm raised yet in this scenario"
    self._time_last_alarm_raised = dt_int(-1)
    self._time_last_successful_alarm_raised = dt_int(-1)
    self._last_alarm_raised[:] = -1
    self._last_successful_alarm_raised[:] = -1
    # forget the alarms of the previous scenario
    self._all_successful_alarms = []
def initialize_space(self, init_space):
    """
    Initialize this converter from a continuous gym ``Box`` space, discretizing
    each component into ``self._nb_bins`` regularly spaced values.

    Parameters
    ----------
    init_space:
        The gym ``Box`` space to discretize. A ``RuntimeError`` is raised for
        any other space type.
    """
    if not isinstance(init_space, Box):
        raise RuntimeError("Impossible to convert a gym space of type {} to a discrete space"
                           " (it should be of "
                           "type space.Box)"
                           "".format(type(init_space)))
    min_ = init_space.low
    max_ = init_space.high
    self._ignored = min_ == max_  # which component are ignored
    self._res = min_
    # candidate values: _nb_bins points strictly inside [min_, max_] per component
    # (linspace over _nb_bins + 2 points, then the two endpoints are dropped)
    self._values = np.linspace(min_, max_, num=self._nb_bins+2)
    self._values = self._values[1:-1, :]  # the values that will be used when using #gym_to_glop
    # TODO there might a cleaner approach here
    # bin edges ("cuts"): every other point of a twice-finer linspace, i.e. the
    # midpoints between consecutive candidate values
    self._bins_size = np.linspace(min_, max_, num=2*self._nb_bins+1)
    self._bins_size = self._bins_size[2:-1:2, :]  # the values defining the "cuts"
    self._gen_idx = np.arange(self._bins_size.shape[-1])
    n_bins = np.ones(min_.shape[0], dtype=dt_int) * dt_int(self._nb_bins)
    n_bins[self._ignored] = 1  # if min and max are equal, i don't want to have multiple variable
    space = MultiDiscrete(n_bins)
    self.base_initialize(space=space, g2op_to_gym=None, gym_to_g2op=None)
def __init__(self, parameters_path=None):
    """
    Build an object representing the _parameters of the game.

    Parameters
    ----------
    parameters_path: ``str``, optional
        Path where to look for parameters.
    """
    # when True, overflowed powerlines are never disconnected automatically
    self.NO_OVERFLOW_DISCONNECTION = False

    # how many consecutive timesteps a powerline may stay in overflow before
    # the protections disconnect it
    self.NB_TIMESTEP_OVERFLOW_ALLOWED = dt_int(2)

    # waiting time (in timesteps) before a powerline that suffered a forced
    # disconnection can be reconnected
    self.NB_TIMESTEP_RECONNECTION = dt_int(10)

    # cooldowns (in timesteps) before a powerline / a substation topology
    # can be acted upon again
    self.NB_TIMESTEP_COOLDOWN_LINE = dt_int(0)
    self.NB_TIMESTEP_COOLDOWN_SUB = dt_int(0)

    # relative threshold (w.r.t. the thermal limit) above which a powerline is
    # disconnected instantly by the protections. For example, with a thermal
    # limit of 243 A and HARD_OVERFLOW_THRESHOLD = 2, the line trips
    # immediately above 2 * 243 = 486 A.
    self.HARD_OVERFLOW_THRESHOLD = dt_float(2.)

    # run the environment powerflows in DC mode (True) instead of AC (False)
    self.ENV_DC = False

    # same as ENV_DC, but for the forecast states.
    # DEPRECATED: use "change_forecast_parameters(new_param)" with "new_param.ENV_DC=..."
    self.FORECAST_DC = False

    # maximum number of substations whose topology can change in one action
    self.MAX_SUB_CHANGED = dt_int(1)

    # maximum number of powerline statuses that can change in one action
    self.MAX_LINE_STATUS_CHANGED = dt_int(1)

    # ignore the generators min_uptime / min_downtime constraints: they can be
    # connected / disconnected at will
    self.IGNORE_MIN_UP_DOWN_TIME = True

    # allow dispatch on turned off generator (if ``True`` you can actually
    # dispatch a turned off generator)
    self.ALLOW_DISPATCH_GEN_SWITCH_OFF = True

    # initial storage capacity, as a fraction of the maximum (NOT in pct)
    self.INIT_STORAGE_CAPACITY = 0.5

    # whether storage losses are taken into account in the step function
    self.ACTIVATE_STORAGE_LOSS = True

    if parameters_path is not None:
        if os.path.isfile(parameters_path):
            self.init_from_json(parameters_path)
        else:
            warn_msg = "Parameters: the file {} is not found. Continuing with default parameters."
            warnings.warn(warn_msg.format(parameters_path))
def reset(self):
    """
    Reset the :class:`BaseObservation` to a blank state, where everything is set to either
    ``None`` or to its default value.
    """
    # vectorized grid state
    self.timestep_overflow[:] = 0
    # 0. (line is disconnected) / 1. (line is connected)
    self.line_status[:] = True
    self.topo_vect[:] = 0

    # generator and load measurements are unknown until the next update
    for vect in (self.prod_p, self.prod_q, self.prod_v,
                 self.load_p, self.load_q, self.load_v):
        vect[:] = np.NaN

    # powerline measurements at both ends, and relative flows
    for vect in (self.p_or, self.q_or, self.v_or, self.a_or,
                 self.p_ex, self.q_ex, self.v_ex, self.a_ex,
                 self.rho):
        vect[:] = np.NaN

    # cooldowns and maintenance counters after hard overflow, soft overflow or
    # cascading failure: -1 means "not applicable / unknown"
    for vect in (self.time_before_cooldown_line,
                 self.time_before_cooldown_sub,
                 self.time_next_maintenance,
                 self.duration_next_maintenance):
        vect[:] = -1

    # calendar data
    self.year = dt_int(1970)
    self.month = dt_int(0)
    self.day = dt_int(0)
    self.hour_of_day = dt_int(0)
    self.minute_of_hour = dt_int(0)
    self.day_of_week = dt_int(0)

    # forecasts
    self._forecasted_inj = []
    self._forecasted_grid_act = {}

    # redispatching
    self.target_dispatch[:] = np.NaN
    self.actual_dispatch[:] = np.NaN
def update(self, env, with_forecast=True):
    """
    This use the environement to update properly the BaseObservation.

    Parameters
    ----------
    env: :class:`grid2op.Environment.Environment`
        The environment from which to update this observation.
    with_forecast: ``bool``
        When ``True``, the forecasted injections are refreshed from the
        chronics handler and the cached forecasted grids are invalidated.
    """
    # reset the matrices
    self._reset_matrices()
    self.reset()

    # extract the time stamps
    self.year = dt_int(env.time_stamp.year)
    self.month = dt_int(env.time_stamp.month)
    self.day = dt_int(env.time_stamp.day)
    self.hour_of_day = dt_int(env.time_stamp.hour)
    self.minute_of_hour = dt_int(env.time_stamp.minute)
    self.day_of_week = dt_int(env.time_stamp.weekday())

    # get the values related to topology
    # NOTE: these are rebound (shallow copies), not filled in place
    self.timestep_overflow = copy.copy(env.timestep_overflow)
    self.line_status = copy.copy(env.backend.get_line_status())
    self.topo_vect = copy.copy(env.backend.get_topo_vect())

    # get the values related to continuous values (filled in place)
    self.prod_p[:], self.prod_q[:], self.prod_v[:] = env.backend.generators_info()
    self.load_p[:], self.load_q[:], self.load_v[:] = env.backend.loads_info()
    self.p_or[:], self.q_or[:], self.v_or[:], self.a_or[:] = env.backend.lines_or_info()
    self.p_ex[:], self.q_ex[:], self.v_ex[:], self.a_ex[:] = env.backend.lines_ex_info()

    # handles forecasts here
    if with_forecast:
        self._forecasted_inj = env.chronics_handler.forecasts()
        for grid_act in self._forecasted_grid_act.values():
            # in the action, i assign the last topology known, it's a choice here...
            grid_act["inj_action"]["setbus"] = self.topo_vect
        # invalidate the cached forecasted grids, they will be recomputed lazily
        self._forecasted_grid = [None for _ in self._forecasted_inj]

    self.rho = env.backend.get_relative_flow().astype(dt_float)

    # cool down and reconnection time after hard overflow, soft overflow or cascading failure
    self.time_before_cooldown_line[:] = env.times_before_line_status_actionable
    self.time_before_cooldown_sub[:] = env.times_before_topology_actionable
    self.time_next_maintenance[:] = env.time_next_maintenance
    self.duration_next_maintenance[:] = env.duration_next_maintenance

    # redispatching
    self.target_dispatch[:] = env.target_dispatch
    self.actual_dispatch[:] = env.actual_dispatch
def init_from_dict(self, dict_):
    """
    Initialize the object given a dictionary. All keys are optional. If a key is not present
    in the dictionary, the default parameters is used.

    Parameters
    ----------
    dict_: ``dict``
        The dictionary representing the parameters to load.
    """
    if "NO_OVERFLOW_DISCONNECTION" in dict_:
        self.NO_OVERFLOW_DISCONNECTION = Parameters._isok_txt(
            dict_["NO_OVERFLOW_DISCONNECTION"])
    if "NB_TIMESTEP_POWERFLOW_ALLOWED" in dict_:
        self.NB_TIMESTEP_POWERFLOW_ALLOWED = dt_int(
            dict_["NB_TIMESTEP_POWERFLOW_ALLOWED"])
    if "NB_TIMESTEP_RECONNECTION" in dict_:
        self.NB_TIMESTEP_RECONNECTION = dt_int(
            dict_["NB_TIMESTEP_RECONNECTION"])
    if "HARD_OVERFLOW_THRESHOLD" in dict_:
        self.HARD_OVERFLOW_THRESHOLD = dt_float(
            dict_["HARD_OVERFLOW_THRESHOLD"])
    if "ENV_DC" in dict_:
        self.ENV_DC = Parameters._isok_txt(dict_["ENV_DC"])
    if "FORECAST_DC" in dict_:
        self.FORECAST_DC = Parameters._isok_txt(dict_["FORECAST_DC"])
    if "MAX_SUB_CHANGED" in dict_:
        self.MAX_SUB_CHANGED = dt_int(dict_["MAX_SUB_CHANGED"])
    if "MAX_LINE_STATUS_CHANGED" in dict_:
        self.MAX_LINE_STATUS_CHANGED = dt_int(
            dict_["MAX_LINE_STATUS_CHANGED"])
    if "NB_TIMESTEP_TOPOLOGY_REMODIF" in dict_:
        self.NB_TIMESTEP_TOPOLOGY_REMODIF = dt_int(
            dict_["NB_TIMESTEP_TOPOLOGY_REMODIF"])
    if "NB_TIMESTEP_LINE_STATUS_REMODIF" in dict_:
        # BUG FIX: this key previously overwrote NB_TIMESTEP_TOPOLOGY_REMODIF
        # (copy-paste error); it must set NB_TIMESTEP_LINE_STATUS_REMODIF.
        self.NB_TIMESTEP_LINE_STATUS_REMODIF = dt_int(
            dict_["NB_TIMESTEP_LINE_STATUS_REMODIF"])

    # warn about any key that does not match a known attribute
    ignored_keys = dict_.keys() - self.__dict__.keys()
    if len(ignored_keys):
        warnings.warn(
            "Parameters: The _parameters \"{}\" used to build the Grid2Op.Parameters "
            "class are not recognized and will be ignored.".format(
                ignored_keys))
def update(self, env, with_forecast=True):
    """
    Update this observation from the environment ``env``.

    Parameters
    ----------
    env:
        The environment from which the state is read (backend measurements,
        cooldowns, maintenance, dispatch and, optionally, the forecasts).
    with_forecast: ``bool``
        When ``True``, the forecasted injections are refreshed: the current
        state is inserted as the first "forecast", followed by the forecasts
        from the chronics handler.
    """
    # reset the matrices
    self._reset_matrices()
    self.reset()

    # extract the time stamps
    self.year = dt_int(env.time_stamp.year)
    self.month = dt_int(env.time_stamp.month)
    self.day = dt_int(env.time_stamp.day)
    self.hour_of_day = dt_int(env.time_stamp.hour)
    self.minute_of_hour = dt_int(env.time_stamp.minute)
    self.day_of_week = dt_int(env.time_stamp.weekday())

    # get the values related to topology (filled in place)
    self.timestep_overflow[:] = env._timestep_overflow
    self.line_status[:] = env.backend.get_line_status()
    self.topo_vect[:] = env.backend.get_topo_vect()

    # get the values related to continuous values
    self.prod_p[:], self.prod_q[:], self.prod_v[:] = env.backend.generators_info()
    self.load_p[:], self.load_q[:], self.load_v[:] = env.backend.loads_info()
    self.p_or[:], self.q_or[:], self.v_or[:], self.a_or[:] = env.backend.lines_or_info()
    self.p_ex[:], self.q_ex[:], self.v_ex[:], self.a_ex[:] = env.backend.lines_ex_info()

    # handles forecasts here
    if with_forecast:
        inj_action = {}
        dict_ = {}
        # the just-read state acts as the "forecast" for the current step
        dict_["load_p"] = dt_float(1.0 * self.load_p)
        dict_["load_q"] = dt_float(1.0 * self.load_q)
        dict_["prod_p"] = dt_float(1.0 * self.prod_p)
        dict_["prod_v"] = dt_float(1.0 * self.prod_v)
        inj_action["injection"] = dict_
        # inj_action = self.action_helper(inj_action)
        timestamp = self.get_time_stamp()
        self._forecasted_inj = [(timestamp, inj_action)]
        self._forecasted_inj += env.chronics_handler.forecasts()
        # invalidate the cached forecasted grids, they will be recomputed lazily
        self._forecasted_grid = [None for _ in self._forecasted_inj]

    self.rho[:] = env.backend.get_relative_flow().astype(dt_float)

    # cool down and reconnection time after hard overflow, soft overflow or cascading failure
    self.time_before_cooldown_line[:] = env._times_before_line_status_actionable
    self.time_before_cooldown_sub[:] = env._times_before_topology_actionable
    self.time_next_maintenance[:] = env._time_next_maintenance
    self.duration_next_maintenance[:] = env._duration_next_maintenance

    # redispatching
    self.target_dispatch[:] = env._target_dispatch
    self.actual_dispatch[:] = env._actual_dispatch
def __init__(self, parameters_path=None):
    """
    Build an object representing the _parameters of the game.

    Parameters
    ----------
    parameters_path: ``str``, optional
        Path where to look for parameters.
    """
    # if True, then it will not disconnect lines above their thermal limits
    self.NO_OVERFLOW_DISCONNECTION = False

    # number of timestep before powerline with an overflow is automatically disconnected
    self.NB_TIMESTEP_POWERFLOW_ALLOWED = dt_int(2)

    # number of timestep before a line can be reconnected if it has suffer a forced disconnection
    self.NB_TIMESTEP_RECONNECTION = dt_int(10)

    # number of timestep before a substation topology / a powerline status can be modified again
    self.NB_TIMESTEP_TOPOLOGY_REMODIF = dt_int(0)
    self.NB_TIMESTEP_LINE_STATUS_REMODIF = dt_int(0)

    # threshold above which a powerline is instantly disconnected by protections
    # this is expressed in relative value of the thermal limits
    # for example setting "HARD_OVERFLOW_THRESHOLD = 2" is equivalent, if a powerline has a thermal limit of
    # 243 A, to disconnect it instantly if it has a powerflow higher than 2 * 243 = 486 A
    # CONSISTENCY FIX: stored as dt_float (it is a ratio); init_from_dict also casts this
    # attribute with dt_float, so the default dtype used to depend on the construction path.
    self.HARD_OVERFLOW_THRESHOLD = dt_float(2.)

    # are the powerflow performed by the environment in DC mode (dc powerflow) or AC (ac powerflow)
    self.ENV_DC = False

    # same as above, but for the forecast states
    self.FORECAST_DC = False

    # maximum number of substations that can be change in one action
    self.MAX_SUB_CHANGED = dt_int(1)

    # maximum number of powerline status that can be changed in one action
    self.MAX_LINE_STATUS_CHANGED = dt_int(1)

    if parameters_path is not None:
        if os.path.isfile(parameters_path):
            self.init_from_json(parameters_path)
        else:
            warn_msg = "Parameters: the file {} is not found. Continuing with default parameters."
            warnings.warn(warn_msg.format(parameters_path))
def __init__(self, init_space, nb_bins):
    """
    Discretize a continuous gym ``Box`` space into ``nb_bins`` regularly
    spaced values per component.

    Parameters
    ----------
    init_space:
        The continuous gym ``Box`` space to discretize. A ``RuntimeError`` is
        raised for any other space type.
    nb_bins: ``int``
        Number of discrete values per component; must be at least 2.
    """
    if not isinstance(init_space, Box):
        raise RuntimeError(
            "Impossible to convert a gym space of type {} to a discrete space"
            " (it should be of "
            "type space.Box)"
            "".format(type(init_space)))
    if nb_bins < 2:
        # BUG FIX: the message said "less that 1 bin" while the check requires
        # at least 2 bins (and had garbled grammar / an unclosed parenthesis)
        raise RuntimeError(
            "This does not work with less than 2 bins (if you want to ignore some part "
            "of the action_space or observation_space please use the "
            "\"gym_space.ignore_attr\" or \"gym_space.keep_only_attr\")")
    min_ = init_space.low
    max_ = init_space.high
    self._ignored = min_ == max_  # which component are ignored
    self._res = min_
    # candidate values: nb_bins points strictly inside [min_, max_] per component
    self._values = np.linspace(min_, max_, num=nb_bins + 2)
    self._values = self._values[
        1:-1, :]  # the values that will be used when using #gym_to_glop
    # TODO there might be a cleaner approach here
    # bin edges ("cuts"): midpoints between consecutive candidate values
    self._bins_size = np.linspace(min_, max_, num=2 * nb_bins + 1)
    self._bins_size = self._bins_size[
        2:-1:2, :]  # the values defining the "cuts"
    self._gen_idx = np.arange(self._bins_size.shape[-1])
    n_bins = np.ones(min_.shape[0], dtype=dt_int) * dt_int(nb_bins)
    # if min and max are equal, i don't want to have multiple variable
    n_bins[self._ignored] = 1
    space = MultiDiscrete(n_bins)
    BaseGymAttrConverter.__init__(
        self,
        space=space,
    )
def init_from_dict(self, dict_):
    """
    Initialize the object given a dictionary. All keys are optional. If a key is not present
    in the dictionary, the default parameters is used.

    Parameters
    ----------
    dict_: ``dict``
        The dictionary representing the parameters to load.
    """
    # (key in dict_, attribute to set, conversion function), processed in
    # order so that a legacy key is overridden by its modern counterpart
    # when both are present in dict_.
    conversions = [
        ("NO_OVERFLOW_DISCONNECTION", "NO_OVERFLOW_DISCONNECTION", Parameters._isok_txt),
        ("IGNORE_MIN_UP_DOWN_TIME", "IGNORE_MIN_UP_DOWN_TIME", Parameters._isok_txt),
        ("ALLOW_DISPATCH_GEN_SWITCH_OFF", "ALLOW_DISPATCH_GEN_SWITCH_OFF", Parameters._isok_txt),
        ("NB_TIMESTEP_POWERFLOW_ALLOWED", "NB_TIMESTEP_OVERFLOW_ALLOWED", dt_int),  # legacy key
        ("NB_TIMESTEP_OVERFLOW_ALLOWED", "NB_TIMESTEP_OVERFLOW_ALLOWED", dt_int),
        ("NB_TIMESTEP_RECONNECTION", "NB_TIMESTEP_RECONNECTION", dt_int),
        ("HARD_OVERFLOW_THRESHOLD", "HARD_OVERFLOW_THRESHOLD", dt_float),
        ("ENV_DC", "ENV_DC", Parameters._isok_txt),
        ("FORECAST_DC", "FORECAST_DC", Parameters._isok_txt),
        ("MAX_SUB_CHANGED", "MAX_SUB_CHANGED", dt_int),
        ("MAX_LINE_STATUS_CHANGED", "MAX_LINE_STATUS_CHANGED", dt_int),
        ("NB_TIMESTEP_TOPOLOGY_REMODIF", "NB_TIMESTEP_COOLDOWN_SUB", dt_int),  # legacy key
        ("NB_TIMESTEP_COOLDOWN_SUB", "NB_TIMESTEP_COOLDOWN_SUB", dt_int),
        ("NB_TIMESTEP_LINE_STATUS_REMODIF", "NB_TIMESTEP_COOLDOWN_LINE", dt_int),  # legacy key
        ("NB_TIMESTEP_COOLDOWN_LINE", "NB_TIMESTEP_COOLDOWN_LINE", dt_int),
    ]
    for key, attr, convert in conversions:
        if key in dict_:
            setattr(self, attr, convert(dict_[key]))

    # legacy key names are accepted on input but are not attributes of this
    # object, so they must not trigger the "unrecognized key" warning
    authorized_keys = set(self.__dict__.keys())
    authorized_keys = authorized_keys | {
        'NB_TIMESTEP_POWERFLOW_ALLOWED', 'NB_TIMESTEP_TOPOLOGY_REMODIF',
        "NB_TIMESTEP_LINE_STATUS_REMODIF"
    }
    ignored_keys = dict_.keys() - authorized_keys
    if len(ignored_keys):
        warnings.warn(
            "Parameters: The _parameters \"{}\" used to build the Grid2Op.Parameters "
            "class are not recognized and will be ignored.".format(
                ignored_keys))
def __init__(self, obs_env=None, action_helper=None, seed=None):
    """Create a blank observation; all values are placeholders until ``update`` is called."""
    GridObjects.__init__(self)
    self.action_helper = action_helper

    # time stamp information (overwritten with dt_int values further below)
    self.year = 1970
    self.month = 0
    self.day = 0
    self.hour_of_day = 0
    self.minute_of_hour = 0
    self.day_of_week = 0

    # for non deterministic observation that would not use default np.random module
    self.seed = None

    # handles the forecasts here
    self._forecasted_grid_act = {}
    self._forecasted_inj = []

    self.timestep_overflow = np.zeros(shape=(self.n_line, ), dtype=dt_int)

    # 0. (line is disconnected) / 1. (line is connected)
    self.line_status = np.ones(shape=self.n_line, dtype=dt_bool)

    # topological vector
    self.topo_vect = np.full(shape=self.dim_topo, dtype=dt_int, fill_value=0)

    def _nan_vect(size):
        # float vector initialized to "unknown"
        return np.full(shape=size, dtype=dt_float, fill_value=np.NaN)

    # generators information
    self.prod_p = _nan_vect(self.n_gen)
    self.prod_q = _nan_vect(self.n_gen)
    self.prod_v = _nan_vect(self.n_gen)
    # loads information
    self.load_p = _nan_vect(self.n_load)
    self.load_q = _nan_vect(self.n_load)
    self.load_v = _nan_vect(self.n_load)
    # lines origin information
    self.p_or = _nan_vect(self.n_line)
    self.q_or = _nan_vect(self.n_line)
    self.v_or = _nan_vect(self.n_line)
    self.a_or = _nan_vect(self.n_line)
    # lines extremity information
    self.p_ex = _nan_vect(self.n_line)
    self.q_ex = _nan_vect(self.n_line)
    self.v_ex = _nan_vect(self.n_line)
    self.a_ex = _nan_vect(self.n_line)
    # lines relative flows
    self.rho = _nan_vect(self.n_line)

    # cool down and reconnection time after hard overflow, soft overflow or cascading failure
    self.time_before_cooldown_line = np.full(shape=self.n_line, dtype=dt_int, fill_value=-1)
    self.time_before_cooldown_sub = np.full(shape=self.n_sub, dtype=dt_int, fill_value=-1)
    self.time_next_maintenance = np.full(shape=self.n_line, dtype=dt_int, fill_value=-1)
    self.duration_next_maintenance = np.full(shape=self.n_line, dtype=dt_int, fill_value=-1)

    # calendar data
    self.year = dt_int(1970)
    self.month = dt_int(0)
    self.day = dt_int(0)
    self.hour_of_day = dt_int(0)
    self.minute_of_hour = dt_int(0)
    self.day_of_week = dt_int(0)

    # forecasts
    self._forecasted_inj = []
    self._forecasted_grid = []
    self._obs_env = obs_env

    # redispatching
    self.target_dispatch = _nan_vect(self.n_gen)
    self.actual_dispatch = _nan_vect(self.n_gen)

    # value to assess if two observations are equal
    self._tol_equal = 5e-1
    self.attr_list_vect = None
def __init__(self, obs_env=None, action_helper=None, seed=None):
    """Create a blank observation; every value is a placeholder until ``update`` is called."""
    GridObjects.__init__(self)
    self.action_helper = action_helper

    # time stamp information (replaced by dt_int values further below)
    self.year = 1970
    self.month = 0
    self.day = 0
    self.hour_of_day = 0
    self.minute_of_hour = 0
    self.day_of_week = 0

    # for non deterministic observation that would not use default np.random module
    self.seed = None

    # handles the forecasts here
    self._forecasted_grid_act = {}
    self._forecasted_inj = []
    self._obs_env = obs_env

    self.timestep_overflow = np.zeros(shape=(self.n_line,), dtype=dt_int)

    # 0. (line is disconnected) / 1. (line is connected)
    self.line_status = np.ones(shape=self.n_line, dtype=dt_bool)

    # topological vector
    self.topo_vect = np.zeros(shape=self.dim_topo, dtype=dt_int)

    # generators information (NaN means "unknown")
    self.prod_p = np.full(shape=self.n_gen, dtype=dt_float, fill_value=np.NaN)
    self.prod_q = self.prod_p.copy()
    self.prod_v = self.prod_p.copy()
    # loads information
    self.load_p = np.full(shape=self.n_load, dtype=dt_float, fill_value=np.NaN)
    self.load_q = self.load_p.copy()
    self.load_v = self.load_p.copy()
    # lines origin information
    self.p_or = np.full(shape=self.n_line, dtype=dt_float, fill_value=np.NaN)
    self.q_or = self.p_or.copy()
    self.v_or = self.p_or.copy()
    self.a_or = self.p_or.copy()
    # lines extremity information
    self.p_ex = self.p_or.copy()
    self.q_ex = self.p_or.copy()
    self.v_ex = self.p_or.copy()
    self.a_ex = self.p_or.copy()
    # lines relative flows
    self.rho = self.p_or.copy()

    # cool down and reconnection time after hard overflow, soft overflow or cascading failure
    self.time_before_cooldown_line = np.full(shape=self.n_line, dtype=dt_int, fill_value=-1)
    self.time_before_cooldown_sub = np.full(shape=self.n_sub, dtype=dt_int, fill_value=-1)
    self.time_next_maintenance = self.time_before_cooldown_line.copy()
    self.duration_next_maintenance = self.time_before_cooldown_line.copy()

    # calendar data
    self.year = dt_int(1970)
    self.month = dt_int(0)
    self.day = dt_int(0)
    self.hour_of_day = dt_int(0)
    self.minute_of_hour = dt_int(0)
    self.day_of_week = dt_int(0)

    # redispatching
    self.target_dispatch = self.prod_p.copy()
    self.actual_dispatch = self.prod_p.copy()

    # to save some computation time
    self._connectivity_matrix_ = None
    self._bus_connectivity_matrix_ = None
    self._dictionnarized = None

    # for shunt (these are not stored!)
    if self.shunts_data_available:
        self._shunt_p = np.full(shape=self.n_shunt, dtype=dt_float, fill_value=np.NaN)
        self._shunt_q = self._shunt_p.copy()
        self._shunt_v = self._shunt_p.copy()
        self._shunt_bus = np.full(shape=self.n_shunt, dtype=dt_int, fill_value=1)
def _check_bool_param(self, name):
    """Ensure attribute ``name`` is a boolean and store it as ``dt_bool``."""
    try:
        val = getattr(self, name)
        if not isinstance(val, (bool, dt_bool)):
            raise RuntimeError(f"{name} should be a boolean")
        setattr(self, name, dt_bool(val))
    except Exception as exc_:
        raise RuntimeError(
            f"Impossible to convert {name} to bool with error \n:\"{exc_}\""
        )

def _check_int_param(self, name):
    """Ensure attribute ``name`` is a scalar integer and store it as ``dt_int``."""
    try:
        # int(...) raises if the attribute is a (non scalar) numpy array
        val = int(getattr(self, name))
        setattr(self, name, dt_int(val))
    except Exception as exc_:
        raise RuntimeError(
            f"Impossible to convert {name} to int with error \n:\"{exc_}\""
        )

def _check_float_param(self, name):
    """Ensure attribute ``name`` is a scalar float and store it as ``dt_float``."""
    try:
        # float(...) raises if the attribute is a (non scalar) numpy array
        val = float(getattr(self, name))
        setattr(self, name, dt_float(val))
    except Exception as exc_:
        raise RuntimeError(
            f"Impossible to convert {name} to float with error \n:\"{exc_}\""
        )

def check_valid(self):
    """
    check the parameter is valid (ie it checks that all the values are of correct types
    and within the correct range.

    Raises
    -------
    A ``RuntimeError`` if the parameter is not valid

    Notes
    -----
    The repeated try/convert/range-check pattern of the original implementation
    has been factored into the ``_check_*_param`` helpers; the error messages
    are unchanged, except that the ENV_DC type error no longer wrongly names
    NO_OVERFLOW_DISCONNECTION (copy-paste bug fixed).
    """
    self._check_bool_param("NO_OVERFLOW_DISCONNECTION")

    self._check_int_param("NB_TIMESTEP_OVERFLOW_ALLOWED")
    if self.NB_TIMESTEP_OVERFLOW_ALLOWED < 0:
        raise RuntimeError(
            "NB_TIMESTEP_OVERFLOW_ALLOWED < 0., this should be >= 0.")

    self._check_int_param("NB_TIMESTEP_RECONNECTION")
    if self.NB_TIMESTEP_RECONNECTION < 0:
        raise RuntimeError(
            "NB_TIMESTEP_RECONNECTION < 0., this should be >= 0.")

    self._check_int_param("NB_TIMESTEP_COOLDOWN_LINE")
    if self.NB_TIMESTEP_COOLDOWN_LINE < 0:
        raise RuntimeError(
            "NB_TIMESTEP_COOLDOWN_LINE < 0., this should be >= 0.")

    self._check_int_param("NB_TIMESTEP_COOLDOWN_SUB")
    if self.NB_TIMESTEP_COOLDOWN_SUB < 0:
        raise RuntimeError(
            "NB_TIMESTEP_COOLDOWN_SUB < 0., this should be >= 0.")

    self._check_float_param("HARD_OVERFLOW_THRESHOLD")
    if self.HARD_OVERFLOW_THRESHOLD < 1.:
        raise RuntimeError(
            "HARD_OVERFLOW_THRESHOLD < 1., this should be >= 1. (use env.set_thermal_limit "
            "to modify the thermal limit)")

    self._check_bool_param("ENV_DC")

    self._check_int_param("MAX_SUB_CHANGED")
    # NOTE(review): the message mentions -1 as a valid value but the check
    # rejects it (as did the original code) — to confirm with the callers
    if self.MAX_SUB_CHANGED < 0:
        raise RuntimeError(
            "MAX_SUB_CHANGED should be >=0 (or -1 if you want to be able to change every "
            "substation at once)")

    self._check_int_param("MAX_LINE_STATUS_CHANGED")
    if self.MAX_LINE_STATUS_CHANGED < 0:
        raise RuntimeError(
            "MAX_LINE_STATUS_CHANGED should be >=0 "
            "(or -1 if you want to be able to change every powerline at once)"
        )

    self._check_bool_param("IGNORE_MIN_UP_DOWN_TIME")
    self._check_bool_param("ALLOW_DISPATCH_GEN_SWITCH_OFF")

    self._check_float_param("INIT_STORAGE_CAPACITY")
    if self.INIT_STORAGE_CAPACITY < 0.:
        raise RuntimeError(
            "INIT_STORAGE_CAPACITY < 0., this should be within range [0., 1.]"
        )
    if self.INIT_STORAGE_CAPACITY > 1.:
        raise RuntimeError(
            "INIT_STORAGE_CAPACITY > 1., this should be within range [0., 1.]"
        )

    self._check_bool_param("ACTIVATE_STORAGE_LOSS")

    if self.ALARM_WINDOW_SIZE <= 0:
        raise RuntimeError(
            "self.ALARM_WINDOW_SIZE should be a positive integer !")
    if self.ALARM_BEST_TIME <= 0:
        raise RuntimeError(
            "self.ALARM_BEST_TIME should be a positive integer !")
def init_from_dict(self, dict_):
    """
    Initialize the object given a dictionary. All keys are optional. If a key is not present
    in the dictionary, the default parameters is used.

    Parameters
    ----------
    dict_: ``dict``
        The dictionary representing the parameters to load.
    """
    if "NO_OVERFLOW_DISCONNECTION" in dict_:
        self.NO_OVERFLOW_DISCONNECTION = Parameters._isok_txt(
            dict_["NO_OVERFLOW_DISCONNECTION"])
    if "IGNORE_MIN_UP_DOWN_TIME" in dict_:
        self.IGNORE_MIN_UP_DOWN_TIME = Parameters._isok_txt(
            dict_["IGNORE_MIN_UP_DOWN_TIME"])
    if "ALLOW_DISPATCH_GEN_SWITCH_OFF" in dict_:
        self.ALLOW_DISPATCH_GEN_SWITCH_OFF = Parameters._isok_txt(
            dict_["ALLOW_DISPATCH_GEN_SWITCH_OFF"])
    # legacy key kept for backward compatibility; processed before the modern
    # key so the modern one wins when both are present
    if "NB_TIMESTEP_POWERFLOW_ALLOWED" in dict_:
        self.NB_TIMESTEP_OVERFLOW_ALLOWED = dt_int(
            dict_["NB_TIMESTEP_POWERFLOW_ALLOWED"])
    if "NB_TIMESTEP_OVERFLOW_ALLOWED" in dict_:
        self.NB_TIMESTEP_OVERFLOW_ALLOWED = dt_int(
            dict_["NB_TIMESTEP_OVERFLOW_ALLOWED"])
    if "NB_TIMESTEP_RECONNECTION" in dict_:
        self.NB_TIMESTEP_RECONNECTION = dt_int(
            dict_["NB_TIMESTEP_RECONNECTION"])
    if "HARD_OVERFLOW_THRESHOLD" in dict_:
        self.HARD_OVERFLOW_THRESHOLD = dt_float(
            dict_["HARD_OVERFLOW_THRESHOLD"])
    if "ENV_DC" in dict_:
        self.ENV_DC = Parameters._isok_txt(dict_["ENV_DC"])
    if "FORECAST_DC" in dict_:
        new_val = Parameters._isok_txt(dict_["FORECAST_DC"])
        if new_val != self.FORECAST_DC:
            # warn only when the deprecated key actually changes the value
            warnings.warn(
                "The FORECAST_DC attributes is deprecated. Please change the parameters of the "
                "\"forecast\" backend with \"env.change_forecast_parameters(new_param)\" function "
                "with \"new_param.ENV_DC=...\" ")
        self.FORECAST_DC = new_val
    if "MAX_SUB_CHANGED" in dict_:
        self.MAX_SUB_CHANGED = dt_int(dict_["MAX_SUB_CHANGED"])
    if "MAX_LINE_STATUS_CHANGED" in dict_:
        self.MAX_LINE_STATUS_CHANGED = dt_int(
            dict_["MAX_LINE_STATUS_CHANGED"])
    if "NB_TIMESTEP_TOPOLOGY_REMODIF" in dict_:
        # for backward compatibility (in case of old dataset)
        self.NB_TIMESTEP_COOLDOWN_SUB = dt_int(
            dict_["NB_TIMESTEP_TOPOLOGY_REMODIF"])
    if "NB_TIMESTEP_COOLDOWN_SUB" in dict_:
        self.NB_TIMESTEP_COOLDOWN_SUB = dt_int(
            dict_["NB_TIMESTEP_COOLDOWN_SUB"])
    if "NB_TIMESTEP_LINE_STATUS_REMODIF" in dict_:
        # for backward compatibility (in case of old dataset)
        self.NB_TIMESTEP_COOLDOWN_LINE = dt_int(
            dict_["NB_TIMESTEP_LINE_STATUS_REMODIF"])
    if "NB_TIMESTEP_COOLDOWN_LINE" in dict_:
        self.NB_TIMESTEP_COOLDOWN_LINE = dt_int(
            dict_["NB_TIMESTEP_COOLDOWN_LINE"])

    # storage parameters
    if "INIT_STORAGE_CAPACITY" in dict_:
        self.INIT_STORAGE_CAPACITY = dt_float(
            dict_["INIT_STORAGE_CAPACITY"])
    if "ACTIVATE_STORAGE_LOSS" in dict_:
        self.ACTIVATE_STORAGE_LOSS = Parameters._isok_txt(
            dict_["ACTIVATE_STORAGE_LOSS"])

    # alarm parameters
    if "ALARM_BEST_TIME" in dict_:
        self.ALARM_BEST_TIME = dt_int(dict_["ALARM_BEST_TIME"])
    if "ALARM_WINDOW_SIZE" in dict_:
        self.ALARM_WINDOW_SIZE = dt_int(dict_["ALARM_WINDOW_SIZE"])

    # legacy key names are accepted on input but are not attributes of this
    # object, so they must not trigger the "unrecognized key" warning
    authorized_keys = set(self.__dict__.keys())
    authorized_keys = authorized_keys | {
        'NB_TIMESTEP_POWERFLOW_ALLOWED', 'NB_TIMESTEP_TOPOLOGY_REMODIF',
        "NB_TIMESTEP_LINE_STATUS_REMODIF"
    }
    ignored_keys = dict_.keys() - authorized_keys
    if len(ignored_keys):
        warnings.warn(
            "Parameters: The _parameters \"{}\" used to build the Grid2Op.Parameters "
            "class are not recognized and will be ignored.".format(
                ignored_keys))
def load_redispacthing_data(self, path, name='prods_charac.csv'):
    """
    .. warning:: /!\\\\ Internal, do not use unless you know what you are doing /!\\\\

    This method will load everything needed for the redispatching and unit commitment problem.

    Loading is "best effort": if the file is missing, cannot be parsed, or lacks
    one of the required columns, the redispatching data are simply not loaded.
    A warning now explains why (previously these paths returned silently).

    Parameters
    ----------
    path: ``str``
        Location of the dataframe containing the redispatching data.

    name: ``str``
        Name of the dataframe containing the redispatching data

    """
    self._fill_names()

    # for redispatching
    fullpath = os.path.join(path, name)
    if not os.path.exists(fullpath):
        # no redispatching data shipped with this grid
        self.redispatching_unit_commitment_availble = False
        return
    try:
        df = pd.read_csv(fullpath)
    except Exception as exc_:
        # used to be a silent "return": keep the best-effort behaviour but
        # tell the user why the redispatching data are ignored
        warnings.warn("load_redispacthing_data: impossible to read the file "
                      "\"{}\", redispatching data will not be used. Error was: "
                      "\"{}\"".format(fullpath, exc_))
        return

    # all of these columns are required to build the generator characteristics
    required_columns = ("type", "Pmax", "Pmin", "max_ramp_up", "max_ramp_down",
                        "start_cost", "shut_down_cost", "marginal_cost",
                        "min_up_time", "min_down_time")
    missing_columns = [el for el in required_columns if el not in df.columns]
    if missing_columns:
        # used to be a silent "return": report exactly which columns are missing
        warnings.warn("load_redispacthing_data: the file \"{}\" is missing the "
                      "column(s) {}, redispatching data will not be used."
                      "".format(fullpath, missing_columns))
        return

    # index the characteristics by generator name
    gen_info = {}
    for _, row in df.iterrows():
        gen_info[row["name"]] = {
            "type": row["type"],
            "pmax": row["Pmax"],
            "pmin": row["Pmin"],
            "max_ramp_up": row["max_ramp_up"],
            "max_ramp_down": row["max_ramp_down"],
            "start_cost": row["start_cost"],
            "shut_down_cost": row["shut_down_cost"],
            "marginal_cost": row["marginal_cost"],
            "min_up_time": row["min_up_time"],
            "min_down_time": row["min_down_time"]
        }
    self.redispatching_unit_commitment_availble = True

    # default values, overwritten below for every generator of the grid
    self.gen_type = np.full(self.n_gen, fill_value="aaaaaaaaaa")
    self.gen_pmin = np.full(self.n_gen, fill_value=1., dtype=dt_float)
    self.gen_pmax = np.full(self.n_gen, fill_value=1., dtype=dt_float)
    self.gen_redispatchable = np.full(self.n_gen, fill_value=False, dtype=dt_bool)
    self.gen_max_ramp_up = np.full(self.n_gen, fill_value=0., dtype=dt_float)
    self.gen_max_ramp_down = np.full(self.n_gen, fill_value=0., dtype=dt_float)
    self.gen_min_uptime = np.full(self.n_gen, fill_value=-1, dtype=dt_int)
    self.gen_min_downtime = np.full(self.n_gen, fill_value=-1, dtype=dt_int)
    self.gen_cost_per_MW = np.full(self.n_gen, fill_value=1., dtype=dt_float)  # marginal cost
    self.gen_startup_cost = np.full(self.n_gen, fill_value=1., dtype=dt_float)  # start cost
    self.gen_shutdown_cost = np.full(self.n_gen, fill_value=1., dtype=dt_float)  # shutdown cost

    for i, gen_nm in enumerate(self.name_gen):
        # NOTE(review): still raises KeyError if a generator of the grid is
        # absent from the csv file -- same behaviour as before
        tmp_gen = gen_info[gen_nm]
        self.gen_type[i] = str(tmp_gen["type"])
        self.gen_pmin[i] = dt_float(tmp_gen["pmin"])
        self.gen_pmax[i] = dt_float(tmp_gen["pmax"])
        # wind and solar units cannot be redispatched
        self.gen_redispatchable[i] = dt_bool(
            tmp_gen["type"] not in ["wind", "solar"])
        # non-finite ramps in the file keep the 0. default
        tmp = dt_float(tmp_gen["max_ramp_up"])
        if np.isfinite(tmp):
            self.gen_max_ramp_up[i] = tmp
        tmp = dt_float(tmp_gen["max_ramp_down"])
        if np.isfinite(tmp):
            self.gen_max_ramp_down[i] = tmp
        self.gen_min_uptime[i] = dt_int(tmp_gen["min_up_time"])
        self.gen_min_downtime[i] = dt_int(tmp_gen["min_down_time"])
        self.gen_cost_per_MW[i] = dt_float(tmp_gen["marginal_cost"])
        self.gen_startup_cost[i] = dt_float(tmp_gen["start_cost"])
        self.gen_shutdown_cost[i] = dt_float(tmp_gen["shut_down_cost"])
def update(self, env, with_forecast=True):
    """
    Refresh every attribute of this observation from the current state of ``env``.

    Reads the time stamp, topology, injections, flows, storage state,
    cooldowns, redispatching/curtailment values (and shunts / voltage angles
    when the backend provides them), and optionally rebuilds the list of
    forecasted injections.

    Parameters
    ----------
    env:
        The environment to copy the state from; only its backend,
        ``time_stamp``, ``chronics_handler`` and internal ``_``-prefixed
        attributes are accessed here.
    with_forecast: ``bool``
        If ``True`` (default), also rebuild ``self._forecasted_inj`` from the
        current injections plus the chronics handler forecasts.
    """
    # reset the matrices
    self._reset_matrices()
    self.reset()

    # counter
    self.current_step = env.nb_time_step

    # extract the time stamps
    self.year = dt_int(env.time_stamp.year)
    self.month = dt_int(env.time_stamp.month)
    self.day = dt_int(env.time_stamp.day)
    self.hour_of_day = dt_int(env.time_stamp.hour)
    self.minute_of_hour = dt_int(env.time_stamp.minute)
    self.day_of_week = dt_int(env.time_stamp.weekday())

    # get the values related to topology
    self.timestep_overflow[:] = env._timestep_overflow
    self.line_status[:] = env.backend.get_line_status()
    self.topo_vect[:] = env.backend.get_topo_vect()

    # get the values related to continuous values
    self.gen_p[:], self.gen_q[:], self.gen_v[:] = env.backend.generators_info()
    self.load_p[:], self.load_q[:], self.load_v[:] = env.backend.loads_info()
    self.p_or[:], self.q_or[:], self.v_or[:], self.a_or[:] = env.backend.lines_or_info()
    self.p_ex[:], self.q_ex[:], self.v_ex[:], self.a_ex[:] = env.backend.lines_ex_info()

    # storage units
    self.storage_charge[:] = env._storage_current_charge
    self.storage_power_target[:] = env._action_storage
    self.storage_power[:] = env._storage_power

    # handles forecasts here
    if with_forecast:
        inj_action = {}
        dict_ = {}
        # NOTE(review): dt_float is applied to whole arrays here (not scalars);
        # presumably it casts the copies to the float dtype -- confirm against
        # the dt_float definition
        dict_["load_p"] = dt_float(1.0 * self.load_p)
        dict_["load_q"] = dt_float(1.0 * self.load_q)
        dict_["prod_p"] = dt_float(1.0 * self.gen_p)
        dict_["prod_v"] = dt_float(1.0 * self.gen_v)
        inj_action["injection"] = dict_
        # inj_action = self.action_helper(inj_action)
        timestamp = self.get_time_stamp()
        # the first "forecast" is the current state itself, followed by the
        # chronics forecasts; the cached forecasted grids are invalidated
        self._forecasted_inj = [(timestamp, inj_action)]
        self._forecasted_inj += env.chronics_handler.forecasts()
        self._forecasted_grid = [None for _ in self._forecasted_inj]

    self.rho[:] = env.backend.get_relative_flow().astype(dt_float)

    # cool down and reconnection time after hard overflow, soft overflow or cascading failure
    self.time_before_cooldown_line[:] = env._times_before_line_status_actionable
    self.time_before_cooldown_sub[:] = env._times_before_topology_actionable
    self.time_next_maintenance[:] = env._time_next_maintenance
    self.duration_next_maintenance[:] = env._duration_next_maintenance

    # redispatching
    self.target_dispatch[:] = env._target_dispatch
    self.actual_dispatch[:] = env._actual_dispatch

    # handle shunts (if available)
    if self.shunts_data_available:
        sh_p, sh_q, sh_v, sh_bus = env.backend.shunt_info()
        self._shunt_p[:] = sh_p
        self._shunt_q[:] = sh_q
        self._shunt_v[:] = sh_v
        self._shunt_bus[:] = sh_bus

    self._thermal_limit[:] = env.get_thermal_limit()

    if self.redispatching_unit_commitment_availble:
        # curtailment is expressed as a fraction of pmax, and only applies to
        # renewable generators
        self.gen_p_before_curtail[:] = env._gen_before_curtailment
        self.curtailment[:] = (self.gen_p_before_curtail - self.gen_p) / self.gen_pmax
        self.curtailment[~self.gen_renewable] = 0.
        self.curtailment_limit[:] = env._limit_curtailment
        self.curtailment_limit[self.curtailment_limit >= 1.] = 1.0
    else:
        # no redispatching data: expose a neutral curtailment state
        self.curtailment[:] = 0.
        self.gen_p_before_curtail[:] = self.gen_p
        self.curtailment_limit[:] = 1.0

    if env.backend.can_output_theta:
        self.support_theta = True  # backend supports the computation of theta
        self.theta_or[:], self.theta_ex[:], self.load_theta[:], self.gen_theta[:], self.storage_theta[:] = \
            env.backend.get_theta()
def reset(self): """ .. warning:: /!\\\\ Internal, do not use unless you know what you are doing /!\\\\ Resetting a single observation is unlikely to do what you want to do. Reset the :class:`BaseObservation` to a blank state, where everything is set to either ``None`` or to its default value. """ # vecorized _grid self.timestep_overflow[:] = 0 # 0. (line is disconnected) / 1. (line is connected) self.line_status[:] = True # topological vector self.topo_vect[:] = 0 # generators information self.prod_p[:] = np.NaN self.prod_q[:] = np.NaN self.prod_v[:] = np.NaN # loads information self.load_p[:] = np.NaN self.load_q[:] = np.NaN self.load_v[:] = np.NaN # lines origin information self.p_or[:] = np.NaN self.q_or[:] = np.NaN self.v_or[:] = np.NaN self.a_or[:] = np.NaN # lines extremity information self.p_ex[:] = np.NaN self.q_ex[:] = np.NaN self.v_ex[:] = np.NaN self.a_ex[:] = np.NaN # lines relative flows self.rho[:] = np.NaN # cool down and reconnection time after hard overflow, soft overflow or cascading failure self.time_before_cooldown_line[:] = -1 self.time_before_cooldown_sub[:] = -1 self.time_next_maintenance[:] = -1 self.duration_next_maintenance[:] = -1 # calendar data self.year = dt_int(1970) self.month = dt_int(0) self.day = dt_int(0) self.hour_of_day = dt_int(0) self.minute_of_hour = dt_int(0) self.day_of_week = dt_int(0) # forecasts self._forecasted_inj = [] self._forecasted_grid_act = {} # redispatching self.target_dispatch[:] = np.NaN self.actual_dispatch[:] = np.NaN # to save up computation time self._dictionnarized = None self._connectivity_matrix_ = None self._bus_connectivity_matrix_ = None if self.shunts_data_available: self._shunt_p[:] = np.NaN self._shunt_q[:] = np.NaN self._shunt_v[:] = np.NaN self._shunt_bus[:] = -1