Example #1
    def copy(self, with_optimizer: bool = False):
        """Return a deep copy of the model.

        Unless with_optimizer is True, the copy's optimizer and
        scheduler references are cleared.
        """
        model_copy = clone(self)
        if not with_optimizer:
            model_copy.optimizer = None
            model_copy.scheduler = None

        return model_copy
Example #2
def dottable2dict(config):
    """Recursively convert a dottable config into a plain dict.

    Floats are converted to strings; other non-dict leaves are cloned.
    """
    if isinstance(config, float):
        return str(config)
    if not isinstance(config, dict):
        return clone(config)
    rt = {}
    for k, v in config.items():
        rt[k] = dottable2dict(v)
    return rt
Example #3
    def __init__(self, model_dict: dict, optimizer_opt: Union[dict, tuple],
                 loss_func_dict: dict, hyper_params: DQNHyperParams):
        if model_dict.get("target", None) is None:
            model_dict["target"] = clone(model_dict["eval"])
        super().__init__(model_dict, optimizer_opt, loss_func_dict,
                         hyper_params)
        self._train_cnt = 0
        self._device = torch.device(
            "cuda" if torch.cuda.is_available() else "cpu")
Example #4
    def __init__(self, model_dict: dict, optimizer_opt: Union[dict, tuple],
                 loss_func_dict: dict, hyper_params: DQNHyperParams):
        """
        DQN algorithm. The model_dict must contain the key "eval".
        Optionally, a model corresponding to the key "target" can be
        provided. If the key "target" is absent or model_dict["target"]
        is None, the target model will be a deep copy of the provided
        eval model.
        """
        if model_dict.get("target", None) is None:
            model_dict["target"] = clone(model_dict["eval"])
        super().__init__(model_dict, optimizer_opt, loss_func_dict,
                         hyper_params)
        self._train_cnt = 0
        self._device = torch.device(
            "cuda" if torch.cuda.is_available() else "cpu")
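
The docstring in Example #4 states the convention these snippets share: clone is used as a deep-copy helper, so a missing "target" entry is filled with an independent copy of the "eval" model. A minimal, self-contained sketch of that pattern, using copy.deepcopy as a stand-in for clone and a hypothetical toy network, could look like this:

import copy

import torch.nn as nn


def clone(obj):
    # Stand-in for the clone helper used in these examples: a plain deep copy.
    return copy.deepcopy(obj)


eval_net = nn.Linear(4, 2)           # hypothetical eval network
model_dict = {"eval": eval_net}      # no "target" key provided

if model_dict.get("target", None) is None:
    model_dict["target"] = clone(model_dict["eval"])

# The target starts as an independent copy of the eval network.
assert model_dict["target"] is not model_dict["eval"]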
Example #5
    def dumps(self):
        """Return a deep copy of the store contents."""
        return clone(dict(self._store))
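
Because clone deep-copies the store, callers receive a snapshot they can mutate freely without affecting the original. A small illustration, again using copy.deepcopy as a stand-in for clone and a hypothetical Store class:

import copy


def clone(obj):
    # Stand-in for the clone helper: a plain deep copy.
    return copy.deepcopy(obj)


class Store:
    """Hypothetical container mirroring the dumps() snippet above."""

    def __init__(self):
        self._store = {"scores": [1, 2, 3]}

    def dumps(self):
        # Return a deep copy of the store contents.
        return clone(dict(self._store))


store = Store()
snapshot = store.dumps()
snapshot["scores"].append(4)      # mutating the snapshot...
print(store.dumps()["scores"])    # ...leaves the original untouched: [1, 2, 3]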
Example #6
    def copy(self):
        """Return a deep copy of this object."""
        return clone(self)