def __init__(self, name: str, initialisation: torch.Tensor, cache: bool = False):
    # might change cache to an arbitrary history length later
    self.name = name
    self.attr = initialisation
    self.cache = cache
    if cache:
        # torch tensors have no .copy(); use .clone() to keep an independent snapshot
        self.prev_attr = initialisation.clone()

def act(self, obs: torch.Tensor, r, done, info):
    # return a detached copy of the observation and an empty info dict
    return obs.clone(), {}
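# A minimal usage sketch. It assumes the enclosing class is called `Agent` and
# that `import torch` is in scope; both names are assumptions for illustration,
# not taken from the original source.
#
# agent = Agent("demo", initialisation=torch.zeros(4), cache=True)
# action, extra = agent.act(torch.ones(4), r=0.0, done=False, info={})
# # `action` is a clone of the observation; `agent.prev_attr` holds the cached
# # initial tensor because cache=True was passed at construction.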