def test_distances_not_crashes(self, walkers, observs):
    """Check that distance computation runs and yields a 1-D float vector of length n."""
    backend_observs = judo.to_backend(observs)
    with numpy.errstate(**NUMPY_IGNORE_WARNINGS_PARAMS):
        walkers.env_states.update(observs=backend_observs)
        walkers.calculate_distances()
        distances = walkers.states.distances
        assert dtype.is_float(distances[0])
        assert len(distances.shape) == 1
        assert distances.shape[0] == walkers.n
def _merge_data(data_dicts: List[Dict[str, Tensor]]):
    """Merge a list of per-chunk result dictionaries into one dictionary.

    The keys of the first dictionary are used; every dictionary is assumed to
    share them. For each key, the values are moved to the active backend and
    concatenated into a single batch.

    Args:
        data_dicts: list of dictionaries mapping names to batches of data.

    Returns:
        dict: mapping each key to the concatenated backend tensor.

    Raises:
        ValueError: if concatenation fails for a key. The message lists the
            shapes of the offending values, and the original exception is
            chained so the root cause is not lost.
    """
    merged = {}
    for key in data_dicts[0].keys():
        try:
            merged[key] = judo.concatenate(
                [judo.to_backend(ddict[key]) for ddict in data_dicts]
            )
        except Exception as error:
            # assumes values expose .shape — TODO confirm for all backends
            shapes = str([ddict[key].shape for ddict in data_dicts])
            raise ValueError(shapes) from error
    return merged
def distribute(self, name, **kwargs):
    """Run ``name`` on every worker env over one chunk of the inputs and join the results."""
    chunks = self._split_inputs_in_chunks(**kwargs)
    pending = [
        env.execute(name=name, blocking=self._blocking, **chunk)
        for env, chunk in zip(self._envs, chunks)
    ]
    # In non-blocking mode each call returns a callable that resolves the result.
    resolved = [item() if not self._blocking else item for item in pending]
    if isinstance(resolved[0], dict):
        return self._merge_data(resolved)
    # Assumes batch of tensors
    backend_results = [judo.to_backend(item) for item in resolved]
    return judo.concatenate(backend_results)
def distribute(self, name, **kwargs):
    """Execute the target function in all the different workers."""
    chunks = self._split_inputs_in_chunks(**kwargs)
    # NOTE(review): imported here, presumably to avoid a hard ray dependency
    # at module import time — confirm.
    from fragile.distributed.ray import ray

    futures = [
        env.execute.remote(name=name, **chunk)
        for env, chunk in zip(self.envs, chunks)
    ]
    gathered = ray.get(futures)
    if isinstance(gathered[0], dict):
        return self._merge_data(gathered)
    # Assumes batch of tensors
    return judo.concatenate([judo.to_backend(item) for item in gathered])
def append(self, **kwargs):
    """Append batches of samples to the stored data.

    Args:
        **kwargs: each keyword must be one of ``self.names``; each value is a
            batch of data that is concatenated to the corresponding stored
            array. Scalars and 1-D vectors are reshaped into columns first.

    Raises:
        KeyError: if a keyword is not present in ``self.names``.
    """
    for name, val in kwargs.items():
        if name not in self.names:
            raise KeyError("%s not in self.names: %s" % (name, self.names))
        val = judo.to_backend(val)
        # Scalar vectors are transformed to columns
        if len(val.shape) == 0:
            val = judo.unsqueeze(val)
        if len(val.shape) == 1:
            val = val.reshape(-1, 1)
        current = getattr(self, name)
        try:
            processed = val if current is None else judo.concatenate([current, val])
            # Keep the memory bounded by dropping samples past max_size.
            if len(processed) > self.max_size:
                processed = processed[: self.max_size]
        except Exception:
            # Report through the instance logger (not print), guard against
            # current being None, and re-raise with the original traceback.
            current_shape = None if current is None else current.shape
            self._log.error(
                "Failed to append %s with shape %s to stored data of shape %s",
                name,
                val.shape,
                current_shape,
            )
            raise
        setattr(self, name, processed)
    self._log.info("Memory now contains %s samples" % len(self))