def set_binary_maps_as_target(dataset: Dataset, invert: bool = False, binary_images: List[np.ndarray] = None, smoothen_labels: bool = False) -> Dataset:
    """Replace ``dataset.actions`` with (optionally blurred) binary target maps.

    :param dataset: dataset whose ``actions`` field is overwritten in place.
    :param invert: forwarded to ``parse_binary_maps`` when maps are extracted
        from the observations.
    :param binary_images: precomputed binary maps, one per observation; when
        ``None`` they are parsed from a deep copy of ``dataset.observations``.
    :param smoothen_labels: when True, gaussian-blur the maps so predictions
        close to the target line are punished less severely.
    :return: the same (mutated) dataset.
    """
    if binary_images is None:
        # Parse from a deep copy so the stored observations stay untouched.
        binary_images = parse_binary_maps(copy.deepcopy(dataset.observations), invert=invert)
    # (N, 1, H, W): add a channel dim per map, then stack along a batch dim.
    per_map_tensors = [torch.as_tensor(img).unsqueeze(0) for img in binary_images]
    stacked_maps = torch.stack(per_map_tensors, dim=0)
    if smoothen_labels:
        # Smoothen binary maps to punish line predictions close to the line less severely.
        stacked_maps = gaussian_blur2d(stacked_maps, kernel_size=(11, 11), sigma=(4, 4))
    dataset.actions = [m.squeeze() for m in stacked_maps]
    return dataset
def get_data_batch(self) -> Generator[Dataset, None, None]:
    """Yield the dataset as consecutive batches of ``config.batch_size`` samples.

    Reloads the dataset first when it is empty or when the config asks to loop
    over the HDF5 files again. A ``batch_size`` of -1 yields everything in a
    single batch. The final batch may be smaller than ``batch_size``.
    """
    if len(self._dataset) == 0 or self._config.loop_over_hdf5_files:
        self.load_dataset()
    total = len(self._dataset)
    # -1 means "no batching": take the whole dataset in one step.
    step = self._config.batch_size if self._config.batch_size != -1 else total
    start = 0
    while start < total:
        end = min(start + step, total)
        batch = Dataset()
        batch.observations = self._dataset.observations[start:end]
        batch.actions = self._dataset.actions[start:end]
        batch.done = self._dataset.done[start:end]
        batch.rewards = self._dataset.rewards[start:end]
        yield batch
        start += step
def set_binary_maps_as_target(dataset: Dataset, invert: bool = False, binary_images: List[np.ndarray] = None) -> Dataset:
    """Replace ``dataset.actions`` with binary target maps as torch tensors.

    :param dataset: dataset whose ``actions`` field is overwritten in place.
    :param invert: forwarded to ``parse_binary_maps`` when maps are extracted
        from the observations.
    :param binary_images: precomputed binary maps, one per observation; when
        ``None`` they are parsed from a deep copy of ``dataset.observations``.
    :return: the same (mutated) dataset.
    """
    if binary_images is None:
        # Parse from a deep copy so the stored observations stay untouched.
        binary_images = parse_binary_maps(copy.deepcopy(dataset.observations), invert=invert)
    dataset.actions = list(map(torch.as_tensor, binary_images))
    return dataset