def to_var(*arrays: ArrayLike, device: Device = 'cpu', requires_grad: bool = False):
    """
    Convert numpy arrays to torch Tensors.

    Parameters
    ----------
    arrays: array-like objects, that will be converted to torch Tensors.
    device
        the device on which to move ``x``. See `get_device` for details.
    requires_grad
        whether the tensors require grad.

    Notes
    -----
    If ``arrays`` contains a single argument the result will not be contained in a tuple:
    >>> x = to_var(x)
    >>> x, y = to_var(x, y)

    If this is not the desired behaviour, use `sequence_to_var`, which always returns a tuple of tensors.
    """
    # Convert everything first, then unwrap a length-one result.
    tensors = tuple(sequence_to_var(*arrays, device=device, requires_grad=requires_grad))
    return squeeze_first(tensors)
def get_random_patch(*arrays: np.ndarray, patch_size: AxesLike, axes: AxesLike = None, distribution: Callable = uniform):
    """
    Get a random patch of size ``patch_size`` along the ``axes`` for each of the ``arrays``.
    The patch position is equal for all the ``arrays``.

    Parameters
    ----------
    arrays
    patch_size
    axes
    distribution: Callable(shape)
        function that samples a random number in the range ``[0, n)`` for each axis. Defaults to a uniform distribution.

    Raises
    ------
    ValueError
        if no ``arrays`` are given.
    """
    if not arrays:
        raise ValueError('No arrays given.')

    axes = expand_axes(axes, patch_size)
    # all arrays must agree along the patched axes, since one box is cropped from each
    check_shape_along_axis(*arrays, axis=axes)

    shape = extract(arrays[0].shape, axes)
    # the number of valid start positions per axis equals the "output shape" of a
    # valid convolution with a kernel of size ``patch_size``
    start = distribution(shape_after_convolution(shape, patch_size))
    box = np.array([start, start + patch_size])

    return squeeze_first(tuple(crop_to_box(arr, box, axes) for arr in arrays))
def to_np(*tensors: torch.Tensor):
    """
    Convert torch Tensors to numpy arrays.

    Notes
    -----
    If ``tensors`` contains a single argument the result will not be contained in a tuple:
    >>> x = to_np(x)
    >>> x, y = to_np(x, y)

    If this is not the desired behaviour, use `sequence_to_np`, which always returns a tuple of arrays.
    """
    # Convert everything first, then unwrap a length-one result.
    arrays = tuple(sequence_to_np(*tensors))
    return squeeze_first(arrays)
def forward(self, xs):
    """
    Apply ``self.network`` slice-wise: the ``self.axis`` dimension is folded
    into the batch dimension, the network is run once on the flattened batch,
    and the slice axis is restored on every output.
    """
    batch_size = len(xs)
    n_slices = xs.shape[self.axis]

    # fold self.axis into the batch dim
    moved = moveaxis(xs, self.axis, 1)
    outputs = self.network(moved.reshape(-1, *moved.shape[2:]))

    # normalize to a tuple to handle networks with multiple outputs uniformly
    if isinstance(outputs, torch.Tensor):
        outputs = outputs,

    def restore(out):
        # unfold the batch dim and move the slice axis back to self.axis
        out = out.reshape(batch_size, n_slices, *out.shape[1:])
        return moveaxis(out, 1, self.axis)

    return squeeze_first([restore(out) for out in outputs])
def load_by_random_id(*loaders: Callable, ids: Sequence, weights: Sequence[float] = None,
                      random_state: Union[np.random.RandomState, int] = None):
    """
    Infinitely yield objects loaded by ``loaders`` according to the identifier from ``ids``.
    The identifiers are randomly sampled from ``ids`` according to the ``weights``.

    Parameters
    ----------
    loaders: Callable
        function, which loads object by its id.
    ids: Sequence
        the sequence of identifiers to sample from.
    weights: Sequence[float], None, optional
        The weights associated with each id. If ``None``, the weights are assumed to be equal.
        Should be the same size as ``ids``.
    random_state
        if not None - used to set the random seed for reproducibility reasons.
    """
    # ``sample`` yields ids forever; each one is fed to every loader in turn.
    for identifier in sample(ids, weights, random_state):
        loaded = tuple(pam(loaders, identifier))
        yield squeeze_first(loaded)