def test_no_change(self, mode, padding_mode):
    """A scale of 1 should leave the input unchanged."""
    n_samples, n_channels, lookback, n_assets = 2, 3, 4, 5

    X = torch.rand(n_samples, n_channels, lookback, n_assets)
    scale = torch.ones(n_samples, dtype=X.dtype)  # scale=1 for every sample

    layer = Zoom(mode=mode, padding_mode=padding_mode)
    x_zoomed = layer(X, scale)

    assert torch.allclose(x_zoomed, X)
def test_basic(self, Xy_dummy, mode, padding_mode):
    """Output is a tensor with the same shape, dtype and device as the input."""
    X, _, _, _ = Xy_dummy
    dtype, device = X.dtype, X.device
    n_samples, _, _, n_assets = X.shape

    layer = Zoom(mode=mode, padding_mode=padding_mode)
    scale = torch.rand(n_samples, dtype=dtype, device=device)
    x_zoomed = layer(X, scale)

    assert torch.is_tensor(x_zoomed)
    assert x_zoomed.shape == X.shape
    assert x_zoomed.dtype == X.dtype
    assert x_zoomed.device == X.device
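# The `mode` and `padding_mode` arguments above come from pytest
# parametrization not shown in this excerpt. A minimal sketch of how they
# could be supplied -- the class name `TestZoom` and the chosen values are
# assumptions; the values listed are the ones `torch.nn.functional.grid_sample`
# accepts, which `Zoom` presumably forwards to:
import pytest


@pytest.mark.parametrize('mode', ['bilinear', 'nearest'])
@pytest.mark.parametrize('padding_mode', ['zeros', 'border', 'reflection'])
class TestZoom:
    ...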
def test_equality_with_warp(self):
    """Zooming with scale 0.5 matches warping with a linearly increasing tform."""
    n_samples, n_channels, lookback, n_assets = 2, 3, 4, 5

    X = torch.rand(n_samples, n_channels, lookback, n_assets)
    scale = torch.ones(n_samples, dtype=X.dtype) * 0.5
    tform = torch.stack(n_samples * [torch.linspace(0, end=1, steps=lookback)], dim=0)

    layer_zoom = Zoom()
    layer_warp = Warp()

    x_zoomed = layer_zoom(X, scale)
    x_warped = layer_warp(X, tform)

    assert torch.allclose(x_zoomed, x_warped)
"""Visual comparison of Zoom under different scales and padding modes."""
# Imports assume the deepdow package layout (`Zoom` in `deepdow.layers`,
# `sin_single` in `deepdow.data.synthetic`).
import matplotlib.pyplot as plt
import numpy as np
import torch

from deepdow.data.synthetic import sin_single
from deepdow.layers import Zoom

lookback, n_assets = 40, 50
scale_grid = [1, 0.5, 2]
padding_grid = ['zeros', 'reflection']
dtype = torch.float

x = torch.as_tensor(np.stack(n_assets * [-sin_single(lookback, freq=1 / lookback)], axis=1), dtype=dtype)
x = x[None, None, ...]  # add sample and channel dimensions

fig, axs = plt.subplots(len(padding_grid), len(scale_grid), sharex=True, sharey=True)

for r, padding_mode in enumerate(padding_grid):
    for c, scale in enumerate(scale_grid):
        layer = Zoom(padding_mode=padding_mode)
        x_out = layer(x, torch.ones(1) * scale)

        axs[r, c].imshow(x_out.numpy()[0, 0])
        axs[r, c].set_title('scale={}\npad={}'.format(scale, padding_mode))

plt.setp(axs[-1, :], xlabel='Assets')
plt.setp(axs[:, 0], ylabel='Time')
fig.subplots_adjust(hspace=1)
plt.show()  # display the comparison grid when run as a script
def test_n_parameters(self):
    n_parameters = sum(p.numel() for p in Zoom().parameters() if p.requires_grad)

    assert n_parameters == 0
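# ---------------------------------------------------------------------------
# Hypothetical usage sketch (not part of the test suite): Zoom as a
# train-time augmentation. It relies only on the API exercised above --
# `Zoom(mode=..., padding_mode=...)` called with a per-sample scale tensor on
# input of shape (n_samples, n_channels, lookback, n_assets); the batch
# dimensions and scale range are illustrative choices.
# ---------------------------------------------------------------------------
import torch

from deepdow.layers import Zoom  # assumed import path, as in the tests

batch = torch.rand(16, 1, 30, 10)  # (n_samples, n_channels, lookback, n_assets)
layer = Zoom(mode='bilinear', padding_mode='reflection')

# Draw a random zoom factor per sample, uniform over [0.5, 1.5).
scale = 0.5 + torch.rand(batch.shape[0], dtype=batch.dtype)
batch_augmented = layer(batch, scale)

# The augmentation preserves the shape, as asserted by test_basic above.
assert batch_augmented.shape == batch.shape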