def forward(x, h0, h1, skip_hps, o_dim, ri_dim, mode):
    """Single-level forward wavelet transform.

    Resolves the integer padding ``mode`` and the orientation/real-imag
    dimension indices, runs the level-1 analysis ``fwd_j1``, and packs the
    real and imaginary highpass outputs into one tensor stacked along
    ``ri_dim``. When ``skip_hps`` is set, the highpasses are not computed
    and a scalar zero tensor (shape ``[]``) is returned as a sentinel.

    Returns:
        (ll, highs): lowpass tensor and stacked highpasses (or the
        empty-shape sentinel when ``skip_hps`` is true).
    """
    mode = int_to_mode(mode)
    # Only the first two entries (orientation dim, real/imag dim) are needed.
    o_dim, ri_dim = get_dimensions5(o_dim, ri_dim)[:2]
    ll, highr, highi = fwd_j1(x, h0, h1, skip_hps, o_dim, mode)
    if skip_hps:
        # Sentinel: a 0-dim tensor signals "no highpasses" to the inverse.
        highs = ll.new_zeros([])
    else:
        highs = torch.stack((highr, highi), dim=ri_dim)
    return ll, highs
def forward(lows, highs, g0, g1, o_dim, ri_dim, mode):
    """Single-level inverse wavelet transform.

    Resolves the integer padding ``mode`` and the dimension indices, splits
    the stacked highpass tensor back into its real and imaginary parts
    along ``ri_dim``, and reconstructs the signal with ``inv_j1``. A
    ``None`` or 0-dim ``highs`` is treated as "no highpasses" (the sentinel
    produced by the forward pass) and replaced with scalar zeros.

    Returns:
        The reconstructed tensor from ``inv_j1``.
    """
    mode = int_to_mode(mode)
    o_dim, ri_dim, h_dim, w_dim = get_dimensions5(o_dim, ri_dim)
    if highs is None or highs.shape == torch.Size([]):
        # Missing highpasses: substitute 0-dim zero tensors so inv_j1
        # reconstructs from the lowpass alone.
        highr = lows.new_zeros([])
        highi = lows.new_zeros([])
    else:
        highr, highi = torch.unbind(highs, dim=ri_dim)
    return inv_j1(lows, highr, highi, g0, g1, o_dim, h_dim, w_dim, mode)
def forward(ctx, x, h0, h1, skip_hps, o_dim, ri_dim, mode):
    """Autograd-aware single-level forward wavelet transform.

    Mirrors the plain forward pass, additionally stashing on ``ctx`` what
    the backward pass needs: the resolved padding mode, the filter tensors
    (via ``save_for_backward``), and the resolved dimension indices.

    Returns:
        (ll, highs): lowpass tensor and highpasses stacked along the
        real/imag dimension, or a 0-dim zero sentinel when ``skip_hps``.
    """
    mode = int_to_mode(mode)
    ctx.mode = mode
    ctx.save_for_backward(h0, h1)
    ctx.dims = get_dimensions5(o_dim, ri_dim)
    # Unpack only the two entries used here; ctx.dims keeps the full tuple
    # for backward.
    o_dim, ri_dim = ctx.dims[0], ctx.dims[1]
    ll, highr, highi = fwd_j1(x, h0, h1, skip_hps, o_dim, mode)
    if skip_hps:
        # 0-dim sentinel tensor in place of the skipped highpasses.
        highs = ll.new_zeros([])
    else:
        highs = torch.stack((highr, highi), dim=ri_dim)
    return ll, highs