def test_gradients_against_finite_difference(self, temp=1.0, j=0):
  """Checks analytic gradients of _soft_sw_affine against central differences."""

  def num_grad(f, x, eps=1e-4):
    """Approximates the gradient of scalar-valued `f` at `x` by central differences."""
    shape = x.shape
    x_flat = x.ravel()
    grad_flat = np.zeros_like(x_flat)
    # Renamed loop index to `i` to avoid shadowing the method argument `j`.
    for i in range(len(x_flat)):
      e = np.zeros_like(x_flat)
      e[i] = 1
      call1 = f((x_flat + eps * e).reshape(shape))
      call2 = f((x_flat - eps * e).reshape(shape))
      grad_flat[i] = (call1 - call2) / (2 * eps)
    return grad_flat.reshape(shape)

  toy_sub = self._toy_sub.numpy()
  toy_gap_open = self._toy_gap_open.numpy()
  toy_gap_extend = self._toy_gap_extend.numpy()

  _, g_sim_mat, g_gap_open, g_gap_extend = npy_ops._soft_sw_affine(
      sim_mat=toy_sub[j],
      gap_open=toy_gap_open[j],
      gap_extend=toy_gap_extend[j],
      temperature=temp,
      ret_grads=True)

  # Gradient w.r.t. sim_mat.
  def f_sim_mat(x):
    return npy_ops._soft_sw_affine(
        sim_mat=x,
        gap_open=toy_gap_open[j],
        gap_extend=toy_gap_extend[j],
        temperature=temp,
        ret_grads=False)

  g_sim_mat_num = num_grad(f_sim_mat, toy_sub[j].astype(np.float64))
  self.assertAllClose(g_sim_mat, g_sim_mat_num)

  # Gradient w.r.t. gap_open.
  def f_gap_open(x):
    return npy_ops._soft_sw_affine(
        sim_mat=toy_sub[j],
        gap_open=x,
        gap_extend=toy_gap_extend[j],
        temperature=temp,
        ret_grads=False)

  g_gap_open_num = num_grad(f_gap_open, toy_gap_open[j].astype(np.float64))
  self.assertAllClose(g_gap_open, g_gap_open_num)

  # Gradient w.r.t. gap_extend.
  def f_gap_ext(x):
    return npy_ops._soft_sw_affine(
        sim_mat=toy_sub[j],
        gap_open=toy_gap_open[j],
        gap_extend=x,
        temperature=temp,
        ret_grads=False)

  g_gap_extend_num = num_grad(f_gap_ext, toy_gap_extend[j].astype(np.float64))
  self.assertAllClose(g_gap_extend, g_gap_extend_num)
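
# Illustrative sketch (not part of the test suite): `num_grad` above implements
# the central-difference approximation
#   df/dx_i ~ (f(x + eps * e_i) - f(x - eps * e_i)) / (2 * eps),
# whose truncation error is O(eps^2), versus O(eps) for one-sided forward
# differences. The hypothetical check below applies the same scheme to
# f(x) = sum(x**2), whose analytic gradient is 2 * x. It assumes `np` is
# numpy, as imported elsewhere in this file; the function name is invented
# for illustration only.
def _central_difference_demo():
  f = lambda x: np.sum(x ** 2)  # Scalar-valued test function.
  x = np.arange(6, dtype=np.float64).reshape(2, 3)
  eps = 1e-4
  grad = np.zeros_like(x)
  it = np.nditer(x, flags=['multi_index'])
  for _ in it:
    e = np.zeros_like(x)
    e[it.multi_index] = 1.0  # Unit perturbation along one coordinate.
    grad[it.multi_index] = (f(x + eps * e) - f(x - eps * e)) / (2 * eps)
  # For a quadratic, central differences are exact up to rounding error.
  np.testing.assert_allclose(grad, 2.0 * x, rtol=1e-6)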