def check_grad(self, x, y=None): """Check gradients of the model using data x and label y if available """ if y is not None: # encode labels y = self._transform_labels([y])[0] print("Checking gradient... ", end='') diff = scipy_check_grad(self._get_loss_check_grad, self._get_grad_check_grad, [np.random.random()], x, y) print("diff = %.8f" % diff) return diff
def check_grad(self, x, y=None): """Check gradients of the model using data x and label y if available """ self._init() if y is not None: # encode labels y = self._encode_labels(y) # initialize weights self._init_params(x) print("Checking gradient... ", end='') diff = scipy_check_grad(self._get_loss_check_grad, self._get_grad_check_grad, self._roll_params(), x, y) print("diff = %.8f" % diff) return diff
def check_grad(model, param_name, f, g):
    from scipy.optimize import check_grad as scipy_check_grad
    from numpy import ravel

    p = ModelParameterAcessor(model, param_name)

    def eval_f(param_as_list):
        old_value = p.get()             # Save old value
        p.set_flattened(param_as_list)  # Set new value
        f_val = f()
        p.set(old_value)                # Restore old value
        return f_val

    def eval_g(param_as_list):
        old_value = p.get()             # Save old value
        p.set_flattened(param_as_list)  # Set new value
        g_val = ravel(g())
        p.set(old_value)                # Restore old value
        return g_val

    x0 = ravel(p.get())
    return scipy_check_grad(eval_f, eval_g, x0)
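The standalone version relies on a ModelParameterAcessor that is not shown here. Below is a minimal sketch of what such an accessor could look like (get / set / set_flattened over a single numpy-array attribute), followed by a hypothetical call with a toy model; the ToyModel class, the data, and the least-squares loss are illustrative assumptions, not part of the original code.

import numpy as np


class ModelParameterAcessor:
    """Sketch of the accessor assumed by check_grad above: wraps one
    numpy-array attribute of `model` named `param_name`."""

    def __init__(self, model, param_name):
        self.model = model
        self.param_name = param_name

    def get(self):
        return getattr(self.model, self.param_name)

    def set(self, value):
        setattr(self.model, self.param_name, value)

    def set_flattened(self, flat_value):
        # Reshape the flat vector back to the parameter's original shape
        shape = np.shape(self.get())
        self.set(np.reshape(flat_value, shape))


# Hypothetical usage with a toy linear model; f and g close over the model
# and fixed data, as check_grad(model, param_name, f, g) expects.
class ToyModel:
    def __init__(self, n_features):
        self.w = np.random.randn(n_features)


X = np.random.randn(10, 3)
t = np.random.randn(10)
model = ToyModel(3)

f = lambda: 0.5 * np.sum((X @ model.w - t) ** 2)   # scalar loss of model.w
g = lambda: X.T @ (X @ model.w - t)                # analytical gradient of f

diff = check_grad(model, "w", f, g)
print("diff = %.8f" % diff)   # near zero if g is consistent with f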