def loss(self, args, X):
    if type(args) is not list:
        args = args_from_vector(args, self.params)  # Reshape theta -> args
    args += [X]
    # Compute loss from args
    loss = self._loss(*args)
    return loss
def f_df(self, args, X):
    if type(args) is not list:
        args = args_from_vector(args, self.params)  # Reshape theta -> args
    args += [X]
    # Compute gradient from args
    #grad = vector_from_args(self.grads(*args))
    grad = self.grads(*args)
    loss = self._loss(*args)
    return loss, grad
def get_weights(self, theta):
    args = args_from_vector(theta, self.params)
    # Return the first 2-D parameter array (the weight matrix)
    for arg in args:
        if arg.ndim == 2:
            return arg
def get_args(self, theta):
    args = args_from_vector(theta, self.params)
    return args
def set_params(self, theta):
    params = args_from_vector(theta, [self.W, self.hidbias, self.visbias])
    self.W = params[0]
    self.hidbias = params[1]
    self.visbias = params[2]
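
# Usage sketch (illustrative; not part of the original source). It assumes
# `model` is an instance of the class these methods belong to, `theta0` is a
# flat parameter vector, and `X` is a data batch. `vector_from_args` is
# assumed to be the inverse of `args_from_vector`, flattening a list of
# gradient arrays into a single vector (as hinted by the commented-out line
# in f_df above). All names below are hypothetical placeholders.
#
#     from scipy.optimize import minimize
#
#     def objective(theta):
#         loss, grads = model.f_df(theta, X)
#         return loss, vector_from_args(grads)
#
#     result = minimize(objective, theta0, jac=True, method="L-BFGS-B")
#     model.set_params(result.x)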