def construct_proposal(self, y):
    """
    Construct the kernel-adaptive proposal distribution at the current state y.

    The proposal is a mixture of Gaussians, all centred at y, where component
    ii has covariance gamma^2 I + nu^2 M H a a' H M', with a the ii-th
    eigenvector of the centred kernel matrix Kc = HKH.

    Parameters
    ----------
    y : 1-D array — the current state of the chain.

    Returns
    -------
    MixtureDistribution with self.num_eigen Gaussian components, all
    centred at y.

    NOTE(review): this method appears twice in this file with identical
    behaviour; one of the two definitions should be removed.
    """
    assert len(shape(y)) == 1

    m = MixtureDistribution(self.distribution.dimension, self.num_eigen)
    # Mixing weights proportional to smoothed eigenvalues; the +1 smoothing
    # keeps every component selectable even for tiny/zero eigenvalues.
    m.mixing_proportion = Discrete(
        (self.eigvalues + 1) / (sum(self.eigvalues) + self.num_eigen))

    M = 2 * self.kernel.gradient(y, self.Z)
    H = Kernel.centring_matrix(len(self.Z))
    # H.dot(M) is independent of the loop index — hoist it out of the loop
    # instead of recomputing this matrix product for every eigenvector.
    HM = H.dot(M)
    for ii in range(self.num_eigen):
        a = self.eigvectors[:, ii]
        Sigma = self.gamma ** 2 * eye(len(y)) + \
            self.nu2 * (M.T).dot(H.dot(outer(a, a).dot(HM)))
        m.components[ii] = Gaussian(y, Sigma)
    return m
def construct_proposal(self, y):
    """
    Build the proposal distribution at state y: a mixture of normals
    centred at y whose ii-th component has covariance
    gamma^2 I + nu^2 M H a a' H M', where a is the ii-th eigenvector of
    the centred kernel matrix Kc = HKH.
    """
    assert len(shape(y)) == 1

    proposal = MixtureDistribution(self.distribution.dimension, self.num_eigen)
    # Eigenvalue-proportional mixing weights, smoothed by +1 per component.
    weights = (self.eigvalues + 1) / (sum(self.eigvalues) + self.num_eigen)
    proposal.mixing_proportion = Discrete(weights)

    grad_mat = 2 * self.kernel.gradient(y, self.Z)
    centring = Kernel.centring_matrix(len(self.Z))
    base_cov = self.gamma ** 2 * eye(len(y))
    for idx in range(self.num_eigen):
        eigvec = self.eigvectors[:, idx]
        rank_one = outer(eigvec, eigvec)
        cov = base_cov + self.nu2 * grad_mat.T.dot(
            centring.dot(rank_one.dot(centring.dot(grad_mat))))
        proposal.components[idx] = Gaussian(y, cov)
    return proposal
def mean_and_cov_adapt(self, learn_scale):
    """
    Perform one stochastic-approximation update of the running mean and
    covariance estimates using the current sample and step size learn_scale.
    """
    sample = reshape(self.current_sample_object.samples,
                     (self.distribution.dimension,))
    delta = sample - self.mean_est
    # Update the covariance first: it must use the mean estimate from
    # *before* this step's mean update.
    self.cov_est += learn_scale * (outer(delta, delta) - self.cov_est)
    self.mean_est += learn_scale * delta