def compute_XD_results(n_components=10, max_iter=500):
    # Z and Zerr are module-level data and per-point covariance arrays.
    clf = XDGMM(n_components, max_iter=max_iter, tol=1e-03,
                verbose=False, random_state=None)
    clf.fit(Z, Zerr)
    return clf
def _xdFit(X, XErr, nGauss, n_iter=10):
    # Initialize with a plain GMM fit (legacy sklearn.mixture.GMM API),
    # then refine with extreme deconvolution using the per-point
    # covariances XErr.
    gmm = GMM(nGauss, n_iter=n_iter, covariance_type='full').fit(X)
    amp = gmm.weights_
    mean = gmm.means_
    covar = gmm.covars_
    # extreme_deconvolution updates amp, mean, and covar in place.
    xd.extreme_deconvolution(X, XErr, amp, mean, covar)
    # Wrap the converged parameters in an XDGMM instance.
    clf = XDGMM(nGauss)
    clf.alpha = amp
    clf.mu = mean
    clf.V = covar
    return clf
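# A minimal usage sketch for _xdFit, with illustrative names only: it
# assumes the legacy sklearn.mixture.GMM class (removed in modern
# scikit-learn) and that `xd` is the extreme_deconvolution wrapper,
# imported as `import extreme_deconvolution as xd`.
import numpy as np

rng = np.random.RandomState(42)
X_demo = rng.normal(size=(500, 2))           # (n_samples, n_features) noisy data
XErr_demo = np.zeros((500, 2, 2))
XErr_demo[:, [0, 1], [0, 1]] = 0.05 ** 2     # diagonal per-point covariances
clf_demo = _xdFit(X_demo, XErr_demo, nGauss=2)
print(clf_demo.alpha.sum(), clf_demo.mu.shape, clf_demo.V.shape)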
def getMarginalClf(self, cols=None):
    if cols is None:
        raise ValueError(
            "You have to specify the columns you want to keep so that I can marginalize over the rest."
        )
    # Index grids that slice the kept rows/columns out of each component's
    # covariance matrix.
    rowsV, colsV = np.meshgrid(cols, cols, indexing='ij')
    xdMarginal = XDClf(ngStar=self.ngStar, ngGal=self.ngGal, priorStar=self.priorStar)
    # Marginalizing a Gaussian mixture just drops the unwanted dimensions
    # from the means and covariances; the amplitudes are unchanged.
    xdMarginal.clfStar = XDGMM(self.ngStar)
    xdMarginal.clfStar.alpha = self.clfStar.alpha
    xdMarginal.clfStar.mu = self.clfStar.mu[:, cols]
    xdMarginal.clfStar.V = self.clfStar.V[:, rowsV, colsV]
    xdMarginal.clfGal = XDGMM(self.ngGal)
    xdMarginal.clfGal.alpha = self.clfGal.alpha
    xdMarginal.clfGal.mu = self.clfGal.mu[:, cols]
    xdMarginal.clfGal.V = self.clfGal.V[:, rowsV, colsV]
    if self.priorStar == 'auto':
        xdMarginal._priorStar = self._priorStar
    return xdMarginal
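# A small self-contained check (synthetic names) of the meshgrid indexing
# used above: V[:, rowsV, colsV] pulls the sub-block of each component
# covariance for the kept dimensions, equivalent to np.ix_ per component.
import numpy as np

V = np.arange(2 * 4 * 4, dtype=float).reshape(2, 4, 4)   # two 4x4 covariances
cols = [0, 2]                                            # dimensions to keep
rowsV, colsV = np.meshgrid(cols, cols, indexing='ij')
sub = V[:, rowsV, colsV]                                 # shape (2, 2, 2)
assert np.array_equal(sub[0], V[0][np.ix_(cols, cols)])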
def compute_XD_results(x, y, dx, dy, n_components=6, n_iter=50):
    # Stack the two observables and build per-point diagonal covariance
    # matrices from the measurement errors.
    X = np.vstack([x, y]).T
    Xerr = np.zeros(X.shape + X.shape[-1:])
    diag = np.arange(X.shape[-1])
    Xerr[:, diag, diag] = np.vstack([dx ** 2, dy ** 2]).T
    # EM can hit a singular covariance matrix; retry until the fit succeeds.
    clf = None
    while clf is None:
        try:
            clf = XDGMM(n_components, n_iter=n_iter, verbose=True)
            clf.fit(X, Xerr)
        except np.linalg.LinAlgError:
            print('Error: Singular Matrix. Retrying...')
            clf = None
    return clf
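# A hypothetical call of compute_XD_results above on synthetic data; the
# variable names are illustrative, and the astroML XDGMM version that
# accepts n_iter is assumed.
import numpy as np

rng = np.random.RandomState(0)
x_true = rng.normal(0.0, 1.0, 300)
y_true = 0.5 * x_true + rng.normal(0.0, 0.3, 300)
dx = np.full(300, 0.1)                       # 1-sigma measurement errors
dy = np.full(300, 0.1)
x_obs = x_true + dx * rng.normal(size=300)
y_obs = y_true + dy * rng.normal(size=300)
model = compute_XD_results(x_obs, y_obs, dx, dy, n_components=2, n_iter=100)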
def test_XDGMM_1D_gaussian(N=100, sigma=0.1):
    np.random.seed(0)
    mu = 0
    V = 1
    X = np.random.normal(mu, V, size=(N, 1))
    X += np.random.normal(0, sigma, size=(N, 1))
    Xerr = sigma ** 2 * np.ones((N, 1, 1))
    xdgmm = XDGMM(1).fit(X, Xerr)
    # because of sample variance, results will be similar
    # but not identical. We'll use a fudge factor of 0.1
    assert_allclose(mu, xdgmm.mu[0], atol=0.1)
    assert_allclose(V, xdgmm.V[0], atol=0.1)
def initialise(self):
    nmeas, ndim = self.data.shape
    lower_idxs = np.tril_indices(ndim, k=-1)
    if self.data_covariances is not None:
        # With measurement covariances available, use a one-component
        # extreme-deconvolution fit for the initial guess.
        xdgmm = XDGMM(1, 1000, verbose=True)
        xdgmm.fit(self.data, self.data_covariances)
        guess_mu = xdgmm.mu[0]
        guess_Sigma = xdgmm.V[0]
    else:
        # Otherwise fall back to a plain one-component Gaussian mixture.
        gmm = GaussianMixture(1, max_iter=1000, covariance_type='full').fit(self.data)
        guess_mu = gmm.means_[0]
        guess_Sigma = gmm.covariances_[0]
    # Cholesky-factor the covariance guess and pack its strict lower triangle.
    guess_chol = np.linalg.cholesky(guess_Sigma)
    guess_packed_chol = guess_chol[lower_idxs]
    return guess_mu, guess_Sigma, guess_packed_chol, guess_chol
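# A self-contained illustration (synthetic names, not from the original
# code) of the Cholesky packing used by initialise(): the strict lower
# triangle plus the diagonal fully determine the factor, hence Sigma.
import numpy as np

ndim = 3
rng = np.random.RandomState(1)
A = rng.normal(size=(ndim, ndim))
Sigma = A @ A.T + ndim * np.eye(ndim)        # a well-conditioned SPD matrix
chol = np.linalg.cholesky(Sigma)
lower_idxs = np.tril_indices(ndim, k=-1)
packed = chol[lower_idxs]                    # ndim*(ndim-1)/2 off-diagonal parameters
rebuilt = np.zeros((ndim, ndim))
rebuilt[lower_idxs] = packed
rebuilt[np.diag_indices(ndim)] = np.diag(chol)
assert np.allclose(rebuilt @ rebuilt.T, Sigma)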
def check_single_gaussian(N=100, D=3, sigma=0.1):
    np.random.seed(0)
    mu = np.random.random(D)
    V = np.random.random((D, D))
    V = np.dot(V, V.T)
    X = np.random.multivariate_normal(mu, V, size=N)
    Xerr = np.zeros((N, D, D))
    Xerr[:, range(D), range(D)] = sigma ** 2
    X += np.random.normal(0, sigma, X.shape)
    xdgmm = XDGMM(1)
    xdgmm.fit(X, Xerr)
    # because of sample variance, results will be similar
    # but not identical. We'll use a fudge factor of 0.1
    assert_allclose(mu, xdgmm.mu[0], atol=0.1)
    assert_allclose(V, xdgmm.V[0], atol=0.1)
def compute_XD_results(n_components=10, n_iter=500):
    clf = XDGMM(n_components, n_iter=n_iter)
    clf.fit(X, Xerr)
    return clf
def compute_XD(n_clusters=2, rseed=0, n_iter=30, verbose=True):
    # Seed for reproducible random initialization of the mixture.
    np.random.seed(rseed)
    clf = XDGMM(n_clusters, n_iter=n_iter, tol=1E-5, verbose=verbose)
    clf.fit(newZ, Zcov)  # newZ and Zcov are module-level data arrays
    return clf
def compute_XD(n_clusters=12, rseed=0, max_iter=100, verbose=True):
    # Seed for reproducible random initialization of the mixture.
    np.random.seed(rseed)
    clf = XDGMM(n_clusters, max_iter=max_iter, tol=1E-5, verbose=verbose)
    clf.fit(X, Xcov)  # X and Xcov are module-level data arrays
    return clf
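# A hedged demo of the compute_XD pattern above, with synthetic data; X
# and Xcov here stand in for the module-level arrays the function expects,
# and an astroML XDGMM version accepting max_iter is assumed.
import numpy as np
from astroML.density_estimation import XDGMM

rng = np.random.RandomState(0)
X = rng.normal(size=(200, 3))
Xcov = np.tile(0.05 ** 2 * np.eye(3), (200, 1, 1))   # per-point diagonal covariances
clf = compute_XD(n_clusters=3, max_iter=50, verbose=False)
print(clf.mu.shape, clf.V.shape, clf.alpha.sum())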