Example no. 1
0
 def testFortran(self):
     """`normalize_logspace` must accept a Fortran-contiguous input array."""
     np.random.seed(0)
     # Transposing a C-ordered random matrix yields an F-contiguous view.
     fortran_mat = random((NCOL, NROW)).T
     self.assertTrue(fortran_mat.flags["F_CONTIGUOUS"])
     normalized = normalize_logspace(fortran_mat)
     # After normalization every row should sum to one (within ACC).
     self.assertTrue(
         arrays_almost_equal(normalized.sum(1), np.ones(NROW), accuracy=ACC))
Example no. 2
0
    def _update_latent_resp(data, smm_dof, posterior_nws_scale,
                            posterior_nws_dof, log_smm_mixweight,
                            log_det_precision, scatter):
        """Update `latent_resp` (Eq 22 in Arch2007), fully vectorized."""
        num_features = data.shape[1]

        half_dof = (num_features + smm_dof) / 2
        # Per-component log normalizing constant (gamma-function terms).
        log_norm = (gammaln(half_dof) - gammaln(smm_dof / 2) -
                    (num_features / 2) * np.log(smm_dof * pi))
        # Argument of the log: 1 + scaled scatter + scale correction,
        # broadcast so rows index observations and columns components.
        quad = (1 +
                (posterior_nws_dof / smm_dof).T * scatter.T +
                (num_features / (smm_dof * posterior_nws_scale)).T)
        latent_resp = (log_norm + log_smm_mixweight +
                       log_det_precision / 2 - half_dof * np.log(quad))

        return normalize_logspace(latent_resp)
Example no. 3
0
    def _update_latent_resp(data, smm_dof, posterior_nws_scale,
                            posterior_nws_dof, log_smm_mixweight,
                            log_det_precision, scatter):
        """Update `latent_resp` (Eq 22 in Arch2007), one component at a time."""
        num_features = data.shape[1]
        num_comp = len(log_smm_mixweight)

        def _component_log_resp(k):
            # Hoist the per-component scalars used several times below.
            dof_k = smm_dof[k]
            half_dof = (num_features + dof_k) / 2
            quad = (1 + (posterior_nws_dof[k] / dof_k) *
                    scatter[k, :] +
                    num_features / (dof_k * posterior_nws_scale[k]))
            return (gammaln(half_dof) -
                    gammaln(dof_k / 2) -
                    (num_features / 2) * log(dof_k * pi) +
                    log_smm_mixweight[k] + log_det_precision[k] / 2 -
                    half_dof * log(quad))

        # Stack one row per component, then transpose to observations x components.
        latent_resp = np.array([_component_log_resp(k)
                                for k in range(num_comp)]).T

        return normalize_logspace(latent_resp)
Example no. 4
0
def init_d2_weighting(data, num_comp):
    """Pick `num_comp` seed observations with D^2 (k-means++-style) weighting.

    Each round draws one observation index with probability proportional to
    its (normalized) squared Mahalanobis distance to the nearest centroid
    chosen so far, then updates the per-observation shortest distances.

    Parameters
    ----------
    data : ndarray, shape (num_obs, num_features)
        Observations, one per row.
    num_comp : int
        Number of centroids to select.

    Returns
    -------
    ndarray of int, shape (num_comp,)
        Row indices of the selected centroids.
    """
    num_obs = data.shape[0]

    # Inverse covariance for the Mahalanobis metric (rowvar=0: rows = obs).
    cov_inv = np.linalg.inv(np.cov(data, rowvar=0))

    select_prob = np.ones(num_obs) / num_obs
    shortest_dist = np.inf * np.ones(num_obs)
    # BUGFIX: indices must be integers — a float array (np.ones) cannot be
    # used to index `data` rows on modern NumPy.
    centroid = np.zeros(num_comp, dtype=int)

    for k in range(num_comp):
        # Draw one observation as the next centroid; extract the scalar
        # index instead of assigning a length-1 array into a scalar slot.
        centroid[k] = np.nonzero(multinomial(1, select_prob))[0][0]
        center = data[centroid[k], :]

        # Recompute the distance of every observation to its nearest centroid.
        for i in range(num_obs):
            d_new = mahalanobis(center, data[i, :], cov_inv)
            if d_new < shortest_dist[i]:
                shortest_dist[i] = d_new

        # BUGFIX: the original called pow(x, 2, 1) — three-argument modular
        # pow, which ndarrays reject (TypeError). The intent is element-wise
        # squaring of the distances before normalization.
        squared = shortest_dist.reshape(1, num_obs) ** 2
        select_prob = normalize_logspace(squared).flatten()

    return centroid