Example #1
0
 def _priors(self):
     """Return the Dirichlet log-likelihood of the topic proportions.

     Iterates over every categorical feature name, looks up its
     ``<name>_mixture`` component, and accumulates the Dirichlet
     likelihood of that component's weights into a single loss term.
     """
     total = None
     for feature in self.categorical_feature_names:
         mixture_key = feature + "_mixture"
         likelihood = dirichlet_likelihood(self[mixture_key].weights)
         # Accumulate in the same order as the original (new term first).
         if total is None:
             total = likelihood
         else:
             total = likelihood + total
     return total
Example #2
0
 def _priors(self, contexts):
     """Return the Dirichlet log-likelihood of the topic proportions.

     ``contexts`` is accepted for interface compatibility but is not
     used by this computation. The loss is the sum of the Dirichlet
     likelihoods of each categorical feature's mixture weights.
     """
     total = None
     names = (f + "_mixture" for f in self.categorical_feature_names)
     for mixture_key in names:
         dl = dirichlet_likelihood(self[mixture_key].weights)
         total = dl if total is None else dl + total
     return total
Example #3
0
 def _priors(self):
     """Return the Dirichlet log-likelihood of the topic proportions.

     For each categorical feature, adds the Dirichlet likelihood of its
     mixture weights; when the feature's ``penalty`` flag is set, a
     cross-covariance term over the mixture factors is added as well.
     """
     accumulated = None
     for feat_name, spec in self.categorical_features.items():
         _embedding, _transform, _loss_func, penalty = spec
         mixture = self[feat_name + "_mixture"]
         term = dirichlet_likelihood(mixture.weights)
         if penalty:
             # Penalize correlated topic factors via cross-covariance.
             w = mixture.factors.W
             term += F.cross_covariance(w, w)
         accumulated = term if accumulated is None else term + accumulated
     return accumulated
Example #4
0
 def _priors(self):
     """Return the Dirichlet log-likelihood of the topic proportions.

     Sums, over all categorical features, the Dirichlet likelihood of
     the feature's mixture weights plus (when the feature's penalty
     flag is set) a cross-covariance penalty on its factor matrix.
     """
     loss_total = None
     for name, (_emb, _trans, _lfunc, penalize) in \
             self.categorical_features.items():
         mixture = self[name + "_mixture"]
         dl = dirichlet_likelihood(mixture.weights)
         if penalize:
             factors = mixture.factors.W
             dl += F.cross_covariance(factors, factors)
         loss_total = dl if loss_total is None else dl + loss_total
     return loss_total
Example #5
0
 def prior(self):
     """Return the Dirichlet log-likelihood of the document embedding.

     Defaults to initialization with a uniform prior (1 / n_topics),
     controlled by ``self.alpha``.
     """
     doc_weights = self.mixture.Doc_Embedding
     return DL.dirichlet_likelihood(doc_weights, alpha=self.alpha)