Example #1
 def __init__(self, mln, mrf, pmbMethod="old", diffMethod="blocking", **params):
     '''
     pmbMethod: 'excl' or 'old'
         concerns the calculation of the probability of a ground atom assignment given the ground atom's Markov blanket.
         If set to 'old', consider only the two assignments of the ground atom x (i.e. add the weights of any ground
         formulas within which x appears for both cases and then use the appropriate fraction).
         If set to 'excl', consider mutual exclusiveness and exhaustiveness by looking at all the assignments of the
         block that x is in (and all the formulas that are affected by any of the atoms in the block). We obtain an
         exponentiated sum of weights for each block assignment and consider the fraction of those block assignments
         where x has a given value.
     
     diffMethod: "blocking" or "simple"
         This applies to parameter learning with pseudo-likelihood, where, for each ground atom x, we compute the difference
         in the number of true groundings of a formula between the case where x's truth value is flipped and the case where
         it remains the same (as indicated by the training db).
         If set to 'blocking', then we not only consider the effects of flipping x itself but also flips of any
         ground atoms with which x appears together in a block, because flipping them may (or may not) affect the truth
         value of x and thus the truth of ground formulas within which x appears.        
     '''
     AbstractLearner.__init__(self, mln, mrf, **params)
     self.pmbMethod = pmbMethod
     self.diffMethod = diffMethod
     if any(isinstance(b, SoftMutexVariable) for b in self.mrf.variables):
         raise Exception('%s cannot handle soft-functional constraints' % self.__class__.__name__)
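To make the pmbMethod distinction concrete, here is a minimal sketch of the 'old' variant under simplified assumptions: world is a dict mapping ground atoms to truth values, and each ground formula gf exposes hypothetical gf.weight, gf.atoms and gf.istrue(world) helpers that are not part of the library code above. Only the two assignments of x are compared.

import math

def pmb_old(x, world, ground_formulas):
    # Sum the weights of the satisfied ground formulas containing x
    # for a given truth value of x.
    def weight_sum(value):
        world[x] = value
        return sum(gf.weight for gf in ground_formulas
                   if x in gf.atoms and gf.istrue(world))
    observed = world[x]                       # truth value given by the training db
    s_true, s_false = weight_sum(True), weight_sum(False)
    world[x] = observed                       # restore the original assignment
    # Fraction over the two assignments of x only ('old');
    # 'excl' would instead sum over all assignments of x's block.
    numerator = math.exp(s_true if observed else s_false)
    return numerator / (math.exp(s_true) + math.exp(s_false))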
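The diffMethod option can be sketched in the same spirit: for a ground formula gf we count how its number of true groundings changes when atoms are flipped relative to the training world. The helper gf.counttrue(world) is hypothetical; 'simple' flips only x, while 'blocking' also flips the other atoms of x's block, since those flips can change the truth of ground formulas in which x appears.

def grounding_diffs(gf, x, world, block, diffMethod="blocking"):
    # Number of true groundings of gf in the (unmodified) training world.
    n_observed = gf.counttrue(world)
    atoms = block if diffMethod == "blocking" else [x]
    diffs = {}
    for atom in atoms:
        flipped = dict(world)                 # copy, so the training world stays intact
        flipped[atom] = not flipped[atom]
        diffs[atom] = gf.counttrue(flipped) - n_observed
    return diffs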
Example #2
 def __init__(self, mrf, **params):
     AbstractLearner.__init__(self, mrf, **params)
     self.partitions = []
     self.repart = 0