def compute_prior(self):
    # Combine the base GaussianLOTHypothesis prior with a Gaussian prior over
    # each fit constant (mean 0.0, standard deviation CONSTANT_SD).
    self.prior = GaussianLOTHypothesis.compute_prior(self)
    self.prior += sum(map(lambda x: normlogpdf(x, 0.0, CONSTANT_SD),
                          self.CONSTANT_VALUES))
    self.posterior_score = self.prior + self.likelihood
    return self.prior
def toMaximize(fit_params):
    # Closure over self and data: install the candidate constants, then return
    # the negative of (likelihood + constant prior) so a minimizer can fit them.
    self.CONSTANT_VALUES = fit_params.tolist()  # set these
    # The likelihood is recomputed with the constants we just set
    # (via get_function_responses above).
    constant_prior = sum(map(lambda x: normlogpdf(x, 0.0, CONSTANT_SD),
                             self.CONSTANT_VALUES))
    return -(GaussianLOTHypothesis.compute_likelihood(self, data) + constant_prior)
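Every snippet on this page scores constants with normlogpdf. For orientation, here is a minimal sketch of what such a helper computes, assuming it is the standard Gaussian log-density with signature normlogpdf(x, mu, sd); the library's own helper may simply wrap scipy.stats.norm.logpdf:

import math

def normlogpdf(x, mu, sd):
    """Sketch: log-density of a Normal(mu, sd) distribution evaluated at x."""
    return -0.5 * math.log(2.0 * math.pi * sd ** 2) - (x - mu) ** 2 / (2.0 * sd ** 2)

# With CONSTANT_SD = 1.0, a single fit constant c contributes normlogpdf(c, 0.0, 1.0)
# to the prior; the snippets above simply sum this term over all constants.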
Example #3
def to_maximize(fit_params):
    # Closure over self and data: install the candidate constants, then return
    # the negative of (likelihood + constant prior) so a minimizer can fit them.
    self.parameters = fit_params.tolist()  # set these
    # The likelihood is recomputed with the constants we just set
    # (via get_function_responses above).
    constant_prior = sum(map(lambda x: normlogpdf(x, 0.0, self.constant_sd),
                             self.parameters))
    return -(LOTHypothesis.compute_likelihood(self, data) + constant_prior)
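Because to_maximize returns a negated log score, it is presumably handed to a numerical minimizer. Below is a self-contained sketch of that pattern using scipy.optimize.fmin; the demo objective and the "true" constants (1.5, -0.5) are made up for illustration, and the real code would live inside the enclosing method that defines self and data.

import numpy as np
from scipy.optimize import fmin
from scipy.stats import norm

# Toy stand-in for the closure above: recover two constants by minimizing a
# negative Gaussian log-score centered on made-up "true" values.
def to_maximize_demo(fit_params):
    c = fit_params.tolist()
    return -(norm.logpdf(c[0], loc=1.5, scale=0.1) +
             norm.logpdf(c[1], loc=-0.5, scale=0.1))

best_fit = fmin(to_maximize_demo, np.zeros(2), disp=False)
# best_fit ends up near [1.5, -0.5]; the real code would write it back,
# e.g. self.parameters = best_fit.tolist()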
Example #4
    def compute_single_likelihood(self, datum):
        """Compute the likelihood with a Gaussian; wraps normlogpdf so a nan score becomes -Infinity."""

        ret = normlogpdf(self(*datum.input), datum.output, datum.ll_sd)

        if isnan(ret):
            return -Infinity
        else:
            return ret
Example #6
    def compute_single_likelihood(self, datum):
        # Vector-valued outputs: score each output dimension with an independent
        # Gaussian and sum the per-dimension log-likelihoods.
        v = self(*datum.input)

        ret = sum([normlogpdf(vi, di, datum.ll_sd) for vi, di in zip(v, datum.output)])

        if isnan(ret):
            return -Infinity
        else:
            return ret
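These likelihood methods only assume a datum object exposing input, output, and ll_sd attributes (in LOTlib this is typically a FunctionData-style object; the namedtuple below is a hypothetical stand-in). A runnable sketch of both the scalar and the vector scoring, using scipy.stats.norm.logpdf in place of normlogpdf:

from collections import namedtuple
from scipy.stats import norm

# Hypothetical stand-in datum carrying the three attributes read above.
Datum = namedtuple('Datum', ['input', 'output', 'll_sd'])

hypothesis = lambda x: x ** 2                    # toy stand-in for self(*datum.input)

# Scalar output, as in Example #4:
d1 = Datum(input=(2.0,), output=4.1, ll_sd=0.5)
ll_scalar = norm.logpdf(hypothesis(*d1.input), loc=d1.output, scale=d1.ll_sd)

# Vector output, as in Example #6: score each dimension independently and sum.
d2 = Datum(input=(2.0,), output=[3.9, 4.1], ll_sd=0.5)
v = [hypothesis(*d2.input)] * 2                  # pretend the hypothesis returns a 2-vector
ll_vector = sum(norm.logpdf(vi, loc=di, scale=d2.ll_sd) for vi, di in zip(v, d2.output))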
Example #7
    def generate(self, x='*USE_START*', d=0):
        """
        RealValueGrammar.generate may create gaussians or uniforms when given "*gaussian*" or
        "*uniform*" as the nonterminal type. Otherwise, this is identical to LOTlib.Grammar.generate.
        """
        if x == '*USE_START*':
            x = self.start

        if x == '*gaussian*':
            # TODO: HIGHLY EXPERIMENTAL!!
            # Wow this is really terrible for mixing...
            v = np.random.normal()
            gp = normlogpdf(v, 0.0, 1.0)
            return FunctionNode(returntype=x, name=str(v), args=None, generation_probability=gp,
                                ruleid=0, resample_p=CONSTANT_RESAMPLE_P)  # TODO: fix the ruleid
        elif x == '*uniform*':
            v = np.random.rand()
            gp = 0.0  # log-density of Uniform(0, 1)
            return FunctionNode(returntype=x, name=str(v), args=None, generation_probability=gp,
                                ruleid=0, resample_p=CONSTANT_RESAMPLE_P)  # TODO: fix the ruleid
        else:
            # Otherwise fall back to normal grammar generation
            return Grammar.generate(self, x, d=d)
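The point of Example #7 is that a sampled constant carries its own log generation probability, so its Gaussian score flows into the tree's prior. Here is a tiny sketch of that bookkeeping, with FunctionNode and the grammar machinery stubbed out and only the probability arithmetic shown (the -3.2 for the rest of the tree is made up):

import numpy as np
from scipy.stats import norm

# Sample a real-valued constant the way the '*gaussian*' branch above does.
v = np.random.normal()
gp = norm.logpdf(v, loc=0.0, scale=1.0)   # the node's log generation probability

# Under a PCFG-style prior, a tree's log prior sums the log generation
# probabilities of its nodes, so this constant contributes exactly gp.
rest_of_tree_logp = -3.2                  # made-up contribution of the other nodes
tree_log_prior = rest_of_tree_logp + gp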
Example #8
def to_maximize(fit_params):
    # Same pattern as Example #3: install the candidate constants, then return
    # the negative of (likelihood + constant prior) for the minimizer.
    self.parameters = fit_params.tolist()  # set these
    # The likelihood is recomputed with the constants we just set
    # (via get_function_responses above).
    constant_prior = sum(map(lambda x: normlogpdf(x, 0.0, self.constant_sd), self.parameters))
    return -(LOTHypothesis.compute_likelihood(self, data) + constant_prior)
Example #9
def compute_prior(self):
    # Add together the LOT prior and the constant prior, here just a Gaussian
    return (LOTHypothesis.compute_prior(self) +
            sum(map(lambda x: normlogpdf(x, 0.0, self.constant_sd), self.parameters)))
Example #10
def to_maximize(fit_params):
    # As above: install the candidate constants, then return the negative of
    # (likelihood + constant prior) for the minimizer.
    self.CONSTANT_VALUES = fit_params.tolist()  # set these
    # The likelihood is recomputed with the constants we just set
    # (via get_function_responses above).
    constant_prior = sum(map(lambda x: normlogpdf(x, 0.0, CONSTANT_SD), self.CONSTANT_VALUES))
    return -(GaussianLOTHypothesis.compute_likelihood(self, data) + constant_prior)
Example #11
def compute_prior(self):
    # Base GaussianLOTHypothesis prior plus a Gaussian prior over each fit constant.
    self.prior = GaussianLOTHypothesis.compute_prior(self)
    self.prior += sum(map(lambda x: normlogpdf(x, 0.0, CONSTANT_SD), self.CONSTANT_VALUES))
    self.posterior_score = self.prior + self.likelihood
    return self.prior
Example #13
def compute_prior(self):
    # Add together the structural prior and the constant prior
    return (LOTHypothesis.compute_prior(self) +
            sum(map(lambda x: normlogpdf(x, 0.0, self.constant_sd), self.CONSTANT_VALUES)))