Exemplo n.º 1
0
 def gen_proposal(self, ancestor = None):
     """Generate one proposal sample descending from *ancestor*.

     Near a local maximum (gradient norm ~ 0) the fixed ``self.jump_dist``
     is used; otherwise a line search along the conjugate direction sizes a
     multivariate-normal step distribution. Returns a single ``PmcSample``
     carrying the new gradient, learning rate and conjugate direction.
     """
     # NOTE: the original started with a bare `assert()`, which asserts an
     # empty tuple (falsy) and therefore raised AssertionError on every
     # call — removed as leftover scaffolding.
     assert(ancestor is not None)
     if np.linalg.norm(ancestor.other["gr"]) < 10**-10:
         #we are close to a local maximum: gradient carries no direction,
         #so fall back to the fixed jump distribution
         f = ancestor.other["lrate"]
         prop_dist = self.jump_dist
     else:
         #we are at a distance to a local maximum
         #step in direction of gradient.
         (f, theta_1, fval_1, back_off_tmp) = find_step_size(ancestor.sample, ancestor.other["lrate"], ancestor.lpost, ancestor.other["conj"], func = self.lpost)
         self.back_off_count += back_off_tmp
         step_mean = f * 0.5 * ancestor.other["conj"]
         cov = ideal_covar(step_mean, main_var_scale = 1, other_var = 0.5) # , fix_main_var=1
         prop_dist = mvnorm(step_mean, cov)

     step = prop_dist.rvs()
     # BUGFIX: reuse the drawn step. The original added a second, independent
     # draw (`prop_dist.rvs()` again), so lprop/lweight were evaluated at a
     # point different from the one actually proposed.
     samp = ancestor.sample + step
     (lp, gr) = self.lpost_and_grad(samp)
     # Polak-Ribiere-style momentum, clipped at 0.
     # (np.float was removed in NumPy 1.24 — use the builtin float.)
     momentum = max(0, float(gr.T.dot(gr - ancestor.other["gr"]) / ancestor.other["gr"].T.dot(ancestor.other["gr"])))
     conj_dir_1 = gr + momentum * ancestor.other["conj"]
     lprop = prop_dist.logpdf(step)
     rval = PmcSample(ancestor = ancestor,
                        sample = samp,
                        lpost = lp,
                        lprop = lprop,
                        prop_obj = self,
                        lweight = lp - lprop,  # reuse lprop instead of recomputing logpdf
                        other = {"lrate":f, "gr":gr, "conj":conj_dir_1})

     return rval
Exemplo n.º 2
0
 def gen_proposal(self, ancestor = None):
     """Generate one proposal sample descending from *ancestor*.

     Near a local maximum (gradient norm ~ 0) the fixed ``self.jump_dist``
     is used; otherwise a line search along the conjugate direction sizes a
     multivariate-normal step distribution. Returns a single ``PmcSample``
     carrying the new gradient, learning rate and conjugate direction.
     """
     # NOTE: the original started with a bare `assert()`, which asserts an
     # empty tuple (falsy) and therefore raised AssertionError on every
     # call — removed as leftover scaffolding.
     assert(ancestor is not None)
     if np.linalg.norm(ancestor.other["gr"]) < 10**-10:
         #we are close to a local maximum: gradient carries no direction,
         #so fall back to the fixed jump distribution
         f = ancestor.other["lrate"]
         prop_dist = self.jump_dist
     else:
         #we are at a distance to a local maximum
         #step in direction of gradient.
         (f, theta_1, fval_1, back_off_tmp) = find_step_size(ancestor.sample, ancestor.other["lrate"], ancestor.lpost, ancestor.other["conj"], func = self.lpost)
         self.back_off_count += back_off_tmp
         step_mean = f * 0.5 * ancestor.other["conj"]
         cov = ideal_covar(step_mean, main_var_scale = 1, other_var = 0.5) # , fix_main_var=1
         prop_dist = mvnorm(step_mean, cov)

     step = prop_dist.rvs()
     # BUGFIX: reuse the drawn step. The original added a second, independent
     # draw (`prop_dist.rvs()` again), so lprop/lweight were evaluated at a
     # point different from the one actually proposed.
     samp = ancestor.sample + step
     (lp, gr) = self.lpost_and_grad(samp)
     # Polak-Ribiere-style momentum, clipped at 0.
     # (np.float was removed in NumPy 1.24 — use the builtin float.)
     momentum = max(0, float(gr.T.dot(gr - ancestor.other["gr"]) / ancestor.other["gr"].T.dot(ancestor.other["gr"])))
     conj_dir_1 = gr + momentum * ancestor.other["conj"]
     lprop = prop_dist.logpdf(step)
     rval = PmcSample(ancestor = ancestor,
                        sample = samp,
                        lpost = lp,
                        lprop = lprop,
                        prop_obj = self,
                        lweight = lp - lprop,  # reuse lprop instead of recomputing logpdf
                        other = {"lrate":f, "gr":gr, "conj":conj_dir_1})

     return rval
Exemplo n.º 3
0
    def gen_proposal(self, ancestor=None):
        """Generate one proposal sample descending from *ancestor*.

        A gradient line search sizes the mean of a multivariate-normal
        step distribution; the actual sampling is delegated to
        ``gen_sample_prototype``. The step size found is stored on the new
        sample under ``other["lrate"]`` for the next generation.
        """
        self.prepare_ancestor(ancestor)
        f = ancestor.other["lrate"]

        # NOTE: the original wrapped this in `if True:` with an unreachable
        # isotropic-fallback `else` branch — dead scaffold removed.
        (f, theta_1, lpost_1, grad_1,
         foo) = find_step_size(ancestor.sample,
                               f,
                               ancestor.lpost,
                               ancestor.other["gr"],
                               func_and_grad=self.lpost_and_grad)
        # Mean step: half the line-search step along the gradient.
        sc_gr = f * 0.5 * ancestor.other["gr"]
        cov = ideal_covar(sc_gr,
                          fix_main_var=self.main_var,
                          other_var=self.other_var)
        step_dist = mvnorm(sc_gr, cov)

        (new_samp,
         step) = gen_sample_prototype(ancestor,
                                      self,
                                      step_dist=step_dist,
                                      lpost_and_grad_func=self.lpost_and_grad)
        new_samp.other["lrate"] = f
        return new_samp
Exemplo n.º 4
0
 def gen_proposal(self, ancestor = None):
     """Generate proposal sample(s) descending from *ancestor*.

     Lazily fills in the ancestor's missing log-posterior / gradient, then
     either jumps via ``self.jump_dist`` (near a local maximum) or builds a
     multivariate-normal proposal centered along the gradient after a line
     search. Returns a list of ``PmcSample`` objects.
     """
     assert(ancestor is not None)
     assert(ancestor.sample is not None)

     rval = []
     # Lazily compute whichever of (lpost, gradient) the ancestor is missing.
     if ancestor.lpost is None:
         if "gr" not in ancestor.other:
             (ancestor.lpost, ancestor.other["gr"]) = self.lpost_and_grad(ancestor.sample)
         else:
             ancestor.lpost = self.lpost_and_grad(ancestor.sample)[0]
     elif "gr" not in ancestor.other:
         ancestor.other["old"] = False
         ancestor.other["gr"] = self.lpost_and_grad(ancestor.sample)[1]
         assert(ancestor.other["gr"].size == ancestor.sample.size)
     if "lrate" in ancestor.other:
         f = ancestor.other["lrate"]
     else:
         f = self.lrate

     if np.linalg.norm(ancestor.other["gr"]) < 10**-10:
         #we are close to a local maximum
         print("jumping")
         prop_dist = self.jump_dist
     else:
         #we are at a distance to a local maximum
         #step in direction of gradient.
         assert(ancestor.other["gr"].size == ancestor.sample.size)
         (f, theta_1, lpost_1, grad_1, back_off_tmp)  = find_step_size(ancestor.sample, f, ancestor.lpost, ancestor.other["gr"], func_and_grad = self.lpost_and_grad)
         self.back_off_count += len(back_off_tmp)
         # NOTE: an `if False and ...` block that recycled back-off samples
         # was dead code and has been removed.
         mean_step = f * self.prop_mean_on_line * ancestor.other["gr"]
         prop_mean = ancestor.sample + mean_step
         cov = ideal_covar(mean_step, main_var_scale = self.main_var_scale, other_var = self.other_var, fix_main_var = self.fix_main_var)
         prop_dist = mvnorm(prop_mean, cov)
     samp = prop_dist.rvs()
     (lp, gr) = self.lpost_and_grad(samp)
     # BUGFIX: removed `print(ancestor.lpost, self.lpost_and_grad(prop_mean)[0])`:
     # it raised NameError on the jump branch (prop_mean undefined there) and
     # cost an extra posterior/gradient evaluation purely for debug output.
     lprop =  prop_dist.logpdf(samp)
     assert(ancestor.other["gr"].size == ancestor.sample.size)
     assert(gr.size == samp.size)
     rval.append(PmcSample(ancestor = ancestor,
                        sample = samp,
                        lpost = lp,
                        lprop = lprop,
                        lweight = lp - lprop,
                        prop_obj = self,
                        other = {"lrate":f, "gr":gr, "old":True}))
     return rval
Exemplo n.º 5
0
 def gen_proposal(self, ancestor = None):
     """Generate proposal sample(s) descending from *ancestor*.

     Computes the ancestor's gradient if missing, then either jumps via
     ``self.jump_dist`` (near a local maximum) or draws a step from a
     multivariate normal whose mean lies along the gradient after a line
     search. Returns a list of ``PmcSample`` objects.
     """
     assert(ancestor is not None)
     rval = []
     # Lazily compute the ancestor's gradient when it is missing.
     # (An unused local `old` flag tracked the same condition — removed.)
     if "gr" not in ancestor.other:
         ancestor.other["old"] = False
         ancestor.other["gr"] = self.lpost_and_grad(ancestor.sample)[1]
         assert(ancestor.other["gr"].size == ancestor.sample.size)
     if "lrate" in ancestor.other:
         f = ancestor.other["lrate"]
     else:
         f = self.lrate
     if np.linalg.norm(ancestor.other["gr"]) < 10**-10:
         #we are close to a local maximum
         print("jumping")
         prop_dist = self.jump_dist
     else:
         #we are at a distance to a local maximum
         #step in direction of gradient.
         assert(ancestor.other["gr"].size == ancestor.sample.size)
         (f, theta_1, lpost_1, grad_1, back_off_tmp)  = find_step_size(ancestor.sample, f, ancestor.lpost, ancestor.other["gr"], func_and_grad = self.lpost_and_grad)
         self.back_off_count += len(back_off_tmp)
         # NOTE: an `if False and ...` block that recycled back-off samples
         # was dead code and has been removed.
         step_mean = f * self.prop_mean_on_line * ancestor.other["gr"]
         cov = ideal_covar(step_mean, main_var_scale = self.main_var_scale, other_var = self.other_var, fix_main_var = self.fix_main_var)
         prop_dist = mvnorm(step_mean, cov)
     step = prop_dist.rvs()
     samp = ancestor.sample + step
     (lp, gr) = self.lpost_and_grad(samp)
     lprop =  prop_dist.logpdf(step)
     assert(ancestor.other["gr"].size == ancestor.sample.size)
     assert(gr.size == samp.size)
     rval.append(PmcSample(ancestor = ancestor,
                        sample = samp,
                        lpost = lp,
                        lprop = lprop,
                        lweight = lp - lprop,
                        prop_obj = self,
                        other = {"lrate":f, "gr":gr, "old":True}))
     return rval
Exemplo n.º 6
0
 def gen_proposal(self, ancestor = None):
     """Generate one proposal sample descending from *ancestor*.

     A gradient line search sizes the mean of a multivariate-normal step
     distribution; the actual sampling is delegated to
     ``gen_sample_prototype``. The step size found is stored on the new
     sample under ``other["lrate"]`` for the next generation.
     """
     self.prepare_ancestor(ancestor)
     f = ancestor.other["lrate"]

     # NOTE: the original wrapped this in `if True:` with an unreachable
     # isotropic-fallback `else` branch — dead scaffold removed.
     (f, theta_1, lpost_1,
      grad_1, foo)  = find_step_size(ancestor.sample, f, ancestor.lpost,
                                     ancestor.other["gr"],
                                     func_and_grad = self.lpost_and_grad)
     # Mean step: half the line-search step along the gradient.
     sc_gr = f * 0.5* ancestor.other["gr"]
     cov = ideal_covar(sc_gr, fix_main_var = self.main_var, other_var = self.other_var)
     step_dist = mvnorm(sc_gr, cov)

     (new_samp, step) = gen_sample_prototype(ancestor, self,
                                             step_dist = step_dist,
                                             lpost_and_grad_func = self.lpost_and_grad)
     new_samp.other["lrate"] = f
     return new_samp