Example #1
    eps_p_f_list = []
    N_pos_list = []

    # Obtain the spline representation of the log of the Monte Carlo-integrated
    # likelihood function at each datapoint. The nodes are at .01, .02, ..., .99.
    junk, splreps = age_corr_likelihoods(all_pts, 10000,
                                         np.arange(.01, 1., .01), norun_name)
    for i in xrange(len(splreps)):
        splreps[i] = list(splreps[i])

    # Don't worry, these are just reasonable initial values...
    val_now = pm.logit(
        np.array(all_pts.PF + 1, dtype=float) / (all_pts.EXAMINED + 2))
    if with_stukel:
        val_now = pm.stukel_logit(
            np.array(all_pts.PF + 1, dtype=float) / (all_pts.EXAMINED + 2),
            a1.value, a2.value)

    if data_mesh.shape[0] % chunk == 0:
        additional_index = 0
    else:
        additional_index = 1

    for i in xrange(0, data_mesh.shape[0] / chunk + additional_index):

        this_slice = slice(chunk * i, min((i + 1) * chunk, data_mesh.shape[0]))

        # epsilon plus f, given f.
        @pm.stochastic(trace=False, dtype=np.float)
        def eps_p_f_now(value=val_now[this_slice],
                        f=f_eval,
                        V=V,
                        this_slice=this_slice):
            return pm.normal_like(value, f[this_slice], 1. / V)
        eps_p_f_now.__name__ = "eps_p_f%i" % i
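
A note on the additional_index arithmetic above: it is just ceiling division, so the
loop visits every chunk, including a short final one. An equivalent one-liner sketch,
assuming numpy is imported as np:

    n_chunks = int(np.ceil(data_mesh.shape[0] / float(chunk)))
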
Example #2
            init_OK = True
        except pm.ZeroProbability, msg:
            print "Trying again: %s" % msg
            init_OK = False
            gc.collect()

    # ===========================
    # = Create likelihood layer =
    # ===========================

    eps_p_f_list = []
    N_pos_list = []

    # Don't worry, these are just reasonable initial values...
    if with_stukel:
        val_now = pm.stukel_logit((pos + 1.0) / (pos + neg + 2.0), a1.value, a2.value)
    else:
        val_now = pm.logit((pos + 1.0) / (pos + neg + 2.0))

    if data_mesh.shape[0] % chunk == 0:
        additional_index = 0
    else:
        additional_index = 1

    for i in xrange(0, data_mesh.shape[0] / chunk + additional_index):

        this_slice = slice(chunk * i, min((i + 1) * chunk, data_mesh.shape[0]))

        # epsilon plus f, given f.
        @pm.stochastic(trace=False, dtype=np.float)
        def eps_p_f_now(value=val_now[this_slice], f=sp_sub.f_eval, V=V, sl=this_slice):
            return pm.normal_like(value, f[sl], 1. / V)
        eps_p_f_now.__name__ = "eps_p_f%i" % i
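
The init_OK loop that opens this example is a standard PyMC 2 idiom: if the initial
values land in a region of zero prior probability, model construction raises
pm.ZeroProbability and the whole block is retried. A minimal sketch of the pattern,
with build_model as a hypothetical stand-in for the construction code:

    import gc
    import pymc as pm

    init_OK = False
    while not init_OK:
        try:
            model = build_model()  # hypothetical; may raise pm.ZeroProbability
            init_OK = True
        except pm.ZeroProbability, msg:
            print 'Trying again: %s' % msg
            gc.collect()
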
Example #3
    # ===========================
    # = Create likelihood layer =
    # ===========================
        
    eps_p_f_list = []
    N_pos_list = []
    
    # Obtain the spline representation of the log of the Monte Carlo-integrated 
    # likelihood function at each datapoint. The nodes are at .01, .02, ..., .99.
    junk, splreps = age_corr_likelihoods(all_pts, 10000, np.arange(.01,1.,.01), norun_name)
    for i in xrange(len(splreps)):
        splreps[i] = list(splreps[i])

    # Don't worry, these are just reasonable initial values...
    val_now = pm.logit(np.array(all_pts.PF+1,dtype=float)/(all_pts.EXAMINED+2))
    if with_stukel:
        val_now = pm.stukel_logit(np.array(all_pts.PF+1,dtype=float)/(all_pts.EXAMINED+2), a1.value, a2.value)
    
    if data_mesh.shape[0] % chunk == 0:
        additional_index = 0
    else:
        additional_index = 1
    
    for i in xrange(0,data_mesh.shape[0] / chunk + additional_index):
        
        this_slice = slice(chunk*i, min((i+1)*chunk, data_mesh.shape[0]))

        # epsilon plus f, given f.
        @pm.stochastic(trace=False, dtype=np.float)
        def eps_p_f_now(value=val_now[this_slice], f=f_eval, V=V, this_slice=this_slice):
            return pm.normal_like(value, f[this_slice], 1./V)
        eps_p_f_now.__name__ = "eps_p_f%i"%i
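
Note that pm.normal_like(value, f[this_slice], 1./V) parameterizes the normal by
precision, so V is the nugget variance. A quick sanity-check sketch of that
convention, assuming PyMC 2 and SciPy are available:

    import numpy as np
    import pymc as pm
    from scipy.stats import norm

    V, x, mu = 2.0, 0.3, 0.1
    assert np.isclose(pm.normal_like(x, mu, 1. / V),
                      norm.logpdf(x, mu, np.sqrt(V)))
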
Example #4
    # Prior-predictive check on the field: compute the probability that the
    # field plus nugget exceeds stukel_logit(threshold_val), and reject states
    # where that mass exceeds max_p_above.
    def pripred_check(m=m, amp=amp, V=V, a=a):
        p_above = scipy.stats.distributions.norm.cdf(
            m - pm.stukel_logit(threshold_val, *a), 0, np.sqrt(amp ** 2 + V))
        if p_above <= max_p_above:
            return 0.
        else:
            return -np.inf
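
pripred_check returns a log-probability of either 0 or -inf, i.e. a hard constraint:
the prior-predictive probability of exceeding the threshold must stay at or below
max_p_above. In PyMC 2 such a function is typically attached to the model with the
pm.potential decorator; a minimal self-contained sketch of that pattern (the
constraint here is a hypothetical non-negativity check, not the one from this
example):

    import numpy as np
    import pymc as pm

    x = pm.Normal('x', 0., 1.)

    @pm.potential
    def nonneg(x=x):
        # log-probability 0 if the constraint holds, -inf otherwise
        return 0. if x >= 0 else -np.inf
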
Example #5
def make_model(lon,lat,input_data,covariate_keys,pos,neg,cpus=1):
    """
    This function is required by the generic MBG code.
    """
    ra = csv2rec(input_data)
    
    if np.any(pos + neg == 0):
        where_zero = np.where(pos + neg == 0)[0]
        raise ValueError, 'Pos+neg = 0 in the rows (starting from zero):\n %s' % where_zero

    # How many nuggeted field points to handle with each step method
    grainsize = 10
        
    # Non-unique data locations
    data_mesh = combine_spatial_inputs(lon, lat)
    
    s_hat = (pos+1.)/(pos+neg+2.)
    
    # Uniquify the data locations.
    locs = [(lon[0], lat[0])]
    fi = [0]
    ui = [0]
    for i in xrange(1,len(lon)):

        # If repeat location, add observation
        loc = (lon[i], lat[i])
        if loc in locs:
            fi.append(locs.index(loc))

        # Otherwise, new obs
        else:
            locs.append(loc)
            fi.append(max(fi)+1)
            ui.append(i)
    fi = np.array(fi)
    ti = [np.where(fi == i)[0] for i in xrange(max(fi)+1)]
    ui = np.asarray(ui)
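
    # A sketch of the bookkeeping above, assuming observation order:
    #   fi[j] = index of the unique location that observation j falls on,
    #   ui[k] = index of the first observation at unique location k,
    #   ti[k] = indices of all observations sharing unique location k.
    # E.g. lon = [0., 1., 0.], lat = [0., 0., 0.] gives
    #   fi = [0, 1, 0], ui = [0, 1], ti = [array([0, 2]), array([1])].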

    lon = np.array(locs)[:,0]
    lat = np.array(locs)[:,1]

    # Unique data locations
    logp_mesh = combine_spatial_inputs(lon,lat)
    
    # Create the mean & its evaluation at the data locations.
    @pm.deterministic
    def M():
        return pm.gp.Mean(mean_fn)
        
    @pm.deterministic
    def M_eval(M=M):
        return M(logp_mesh)

    init_OK = False
    while not init_OK:
        try:        
            # The partial sill.
            amp = pm.Exponential('amp', .1, value=1.)

            a1 = pm.Uninformative('a1',1,observed=True)
            a2 = pm.Uninformative('a2',1,observed=True)
            
            p0 = pm.Uniform('p0',0,1,value=.01)
            
            # The range parameters. Units are RADIANS. 
            # 1 radian = the radius of the earth, about 6378.1 km
            scale = pm.Exponential('scale', .1, value=.08)
            scale_in_km = scale*6378.1

            # This parameter controls the degree of differentiability of the field.
            diff_degree = pm.Uniform('diff_degree', .01, 3)

            # The nugget variance.
            V = pm.Exponential('V', .1, value=1.)
            tau = 1./V
            
            # Create the covariance & its evaluation at the data locations.
            @pm.deterministic(trace=True)
            def C(amp=amp, scale=scale, diff_degree=diff_degree):
                """A covariance function created from the current parameter values."""
                eval_fun = CovarianceWithCovariates(pm.gp.cov_funs.matern.geo_rad,
                                                    input_data, covariate_keys, ui,
                                                    fac=1.e4, ra=ra)
                return pm.gp.FullRankCovariance(eval_fun, amp=amp, scale=scale, diff_degree=diff_degree)
            
            sp_sub = pm.gp.GPSubmodel('sp_sub',M,C,logp_mesh)
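            # sp_sub.f_eval is the submodel's evaluation of the GP on logp_mesh;
            # the per-cluster likelihoods below attach nuggeted normals to
            # slices of it through the fi index array.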
        
            # Loop over data clusters
            eps_p_f_d = []
            s_d = []
            data_d = []

            for i in xrange(len(pos)/grainsize+1):
                sl = slice(i*grainsize,(i+1)*grainsize,None)
                # Nuggeted field in this cluster
                this_f = sp_sub.f_eval[fi[sl]]
                if len(this_f.value)>0:
                    eps_p_f_d.append(pm.Normal('eps_p_f_%i' % i, this_f, tau,
                                               value=pm.stukel_logit(s_hat[sl], a1.value, a2.value),
                                               trace=False))

                    # The allele frequency
                    s_d.append(pm.Lambda('s_%i' % i,
                                         lambda lt=eps_p_f_d[-1]: stukel_invlogit(lt, a1.value, a2.value),
                                         trace=False))

                    # The observed allele frequencies
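                    # (zib is assumed to be a zero-inflated binomial
                    # log-likelihood, with p0 the structural-zero probability.)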
                    @pm.stochastic(name='data_%i'%i, observed=True)
                    def d_now(value=pos[sl], p0=p0, n=pos[sl]+neg[sl], p=s_d[-1]):
                        return zib(value, p0, n, p)
            
            # The field plus the nugget
            @pm.deterministic
            def eps_p_f(eps_p_fd = eps_p_f_d):
                """Concatenated version of eps_p_f, for postprocessing & Gibbs sampling purposes"""
                return np.concatenate(eps_p_fd)
            
            init_OK = True
        except pm.ZeroProbability, msg:
            print 'Trying again: %s'%msg
            init_OK = False
            gc.collect()
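
A hedged usage sketch, assuming make_model ultimately returns its locals(), as
generic-MBG make_model functions conventionally do; the file name and covariate
keys below are placeholders, not from the source:

    import pymc as pm
    from pylab import csv2rec

    ra = csv2rec('survey_data.csv')  # hypothetical input file
    M = make_model(ra.lon, ra.lat, 'survey_data.csv',
                   ['rain', 'temp'],  # hypothetical covariate keys
                   ra.pos, ra.neg)
    S = pm.MCMC(M)
    S.sample(50000, burn=10000, thin=10)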