Example #1
import pymc


def umbrella_logp(value, rain):
    """
    Umbrella node. Initial value is False.

    P(umbrella=True | rain=True)  = 0.9
    P(umbrella=True | rain=False) = 0.2
    """
    p_umb_given_rain = 0.9
    p_umb_given_no_rain = 0.2
    # Bernoulli log-likelihood of the umbrella observation given the rain state.
    if rain:
        logp = pymc.bernoulli_like(value, p_umb_given_rain)
    else:
        logp = pymc.bernoulli_like(value, p_umb_given_no_rain)
    return logp
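As a quick sanity check (a minimal sketch, assuming PyMC 2.x, where pymc.bernoulli_like(x, p) returns the Bernoulli log-probability of x), the function should reproduce the table in the docstring:

import numpy as np

print(umbrella_logp(1, rain=True), np.log(0.9))    # both ≈ -0.105
print(umbrella_logp(0, rain=False), np.log(0.8))   # both ≈ -0.223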
Example #2
import numpy as np
import pymc as pm

# n_pix, K, Lambda and outer_map are defined in the enclosing module.
def prior(value, n_pix=n_pix, max_value=K, hyper_params=Lambda[np.triu_indices(n_pix)]):
    """2nd-order prior for object maps."""
    # Reject maps whose labels fall outside the allowed range [1, max_value].
    if (np.min(value) < 1) or (np.max(value) > max_value):
        return -np.inf
    # Pairwise on/off indicators for the upper triangle of the pixel grid.
    on_offs = outer_map(value, n_pix, max_value)
    on_offs = on_offs[np.triu_indices(n_pix)].astype('int')
    return pm.bernoulli_like(on_offs, hyper_params.ravel())
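For reference, pm.bernoulli_like accepts arrays and returns the summed elementwise log-probability, which is what this prior relies on for the pairwise indicators. A minimal illustration with hypothetical values (assuming PyMC 2.x):

import numpy as np
import pymc as pm

x = np.array([1, 0, 1])        # hypothetical on/off indicators
p = np.array([0.9, 0.2, 0.7])  # hypothetical pairwise probabilities
print(pm.bernoulli_like(x, p))           # summed log-likelihood
print(np.sum(np.log([0.9, 0.8, 0.7])))   # same value, computed by hand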
Example #3
    def batchmodelComp(self, stim1, stim2, response, returnLog=False):
        """Grid-based Bayesian model comparison over the models in self.prior."""
        # np, cartesian, Lik and logsumexp are imported in the enclosing module.
        import pymc
        infoffset = -10000  # replace non-finite log-likelihoods with a very low value
        data = {}
        modelRes = {}
        bestFitParms = {}
        parms = {}
        output = {}
        data['stim1'] = stim1
        data['stim2'] = stim2
        ML = {}
        for mod in self.prior:
            # Build the parameter search grid for this model.
            for par in self.models[mod]['parameters']:
                parms[par] = self.models[mod]['parameters'][par]['searchGrid']
            parCombs = cartesian(list(parms.values()))
            keys = list(parms.keys())
            tmp = np.empty(parCombs.shape[0])
            # Bernoulli log-likelihood of the responses at every grid point.
            for j in range(parCombs.shape[0]):
                for i in range(len(keys)):
                    data[keys[i]] = parCombs[j, i]
                likeli = Lik(data, psyfun=mod)
                tmp[j] = pymc.bernoulli_like(response, likeli)
            tmp[~np.isfinite(tmp)] = infoffset
            modelRes[mod] = tmp
            maxInd = np.argmax(tmp)
            # Most likely parameter combination on the grid for this model.
            bestFitParms[mod] = {}
            for i in range(len(keys)):
                bestFitParms[mod][keys[i]] = parCombs[maxInd, i]

            # Marginal (log) likelihood: sum the posterior grid via logsumexp.
            logPos = modelRes[mod] + np.log(self.prior[mod]['jointPriorNreshape'])
            ML[mod] = logsumexp(logPos)
        # Normalize the marginal likelihoods into model probabilities.
        maxML = max(ML.values())
        norm = np.sum(np.exp(np.array(list(ML.values())) - maxML))
        for mod in self.prior:
            output[mod] = {}
            output[mod]['LogL'] = ML[mod]
            output[mod]['ModelProbability'] = np.exp(ML[mod] - maxML) / norm
            output[mod]['BestFitParms'] = bestFitParms[mod]
        return output
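The final loop turns the marginal log-likelihoods into normalized model probabilities. A standalone sketch of that step with hypothetical numbers, using scipy's logsumexp for the normalization:

import numpy as np
from scipy.special import logsumexp

log_ml = {'logistic': -409.8, 'weibull': -412.3}   # hypothetical log marginal likelihoods
vals = np.array(list(log_ml.values()))
model_probs = np.exp(vals - logsumexp(vals))       # sums to 1
print(dict(zip(log_ml.keys(), model_probs)))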
Example #4
def y(logit_p=logit_p, value=df[11]):
    # Bernoulli log-likelihood of the observed column under a logistic model.
    return pm.bernoulli_like(value, pm.invlogit(logit_p))
Example #5
def y(logit_p=logit_p, value=data.LL):
    # Same pattern with a different data source: observed Bernoulli responses in data.LL.
    return pymc.bernoulli_like(value, pymc.invlogit(logit_p))
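These log-probability functions are normally wrapped as observed stochastic nodes in a PyMC 2 model. A minimal end-to-end sketch with hypothetical data obs and a hypothetical Normal prior on the logit:

import numpy as np
import pymc

obs = np.array([0, 1, 1, 0, 1])                    # hypothetical observed 0/1 responses
logit_p = pymc.Normal('logit_p', mu=0.0, tau=1.0)  # prior on the logit of p

@pymc.observed
def y(logit_p=logit_p, value=obs):
    return pymc.bernoulli_like(value, pymc.invlogit(logit_p))

M = pymc.MCMC([logit_p, y])
M.sample(iter=2000, burn=500)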