def importance(beliefs, params, num_samples):
    """Importance sampling for the probability that the world is state 0,
    given first-order belief reports.

    Args:
        beliefs: 2d np.array of belief reports; rows are options, cols are ppl.
        params: dict of model parameters; reads 'own_noise_default' and
            'own_noise_type'.
        num_samples: natural number of importance samples to draw.

    Returns:
        float: estimated probability that the world is in state 0.

    Note: some of this assumes binary worlds and signals.
    """
    # Constant per-respondent noise level (one entry per column of beliefs).
    noise = np.ones(beliefs.shape[1]) * params['own_noise_default']
    indiv = None
    # Draw prior samples; signals are generated but ignored.
    samples = [fwd.generate_actual_nosigs(params, indiv)
               for i in range(num_samples)]
    # Log-likelihood of the observed beliefs under each sampled state.
    weights = np.array([beliefs_loglike_marg(beliefs, s['world'], s['sm'],
                                             s['wp'], params['own_noise_type'],
                                             noise)
                        for s in samples])
    normalised_weights = jmutils.normalise_log_likelihoods(weights)
    sampled_worlds = np.array([s['world'] for s in samples])
    # P(world == 0) is the total normalised weight of samples whose world is 0.
    world_prob0 = np.sum(normalised_weights[sampled_worlds == 0])
    return world_prob0
def importance_quick(beliefs, params, num_samples):
    """Vectorised importance sampling for the probability that the world is
    state 0, given first-order belief reports.

    Args:
        beliefs: 2d np.array of belief reports; rows are options, cols are ppl.
        params: dict of model parameters; reads 'own_noise_default' and
            'own_noise_type'.
        num_samples: natural number of importance samples to draw.

    Returns:
        float: estimated probability that the world is in state 0.
    """
    # Constant per-respondent noise level (one entry per column of beliefs).
    noise = np.ones(beliefs.shape[1]) * params['own_noise_default']
    indiv = None  # unused here; kept for parity with importance()
    # Vectorised prior draws: world prior, worlds, then signal means with
    # sm1 drawn strictly below sm0 (epsilon keeps the draw away from sm0).
    wp0 = npr.uniform(size=num_samples)
    worlds = npr.binomial(1, wp0)
    sm0 = npr.uniform(size=num_samples)
    epsilon = sm0 / 1000
    upper = sm0 - epsilon
    sm1 = npr.uniform(high=upper)
    weights = bel_ll_vec(beliefs, worlds, sm0, sm1, wp0,
                         params['own_noise_type'], noise)
    normalised_weights = jmutils.normalise_log_likelihoods(weights)
    # Bug fix: select samples whose world is 0 (the old mask
    # `worlds.astype(bool)` picked world 1, contradicting the name
    # world_prob0 and the sibling functions), and drop the leftover
    # `1/0` debugging crash that made this function always raise.
    world_prob0 = np.sum(normalised_weights[worlds == 0])
    return world_prob0
def importance_sampling_old(consistent, responses, meta, model_params, num_samples):
    """Does importance sampling to obtain prob of world states given responses
    and second-order responses.

    Args:
        consistent: unused; kept for backward compatibility with callers.
        responses: np.array, counting from 0.
        meta: 2d np.array, rows are options, cols are ppl.
        model_params: dict; reads 'prior_wp_sm'.
        num_samples: natural number of importance samples to draw.

    Returns:
        float: estimated probability that the world is in state 0.

    Note: some of this assumes binary worlds and signals.
    """
    num_respondents = len(responses)
    # Draw joint (world, wp, sm) samples from the prior.
    samples = [fwd.generate_actual(num_respondents, model_params['prior_wp_sm'])
               for i in range(num_samples)]
    # Log-likelihood of observed responses/meta under each sample.
    weights = np.array([reported_loglike(responses, meta, s, model_params)
                        for s in samples])
    normalised_weights = jmutils.normalise_log_likelihoods(weights)
    sampled_worlds = np.array([s['world'] for s in samples])
    # P(world == 0) is the total normalised weight of samples whose world is 0.
    world_prob0 = np.sum(normalised_weights[sampled_worlds == 0])
    return world_prob0
# Exemplo n.º 4  (scrape artifact: this marker and a stray "0" were bare text
# that made the file unparseable; kept here as a comment)
def geom_probs_ll(ll, wp_grid, sm_grid):
    """Normalise a grid of log-likelihoods into probabilities.

    Args:
        ll: np.array of log-likelihoods over a (2, wp_grid-1, sm_grid, sm_grid)
            grid (any layout that flattens to that size).
        wp_grid: grid resolution along the world-prior axis (uses wp_grid-1).
        sm_grid: grid resolution along each signal-mean axis.

    Returns:
        np.array of shape (2, wp_grid-1, sm_grid, sm_grid) with probabilities.
    """
    flat_ll = np.ravel(ll)
    flat_probs = jmutils.normalise_log_likelihoods(flat_ll)
    return np.reshape(flat_probs, (2, wp_grid - 1, sm_grid, sm_grid))