def _test_scalar_hp_inference(view,
                              prior_fn,
                              w,
                              grid_min,
                              grid_max,
                              grid_n,
                              likelihood_model,
                              scalar_hp_key,
                              burnin=1000,
                              nsamples=1000,
                              every=10,
                              trials=100,
                              places=2):
    """Validate slice sampling of a single scalar feature hyperparameter.

    Compares the empirical distribution of samples produced by
    ``slice_hp`` over feature 0's hyperparameter ``scalar_hp_key``
    against the unnormalized log posterior
    ``prior_fn(x) + score_data`` evaluated on a [grid_min, grid_max]
    grid of ``grid_n`` points.

    view must be 1D (a single-feature dataview).

    prior_fn       -- log prior density over the scalar hyperparameter
    w              -- slice sampler width for this hyperparameter
    burnin         -- slice moves discarded before sampling
    every          -- chain thinning: slice moves per reported sample
    trials/places  -- tolerance knobs for the distribution check
    """
    r = rng()

    # slice_hp spec: sample feature 0's `scalar_hp_key` under prior_fn
    # with slice width w.
    hparams = {0: {scalar_hp_key: (prior_fn, w)}}

    def score_fn(scalar):
        # Unnormalized log posterior at `scalar`: temporarily install the
        # value on the latent state, score the data, then restore the old
        # value so evaluation has no lasting side effect.
        # NOTE: closes over `latent`, which is bound below (before any call).
        d = latent.get_feature_hp(0)
        prev_scalar = d[scalar_hp_key]
        d[scalar_hp_key] = scalar
        latent.set_feature_hp(0, d)
        score = prior_fn(scalar) + latent.score_data(0, None, r)
        d[scalar_hp_key] = prev_scalar
        latent.set_feature_hp(0, d)
        return score

    defn = model_definition(len(view), [likelihood_model])
    latent = initialize(defn, view, r=r)
    model = bind(latent, view)

    def sample_fn():
        # One reported sample per `every` slice moves (thinning).
        for _ in xrange(every):
            slice_hp(model, r, hparams=hparams)
        return latent.get_feature_hp(0)[scalar_hp_key]

    for _ in xrange(burnin):
        slice_hp(model, r, hparams=hparams)
    print 'finished burnin of', burnin, 'iterations'

    print 'grid_min', grid_min, 'grid_max', grid_max
    assert_1d_cont_dist_approx_emp(sample_fn,
                                   score_fn,
                                   grid_min,
                                   grid_max,
                                   grid_n,
                                   trials,
                                   nsamples,
                                   places)
def _test_scalar_hp_inference(view,
                              prior_fn,
                              w,
                              grid_min,
                              grid_max,
                              grid_n,
                              likelihood_model,
                              scalar_hp_key,
                              burnin=1000,
                              nsamples=1000,
                              every=10,
                              trials=100,
                              places=2):
    """
    view must be 1D
    """
    r = rng()

    hparams = {0: {scalar_hp_key: (prior_fn, w)}}

    def score_fn(scalar):
        d = latent.get_feature_hp(0)
        prev_scalar = d[scalar_hp_key]
        d[scalar_hp_key] = scalar
        latent.set_feature_hp(0, d)
        score = prior_fn(scalar) + latent.score_data(0, None, r)
        d[scalar_hp_key] = prev_scalar
        latent.set_feature_hp(0, d)
        return score

    defn = model_definition(len(view), [likelihood_model])
    latent = initialize(defn, view, r=r)
    model = bind(latent, view)

    def sample_fn():
        for _ in xrange(every):
            slice_hp(model, r, hparams=hparams)
        return latent.get_feature_hp(0)[scalar_hp_key]

    for _ in xrange(burnin):
        slice_hp(model, r, hparams=hparams)
    print 'finished burnin of', burnin, 'iterations'

    print 'grid_min', grid_min, 'grid_max', grid_max
    assert_1d_cont_dist_approx_emp(sample_fn, score_fn, grid_min, grid_max,
                                   grid_n, trials, nsamples, places)
def test_kernel_slice_hp_noninform():
    """Joint slice sampling of ('alpha', 'beta') under a noninformative prior.

    Delegates to _test_kernel_slice_hp, which runs the kernel and writes a
    diagnostic plot to 'grid_slice_hp_noninform_samples.pdf'.
    """
    def init_inf_kernel_state_fn(s):
        # hparams spec for feature 0: update ('alpha', 'beta') jointly with
        # slice width 1.0; the state argument `s` is unused here.
        hparams = {
            0: {
                ('alpha', 'beta'): (log_noninformative_beta_prior, 1.0),
            }
        }
        return hparams

    def prior_fn(raw):
        # Joint log prior over the raw {'alpha': ..., 'beta': ...} dict.
        return log_noninformative_beta_prior(raw['alpha'], raw['beta'])

    def kernel_fn(s, arg, rng):
        # PEP 8 (E731): a named def instead of a lambda bound to a name.
        return slice_hp(s, rng, hparams=arg)

    _test_kernel_slice_hp(initialize, init_inf_kernel_state_fn, prior_fn,
                          numpy_dataview, bind, kernel_fn,
                          'grid_slice_hp_noninform_samples.pdf', rng())
def test_kernel_slice_cluster_hp():
    """Slice sampling of the clustering hyperparameter `alpha`.

    Uses an Exponential(1.5) log prior and checks posterior inference on a
    [0, 50] grid of 100 points via _test_cluster_hp_inference.
    """
    prior_fn = log_exponential(1.5)

    def init_inf_kernel_state_fn(s):
        # cparam spec: slice-sample `alpha` with width 1.; `s` is unused.
        cparam = {'alpha': (prior_fn, 1.)}
        return cparam

    def kernel_fn(s, arg, rng):
        # PEP 8 (E731): a named def instead of a lambda bound to a name.
        return slice_hp(s, rng, cparam=arg)

    grid_min, grid_max, grid_n = 0.0, 50., 100
    _test_cluster_hp_inference(initialize,
                               prior_fn,
                               grid_min,
                               grid_max,
                               grid_n,
                               numpy_dataview,
                               bind,
                               init_inf_kernel_state_fn,
                               kernel_fn,
                               map_actual_postprocess_fn=lambda x: x,
                               prng=rng())
def test_kernel_slice_hp():
    """Independent slice sampling of 'alpha' and 'beta' under Exp(1.2) priors.

    Delegates to _test_kernel_slice_hp, which runs the kernel and writes a
    diagnostic plot to 'grid_slice_hp_samples.pdf'.
    """
    indiv_prior_fn = log_exponential(1.2)

    def init_inf_kernel_state_fn(s):
        # hparams spec for feature 0: separate updates for 'alpha' and
        # 'beta', each with slice width 1.5; `s` is unused here.
        hparams = {
            0: {
                'alpha': (indiv_prior_fn, 1.5),
                'beta': (indiv_prior_fn, 1.5),
            }
        }
        return hparams

    def prior_fn(raw):
        # Independent priors: the joint log density is the sum.
        return indiv_prior_fn(raw['alpha']) + indiv_prior_fn(raw['beta'])

    def kernel_fn(s, arg, rng):
        # PEP 8 (E731): a named def instead of a lambda bound to a name.
        return slice_hp(s, rng, hparams=arg)

    _test_kernel_slice_hp(initialize, init_inf_kernel_state_fn, prior_fn,
                          numpy_dataview, bind, kernel_fn,
                          'grid_slice_hp_samples.pdf', rng())
def test_kernel_slice_hp_noninform():
    """Exercise joint ('alpha', 'beta') slice updates, noninformative prior."""
    def prior_fn(raw):
        return log_noninformative_beta_prior(raw['alpha'], raw['beta'])

    def init_inf_kernel_state_fn(s):
        # Joint update spec for feature 0, slice width 1.0.
        return {
            0: {
                ('alpha', 'beta'): (log_noninformative_beta_prior, 1.0),
            }
        }

    kernel_fn = lambda s, arg, rng: slice_hp(s, rng, hparams=arg)
    _test_kernel_slice_hp(initialize, init_inf_kernel_state_fn, prior_fn,
                          numpy_dataview, bind, kernel_fn,
                          'grid_slice_hp_noninform_samples.pdf', rng())
def test_kernel_slice_cluster_hp():
    """Slice-sample the clustering hyperparameter alpha, Exp(1.5) prior."""
    prior_fn = log_exponential(1.5)
    grid_min, grid_max, grid_n = 0.0, 50., 100

    def init_inf_kernel_state_fn(s):
        # cparam spec: slice width 1. for alpha.
        return {'alpha': (prior_fn, 1.)}

    def kernel_fn(s, arg, rng):
        return slice_hp(s, rng, cparam=arg)

    _test_cluster_hp_inference(initialize, prior_fn, grid_min, grid_max,
                               grid_n, numpy_dataview, bind,
                               init_inf_kernel_state_fn, kernel_fn,
                               map_actual_postprocess_fn=lambda x: x,
                               prng=rng())
def test_kernel_slice_hp():
    """Slice-sample 'alpha' and 'beta' independently, Exp(1.2) priors."""
    indiv_prior_fn = log_exponential(1.2)

    def prior_fn(raw):
        # Independent priors: joint log density is the sum.
        return indiv_prior_fn(raw['alpha']) + indiv_prior_fn(raw['beta'])

    def init_inf_kernel_state_fn(s):
        # Per-hyperparameter update spec for feature 0, slice width 1.5.
        return {
            0: {
                'alpha': (indiv_prior_fn, 1.5),
                'beta': (indiv_prior_fn, 1.5),
            }
        }

    def kernel_fn(s, arg, rng):
        return slice_hp(s, rng, hparams=arg)

    _test_kernel_slice_hp(initialize, init_inf_kernel_state_fn, prior_fn,
                          numpy_dataview, bind, kernel_fn,
                          'grid_slice_hp_samples.pdf', rng())
 # NOTE(review): stray fragment — a duplicated copy of the nested
 # sample_fn from _test_scalar_hp_inference above, pasted at one-space
 # indent with its enclosing function missing. It references free names
 # (model, r, hparams, every, latent, scalar_hp_key) that are not in
 # scope here; presumably a scrape artifact — confirm and remove.
 def sample_fn():
     for _ in xrange(every):
         slice_hp(model, r, hparams=hparams)
     return latent.get_feature_hp(0)[scalar_hp_key]
# Example #10
# 0
 # NOTE(review): another duplicated sample_fn fragment (see the body of
 # _test_scalar_hp_inference); its free names (model, r, hparams, every,
 # latent, scalar_hp_key) are unbound at this indentation level —
 # presumably a scrape artifact to be deleted.
 def sample_fn():
     for _ in xrange(every):
         slice_hp(model, r, hparams=hparams)
     return latent.get_feature_hp(0)[scalar_hp_key]