def _test_cluster_hp_inference(initialize_fn, prior_fn, grid_min, grid_max, grid_n, dataview, bind_fn, init_inf_kernel_state_fn, inf_kernel_fn, map_actual_postprocess_fn, prng, burnin=1000, nsamples=1000, skip=10, trials=100, places=2): print '_test_cluster_hp_inference: burnin', burnin, 'nsamples', nsamples, \ 'skip', skip, 'trials', trials, 'places', places N = 1000 D = 5 # create random binary data, doesn't really matter what the values are Y = np.random.random(size=(N, D)) < 0.5 Y = np.array([tuple(y) for y in Y], dtype=[('', np.bool)] * D) view = dataview(Y) defn = model_definition(N, [bb] * D) latent = initialize_fn(defn, view, r=prng) model = bind_fn(latent, view) def score_alpha(alpha): prev_alpha = latent.get_cluster_hp()['alpha'] latent.set_cluster_hp({'alpha': alpha}) score = prior_fn(alpha) + latent.score_assignment() latent.set_cluster_hp({'alpha': prev_alpha}) return score def sample_fn(): for _ in xrange(skip - 1): inf_kernel_fn(model, opaque, prng) inf_kernel_fn(model, opaque, prng) return latent.get_cluster_hp()['alpha'] alpha0 = np.random.uniform(grid_min, grid_max) print 'start alpha:', alpha0 latent.set_cluster_hp({'alpha': alpha0}) opaque = init_inf_kernel_state_fn(latent) for _ in xrange(burnin): inf_kernel_fn(model, opaque, prng) print 'finished burnin of', burnin, 'iterations' print 'grid_min', grid_min, 'grid_max', grid_max assert_1d_cont_dist_approx_emp(sample_fn, score_alpha, grid_min, grid_max, grid_n, trials, nsamples, places)
def _test_scalar_hp_inference(view, prior_fn, w, grid_min, grid_max, grid_n, likelihood_model, scalar_hp_key, burnin=1000, nsamples=1000, every=10, trials=100, places=2): """ view must be 1D """ r = rng() hparams = {0: {scalar_hp_key: (prior_fn, w)}} def score_fn(scalar): d = latent.get_feature_hp(0) prev_scalar = d[scalar_hp_key] d[scalar_hp_key] = scalar latent.set_feature_hp(0, d) score = prior_fn(scalar) + latent.score_data(0, None, r) d[scalar_hp_key] = prev_scalar latent.set_feature_hp(0, d) return score defn = model_definition(len(view), [likelihood_model]) latent = initialize(defn, view, r=r) model = bind(latent, view) def sample_fn(): for _ in xrange(every): slice_hp(model, r, hparams=hparams) return latent.get_feature_hp(0)[scalar_hp_key] for _ in xrange(burnin): slice_hp(model, r, hparams=hparams) print 'finished burnin of', burnin, 'iterations' print 'grid_min', grid_min, 'grid_max', grid_max assert_1d_cont_dist_approx_emp(sample_fn, score_fn, grid_min, grid_max, grid_n, trials, nsamples, places)
def _test_scalar_hp_inference(view, prior_fn, w, grid_min, grid_max, grid_n,
                              likelihood_model, scalar_hp_key, burnin=1000,
                              nsamples=1000, every=10, trials=100, places=2):
    """
    view must be 1D

    Tests that slice sampling the scalar hyperparameter `scalar_hp_key` of
    feature 0 produces samples matching the grid-evaluated posterior score
    `prior_fn(x) + score_data(0, None, r)`.

    NOTE(review): this definition is an exact duplicate of the earlier
    _test_scalar_hp_inference in this file; at import time this later def
    shadows the earlier one -- consider removing one copy.
    """
    r = rng()
    # Slice-sampler config: only feature 0's `scalar_hp_key` is resampled,
    # with prior `prior_fn` and slice width `w`.
    hparams = {0: {scalar_hp_key: (prior_fn, w)}}

    def score_fn(scalar):
        # Temporarily set the hyperparameter to `scalar`, score, then
        # restore the saved value so the chain state is unchanged.
        d = latent.get_feature_hp(0)
        prev_scalar = d[scalar_hp_key]
        d[scalar_hp_key] = scalar
        latent.set_feature_hp(0, d)
        score = prior_fn(scalar) + latent.score_data(0, None, r)
        d[scalar_hp_key] = prev_scalar
        latent.set_feature_hp(0, d)
        return score

    defn = model_definition(len(view), [likelihood_model])
    latent = initialize(defn, view, r=r)
    model = bind(latent, view)

    def sample_fn():
        # Thin the chain: `every` slice moves per reported sample.
        for _ in xrange(every):
            slice_hp(model, r, hparams=hparams)
        return latent.get_feature_hp(0)[scalar_hp_key]

    for _ in xrange(burnin):
        slice_hp(model, r, hparams=hparams)
    print 'finished burnin of', burnin, 'iterations'
    print 'grid_min', grid_min, 'grid_max', grid_max
    assert_1d_cont_dist_approx_emp(sample_fn, score_fn, grid_min, grid_max,
                                   grid_n, trials, nsamples, places)
def _test_cluster_hp_inference(initialize_fn, prior_fn, grid_min, grid_max,
                               grid_n, dataview, bind_fn,
                               init_inf_kernel_state_fn, inf_kernel_fn,
                               map_actual_postprocess_fn, prng, burnin=1000,
                               nsamples=1000, skip=10, trials=100, places=2):
    """Tests that `inf_kernel_fn` recovers the posterior over the cluster
    hyperparameter 'alpha' on synthetic binary data, comparing sampled
    values against the grid-evaluated score
    `prior_fn(alpha) + score_assignment()`.

    NOTE(review): `map_actual_postprocess_fn` is accepted but never used in
    this body. Also, this definition is an exact duplicate of the earlier
    _test_cluster_hp_inference in this file; this later def shadows the
    earlier one -- consider removing one copy.
    """
    print '_test_cluster_hp_inference: burnin', burnin, 'nsamples', nsamples, \
        'skip', skip, 'trials', trials, 'places', places
    N = 1000
    D = 5
    # create random binary data, doesn't really matter what the values are
    Y = np.random.random(size=(N, D)) < 0.5
    # Pack rows into a structured array of D boolean fields.
    Y = np.array([tuple(y) for y in Y], dtype=[('', np.bool)] * D)
    view = dataview(Y)
    defn = model_definition(N, [bb] * D)
    latent = initialize_fn(defn, view, r=prng)
    model = bind_fn(latent, view)

    def score_alpha(alpha):
        # Temporarily set 'alpha', score, then restore the saved value so
        # the chain state is unchanged.
        prev_alpha = latent.get_cluster_hp()['alpha']
        latent.set_cluster_hp({'alpha': alpha})
        score = prior_fn(alpha) + latent.score_assignment()
        latent.set_cluster_hp({'alpha': prev_alpha})
        return score

    def sample_fn():
        # Thin the chain: `skip` total kernel steps per reported sample.
        for _ in xrange(skip - 1):
            inf_kernel_fn(model, opaque, prng)
        inf_kernel_fn(model, opaque, prng)
        return latent.get_cluster_hp()['alpha']

    # Start the chain from a random point on the scoring grid.
    alpha0 = np.random.uniform(grid_min, grid_max)
    print 'start alpha:', alpha0
    latent.set_cluster_hp({'alpha': alpha0})
    opaque = init_inf_kernel_state_fn(latent)
    for _ in xrange(burnin):
        inf_kernel_fn(model, opaque, prng)
    print 'finished burnin of', burnin, 'iterations'
    print 'grid_min', grid_min, 'grid_max', grid_max
    assert_1d_cont_dist_approx_emp(sample_fn, score_alpha, grid_min, grid_max,
                                   grid_n, trials, nsamples, places)