Ejemplo n.º 1
0
def test_flow_forward_apply(flow_spec):
    """Forward pass of a flow built on a (10, 20) base sample keeps that shape."""
    base = pm.tt_rng().normal(size=(10, 20))
    instance = flow_spec(dim=20, z0=base)
    with change_flags(compute_test_value='off'):
        evaluated = instance.forward.shape.eval()
    assert tuple(evaluated) == (10, 20)
Ejemplo n.º 2
0
def test_flow_det_shape(flow_spec):
    """logdet of a flow over a (10, 20) sample yields one value per row."""
    with change_flags(compute_test_value='off'):
        base = pm.tt_rng().normal(size=(10, 20))
        logdet_shape = flow_spec(dim=20, z0=base).logdet.shape.eval()
    assert tuple(logdet_shape) == (10,)
def test_flow_det_shape(flow_spec):
    """The flow's log-determinant has exactly one entry per input sample."""
    with change_flags(compute_test_value='off'):
        sample = pm.tt_rng().normal(size=(10, 20))
        instance = flow_spec(dim=20, z0=sample)
        det_tensor = instance.logdet
        observed = det_tensor.shape.eval()
    assert tuple(observed) == (10,)
def test_flow_forward_apply(flow_spec):
    """Applying the flow forward preserves the (draws, dim) layout."""
    draws = pm.tt_rng().normal(size=(10, 20))
    model = flow_spec(dim=20, z0=draws)
    with change_flags(compute_test_value='off'):
        forward_tensor = model.forward
        result_shape = forward_tensor.shape.eval()
    assert tuple(result_shape) == (10, 20)
Ejemplo n.º 5
0
 def test_gen_cloning_with_shape_change(self, datagen):
     """Cloning a scan over generator data with a larger shared replacement.

     The scanned reduction over the generator minibatch has length 50;
     after cloning with the full shared dataset it has length 1000.
     """
     source = generator(datagen)
     # Random projection whose shape tracks the generator symbolically.
     projection = tt_rng().normal(size=source.shape).T
     product = source.dot(projection)
     summed, _ = theano.scan(lambda row: row.sum(), product,
                             n_steps=product.shape[0])
     assert summed.eval().shape == (50,)
     replacement = theano.shared(datagen.data.astype(source.dtype))
     # Swap the generator input for the squared shared tensor in-graph.
     cloned = theano.clone(summed, {source: replacement**2})
     assert cloned.eval().shape == (1000,)
Ejemplo n.º 6
0
 def test_gen_cloning_with_shape_change(self, datagen):
     """Scan over generator-backed data, then clone with a shared replacement.

     Asserts the scanned result length follows the input: 50 for the
     generator minibatch, 1000 once the full shared dataset is swapped in.
     """
     gen = generator(datagen)
     # Random projection whose shape follows the generator's symbolically.
     gen_r = tt_rng().normal(size=gen.shape).T
     X = gen.dot(gen_r)
     # Row-wise sums; n_steps ties the scan length to X's symbolic row count.
     res, _ = theano.scan(lambda x: x.sum(), X, n_steps=X.shape[0])
     assert res.eval().shape == (50,)
     shared = theano.shared(datagen.data.astype(gen.dtype))
     # Replace the generator input with the squared shared tensor in-graph.
     res2 = theano.clone(res, {gen: shared**2})
     assert res2.eval().shape == (1000,)
Ejemplo n.º 7
0
 def rslice(total, size, seed):
     """Random int64 indices into range(total); ``size=None`` means take all.

     Raises TypeError for any size that is neither None nor an int.
     """
     if size is None:
         return slice(None)
     if not isinstance(size, int):
         raise TypeError('Unrecognized size type, %r' % size)
     # Tiny epsilon keeps the int cast from ever reaching `total` itself.
     upper = pm.floatX(total) - 1e-16
     return pm.tt_rng(seed).uniform(size=(size, ), low=0.0,
                                    high=upper).astype('int64')
Ejemplo n.º 8
0
 def rslice(self, total, size, seed):
     """Build a random int64 index vector of *size* draws into range(total).

     ``size=None`` returns a full slice; non-int sizes raise TypeError.
     """
     if size is None:
         return slice(None)
     if not isinstance(size, int):
         raise TypeError("Unrecognized size type, %r" % size)
     sampler = pm.tt_rng(seed)
     # Record the RNG per-instance so it can be reseeded/cloned later.
     Minibatch.RNG[id(self)].append(sampler)
     # Epsilon keeps the int cast strictly below `total`.
     ceiling = pm.floatX(total) - 1e-16
     return sampler.uniform(size=(size, ), low=0.0,
                            high=ceiling).astype("int64")
Ejemplo n.º 9
0
 def rslice(self, total, size, seed):
     """Random int64 indices of length *size*, or a full slice when size is None."""
     if size is None:
         return slice(None)
     elif isinstance(size, int):
         generator_ = pm.tt_rng(seed)
         # Keep a handle to the RNG keyed by this Minibatch instance.
         Minibatch.RNG[id(self)].append(generator_)
         draws = generator_.uniform(size=(size, ), low=0.0,
                                    high=pm.floatX(total) - 1e-16)
         return draws.astype('int64')
     else:
         raise TypeError('Unrecognized size type, %r' % size)
Ejemplo n.º 10
0
def run_model(prior, cov, lik, R, K, M, N, tau, h2, data):
    """Fit the clustering model on *data* and score cluster recovery.

    Seeds numpy and the pymc3 RNG for reproducibility, runs ADVI
    inference, then compares predicted vs. true cluster assignments.
    Returns a one-row DataFrame of the run's settings and scores.
    """
    nmp.random.seed(42)
    pmc.tt_rng(42)

    model_args = {
        'K': 20,
        'prior': prior,
        'cov': cov,
        'lik': lik,
        'threshold': 0.0,
    }
    pymc3_args = {
        'niters': 40000,
        'method': 'advi',
        'flow': 'scale-loc',
        'learning_rate': 1e-2,
        'random_seed': 42,
    }
    res = cln.infer(data, model_args=model_args, pymc3_args=pymc3_args)

    # One cluster label per unique mutation, for truth and prediction alike.
    z_true = data[['MUTID', 'CLUSTERID']].drop_duplicates().CLUSTERID.values
    z_pred = res['data'][['MUTID',
                          'CLUSTERID']].drop_duplicates().CLUSTERID.values

    summary = {
        'REP': R,
        'NCLUSTERS': K,
        'NSAMPLES': M,
        'NMUTS': N,
        'TAU': tau,
        'H2': h2,
        'PRIOR': prior,
        'COV': cov,
        'LIK': lik,
        'ARI': mtr.adjusted_rand_score(z_true, z_pred),
        'AMI': mtr.adjusted_mutual_info_score(z_true, z_pred),
        'FMI': mtr.fowlkes_mallows_score(z_true, z_pred),
    }
    return pnd.DataFrame(summary, index=[0]).reset_index(drop=True)
Ejemplo n.º 11
0
# Flatten the 2D histogram into the observation vector.
y_data = hist.flatten()
#%% pymc3 minibatch setup
# Not suitable for 2D mapping problem, overestimated lengthscale
batchsize = 10
Xbatch = pm.Minibatch(x_data, batchsize**2)
Ybatch = pm.Minibatch(y_data, batchsize**2)
#%% set up minibatch
# Hand-rolled alternative: sample a random contiguous (batchsize x batchsize)
# window from the 2D grid instead of pm.Minibatch's independent draws.
data = hist
batchsize = 10
z1, z2 = batchsize, batchsize  # window height and width
s1, s2 = np.shape(data)        # full grid dimensions
yshared = theano.shared(data)
# Coordinate grids broadcast to the full data shape.
# NOTE(review): repeat(64, ...) hard-codes a 64-wide grid — presumably
# s1 == s2 == 64 here; confirm against the data.
x1shared = theano.shared(ycenters[:, np.newaxis].repeat(64, axis=1))
x2shared = theano.shared(xcenters[:, np.newaxis].T.repeat(64, axis=0))

# Random top-left corner of the window; subtracting 1e-10 keeps the
# int64 cast strictly below the largest valid starting index.
ixs1 = pm.tt_rng().uniform(size=(1, ), low=0,
                           high=s1 - z1 - 1e-10).astype('int64')
ixs2 = pm.tt_rng().uniform(size=(1, ), low=0,
                           high=s2 - z2 - 1e-10).astype('int64')
# Symbolic index ranges covering the chosen window along each axis.
range1 = tt.arange(ixs1.squeeze(), (ixs1 + z1).squeeze())
range2 = tt.arange(ixs2.squeeze(), (ixs2 + z2).squeeze())
# Slice the same window out of the data and both coordinate grids.
Ybatch = yshared[range1][:, range2].flatten()
Xbatch1 = x1shared[range1][:, range2].flatten()
Xbatch2 = x2shared[range1][:, range2].flatten()
# Stack coordinates into an (n, 2) input matrix.
Xbatch = tt.stack((Xbatch1, Xbatch2)).T

import theano

# Disable test values so the symbolic minibatch graph builds without data.
theano.config.compute_test_value = 'off'
with pm.Model() as model:
    #hyper-parameter priors
Ejemplo n.º 12
0
def test_flow_init_loop(flow_spec):
    """Ten flows can be chained, each taking the previous as its base."""
    chained = pm.tt_rng().normal(size=(10, 2))
    for _ in range(10):
        chained = flow_spec(z0=chained, dim=2)
    chained.forward.eval()
Ejemplo n.º 13
0
def test_flow_init_loop(flow_spec):
    """Stacking flow_spec ten times still yields an evaluable forward pass."""
    layer = pm.tt_rng().normal(size=(10, 2))
    depth = 0
    while depth < 10:
        layer = flow_spec(z0=layer, dim=2)
        depth += 1
    layer.forward.eval()