Example #1
    def test_overcomplete(self):
        H = self.H
        D = self.D

        Wgt = barstest.generate_bars_dict(H)

        # Embed the ground-truth columns (in reversed order) into an
        # overcomplete dictionary with two extra all-zero columns.
        W = np.zeros((D, H + 2))
        W[:, :H] = Wgt[:, ::-1]

        # find_permutation should pick out the columns of W that reproduce Wgt.
        perm = barstest.find_permutation(W, Wgt)

        self.assertTrue((W[:, perm] == Wgt).all())
Example #2
    def test_find_permutation(self):
        H = self.H
        D = self.D

        Wgt = barstest.generate_bars_dict(H)

        # Repeatedly apply a random column permutation and check that it is
        # recovered.
        for r in range(10):
            perm_gt = np.random.permutation(H)
            W = Wgt[:, perm_gt]

            self.assertEqual(W.shape, (D, H))

            # Both implementations should agree with each other ...
            perm = barstest.find_permutation(W, Wgt)
            perm2 = barstest.find_permutation2(W, Wgt)

            self.assertTrue((perm == perm2).all())

            # ... and applying the recovered permutation should undo perm_gt.
            self.assertTrue((W[:, perm] == Wgt).all())
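The two tests above only exercise the contract of find_permutation: given a
column-permuted (or overcomplete) dictionary W and the ground truth Wgt, it
returns indices such that W[:, perm] reproduces Wgt. A minimal numpy sketch of
the underlying idea (an illustrative nearest-column matcher, not prosper's
actual implementation):

import numpy as np

def match_columns(W, Wgt):
    # Illustrative stand-in: for each ground-truth column, pick the index
    # of the closest column of W (squared Euclidean distance).
    perm = np.zeros(Wgt.shape[1], dtype=int)
    for h in range(Wgt.shape[1]):
        dists = ((W - Wgt[:, h][:, None]) ** 2).sum(axis=0)
        perm[h] = np.argmin(dists)
    return perm

Wgt = np.eye(4)                   # toy "ground truth" dictionary
W = Wgt[:, [2, 0, 3, 1]]          # column-permuted copy
perm = match_columns(W, Wgt)
assert (W[:, perm] == Wgt).all()  # the permutation is undone, as in the tests above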
Example #3
from prosper.utils.barstest import generate_bars_dict

# Number of datapoints to generate
N = 1000

# Each datapoint is of D = size*size
size = 5

# Dimensionality of the model
H = 2 * size  # number of latents
D = size**2   # dimensionality of observed data

# Approximation parameters for Expectation Truncation
Hprime = 7
gamma = 5
 
# Import and instantiate a model
from prosper.em.camodels.tsc_et import Ternary_ET
model = Ternary_ET(D, H, Hprime, gamma)


# Ground truth parameters. Only used to generate training data.
params_gt = {
    'W'     :  10*generate_bars_dict(H),
    'pi'    :  1.0 / size,
    'sigma' :  2.0
}

from prosper.em.annealing import LinearAnnealing
anneal = LinearAnnealing(300)
anneal['T'] = [(0, 2.), (.7, 1.)]
anneal['Ncut_factor'] = [(0, 0.), (2. / 3, 1.)]
anneal['anneal_prior'] = False
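The annealing entries above are lists of (position, value) pairs. A minimal
sketch of one plausible reading, assuming positions are fractions of the total
number of annealing steps and values are interpolated linearly between anchors
(an illustration of the schedule shape, not LinearAnnealing's actual code):

import numpy as np

def linear_schedule(anchors, n_steps):
    # Expand anchor pairs such as [(0, 2.), (.7, 1.)] into one value per step,
    # holding the value constant before the first and after the last anchor.
    positions = np.array([p for p, _ in anchors]) * n_steps
    values = np.array([v for _, v in anchors])
    return np.interp(np.arange(n_steps), positions, values)

T = linear_schedule([(0, 2.), (.7, 1.)], 300)
print(T[0], T[150], T[250])  # starts at 2.0 and cools to 1.0 by step 0.7 * 300 = 210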
Example #4
import numpy as np

from prosper.utils.barstest import generate_bars_dict

np.random.seed(1)

# Number of datapoints to generate
N = 1000

# Each datapoint is of D = size*size
size = 5

# Dimensionality of the model
H = 2 * size  # number of latents
D = size**2  # dimensionality of observed data

# Approximation parameters for Expectation Truncation
Hprime = 8
gamma = 5

# Import and instantiate a model
from prosper.em.camodels.mca_et import MCA_ET
model = MCA_ET(D, H, Hprime, gamma)

# Ground truth parameters. Only used to generate training data.
params_gt = {'W': 10 * generate_bars_dict(H), 'pi': 1.0 / size, 'sigma': 2.0}

# Choose annealing schedule
from prosper.em.annealing import LinearAnnealing
anneal = LinearAnnealing(300)
anneal['T'] = [(0, 4.), (.8, 1.)]
anneal['Ncut_factor'] = [(0, 0.), (2. / 3, 1.)]
anneal['anneal_prior'] = False
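The example stops after the annealing schedule; the ground-truth parameters are
only there to produce synthetic training data. A simplified, self-contained
stand-in for that data-generation step (sparse binary latents combined linearly
with Gaussian noise; MCA's actual generative model combines causes via a
maximum rather than a sum, so this only illustrates the shapes and parameter
roles):

import numpy as np
from prosper.utils.barstest import generate_bars_dict

np.random.seed(1)
size, N = 5, 1000
H, D = 2 * size, size**2
W_gt = 10 * generate_bars_dict(H)                       # (D, H) ground-truth dictionary
s = (np.random.rand(N, H) < 1.0 / size).astype(float)   # sparse binary latents, pi = 1/size
Y = s @ W_gt.T + 2.0 * np.random.randn(N, D)            # sigma = 2.0 observation noise
print(Y.shape)                                          # (1000, 25)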
Example #5
    def test_generate_negbars(self):
        # With neg_bars=True the dictionary may contain negative bars, so
        # entries are restricted to {-1., 0., 1.}.
        W = barstest.generate_bars_dict(self.H, neg_bars=True)

        self.assertTrue(((W == -1.) + (W == 0.) + (W == 1.)).all())
        self.assertTrue((W == -1.).any())
        self.assertEqual(W.shape, (self.D, self.H))
Example #6
    def test_generate(self):
        # The default dictionary is binary: every entry is either 0. or 1.
        W = barstest.generate_bars_dict(self.H)

        self.assertTrue(((W == 0.) + (W == 1.)).all())
        self.assertEqual(W.shape, (self.D, self.H))
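To see what the dictionary checked by these tests contains, each column of
generate_bars_dict(H) can be reshaped into a square image; in the standard bars
test each column is then a single horizontal or vertical bar. A small
illustrative sketch (only the call to generate_bars_dict comes from the
library; the reshaping and printing rest on the D = size*size layout used in
the examples above):

import numpy as np
from prosper.utils.barstest import generate_bars_dict

H = 6                                    # number of latents (bars)
W = generate_bars_dict(H)                # shape (D, H) with entries in {0., 1.}
size = int(round(np.sqrt(W.shape[0])))   # D = size * size in the bars test
for h in range(H):
    print(f"latent {h}:")
    print(W[:, h].reshape(size, size).astype(int))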
Example #7
# slightly more costly.

#sigma_sq_type = 'full'   # uncomment to make the model use a full D x D
                          # observation-noise covariance (see the np.eye(D)
                          # ground-truth value below). Makes the algorithm even
                          # more costly in terms of both computation and memory.
                          # Should be avoided for large D.

# Import and instantiate a model
from prosper.em.camodels.gsc_et import GSC
model = GSC(D, H, Hprime, gamma, sigma_sq_type)

# Model parameters used when artificially generating
# ground-truth data. This will NOT be used for the learning
# process.
params_gt = {
    'W': 10 * generate_bars_dict(H),  # from prosper.utils.barstest; also used in bars-create-data
    'pi': 2. / H * np.ones(H),
    'mu': np.ones(H),
    'psi_sq': np.eye(H)
}

if sigma_sq_type == 'scalar':
    params_gt['sigma_sq'] = 1.0
elif sigma_sq_type == 'diagonal':
    params_gt['sigma_sq'] = np.ones(D)
elif sigma_sq_type == 'full':
    params_gt['sigma_sq'] = np.eye(D)

# Choose annealing schedule
from prosper.em.annealing import LinearAnnealing
anneal = LinearAnnealing(150)
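The three sigma_sq_type settings only differ in how the observation-noise
(co)variance is parameterized, as the ground-truth values above already show.
A small illustrative numpy sketch of what each shape corresponds to when
drawing Gaussian noise (an assumption about the intended semantics, not
prosper's sampling code):

import numpy as np

def draw_noise(sigma_sq, D, n=4):
    # 'scalar': one shared variance, 'diagonal': one variance per dimension,
    # 'full': an arbitrary D x D covariance matrix.
    if np.isscalar(sigma_sq):
        cov = sigma_sq * np.eye(D)
    elif np.ndim(sigma_sq) == 1:
        cov = np.diag(sigma_sq)
    else:
        cov = sigma_sq
    return np.random.multivariate_normal(np.zeros(D), cov, size=n)

D = 25
print(draw_noise(1.0, D).shape)          # scalar case, as in params_gt above
print(draw_noise(np.ones(D), D).shape)   # diagonal case
print(draw_noise(np.eye(D), D).shape)    # full case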