Example 1
def measure_marginal_log_likelihood(model, dataset, subdataset, seed=123, minibatch_size=20, num_samples=50):
    print "Measuring {} log likelihood".format(subdataset)
    srng = utils.srng(seed)
    test_x = dataset.data[subdataset]
    n_examples = test_x.get_value(borrow=True).shape[0]

    if n_examples % minibatch_size == 0:
        num_minibatches = n_examples // minibatch_size
    else:
        num_minibatches = n_examples // minibatch_size + 1

    index = T.lscalar('i')
    minibatch = dataset.minibatchIindex_minibatch_size(index, minibatch_size, subdataset=subdataset, srng=srng)

    log_marginal_likelihood_estimate = model.log_marginal_likelihood_estimate(minibatch, num_samples, srng)

    get_log_marginal_likelihood = theano.function([index], T.sum(log_marginal_likelihood_estimate))

    pbar = progressbar.ProgressBar(maxval=num_minibatches).start()
    sum_of_log_likelihoods = 0.
    for i in range(num_minibatches):
        summand = get_log_marginal_likelihood(i)
        sum_of_log_likelihoods += summand
        pbar.update(i)
    pbar.finish()

    marginal_log_likelihood = sum_of_log_likelihoods / n_examples

    return marginal_log_likelihood
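A minimal sketch of how this function might be invoked, assuming a trained iwae-style model and a dataset object exposing the interface used above (all names here are illustrative, not from the source):

# Hypothetical usage: test-set marginal log likelihood with 50
# importance samples per example.
test_ll = measure_marginal_log_likelihood(model, dataset, 'test',
                                          seed=123, minibatch_size=20,
                                          num_samples=50)
print("test marginal log likelihood: {:.2f}".format(test_ll))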
Example 2
def get_samples(model, num_samples, seed=123):
    srng = utils.srng(seed)
    prior_samples = model.prior.samplesIshape_srng((num_samples, model.first_p_layer_weights_np().shape[0]), srng)
    samples = [prior_samples]
    for layer in model.p_layers[:-1]:
        samples.append(layer.samplesIx_srng(samples[-1], srng))
    samples_function = theano.function([], model.p_layers[-1].meanIx(samples[-1]))

    return reshape_and_tile_images(samples_function())
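get_samples returns a tiled 2-D array of decoder means; one hedged way to inspect it is to save it as an image. This sketch assumes matplotlib is available and that model is an already trained iwae-style model (both assumptions, not part of the source):

import matplotlib
matplotlib.use('Agg')  # render without a display
import matplotlib.pyplot as plt

tiled = get_samples(model, num_samples=100)  # model: hypothetical trained model
plt.imshow(tiled, cmap='Greys')
plt.axis('off')
plt.savefig('samples.png', bbox_inches='tight')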
Example 3
    def measure_marginal_log_likelihood(self,
                                        dataset,
                                        subdataset,
                                        batch_size,
                                        num_samples,
                                        seed=123,
                                        z_mu=None):
        '''
        Adapted from the iwae code; the original method name is kept to make
        it clear what is being implemented.

        The iwae version takes:

        model: not needed as an argument here, since this method is part of
        the model class and has direct access to it.

        dataset: the input dataset.

        subdataset: identifies whether the training or the test set is being
        evaluated; the examples are selected accordingly.

        batch_size: determines the number of minibatches to use.

        num_samples: the number of importance samples drawn per example.
        '''
        print("Measuring {} log likelihood".format(subdataset))
        srng = utils.srng(seed)

        if subdataset == 'train':
            n_examples = mnist.train.num_examples
        else:
            n_examples = mnist.test.num_examples

        if n_examples % batch_size == 0:
            num_minibatches = n_examples // batch_size
        else:
            num_minibatches = n_examples // batch_size + 1

        index = T.lscalar('i')

        minibatch = dataset.minibatchIindex_minibatch_size(
            index, batch_size, subdataset=subdataset, srng=srng)
        log_marginal_likelihood_estimate = self.log_marginal_likelihood_estimate(
            minibatch, num_samples, srng)
        get_log_marginal_likelihood = theano.function(
            [index], T.sum(log_marginal_likelihood_estimate))

        sum_of_log_likelihoods = 0.
        for i in range(num_minibatches):
            summand = get_log_marginal_likelihood(i)
            sum_of_log_likelihoods += summand

        marginal_log_likelihood = sum_of_log_likelihoods / n_examples

        return marginal_log_likelihood
Example 4
    def checkpoint0(dataset):
        data_dimension = dataset.get_data_dim()
        model = iwae.random_iwae(latent_units=[data_dimension] + latent_units,
                                 hidden_units_q=hidden_units_q,
                                 hidden_units_p=hidden_units_p,
                                 dataset=dataset
                                 )
        srng = utils.srng()
        optimizer = optimizers.Adam(model=model, learning_rate=1e-3)

        return model, optimizer, srng
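A fresh-start initialiser like this is typically paired with a checkpoint loader (compare Example 6). A minimal usage sketch, assuming latent_units, hidden_units_q, and hidden_units_p are defined in the enclosing scope exactly as the snippet requires:

# Hypothetical: build a model/optimizer pair from scratch for a new run.
model, optimizer, srng = checkpoint0(dataset)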
Example 5
def get_samples(model, num_samples, seed=123):
    srng = utils.srng(seed)
    prior_samples = model.prior.samplesIshape_srng(
        (num_samples, model.first_p_layer_weights_np().shape[0]), srng)
    samples = [prior_samples]
    for layer in model.p_layers[:-1]:
        samples.append(layer.samplesIx_srng(samples[-1], srng))
    samples_function = theano.function([],
                                       model.p_layers[-1].meanIx(samples[-1]))

    return reshape_and_tile_images(samples_function())
Example 6
def load_checkpoint(directory_name, i):
    '''Loads model, optimizer, and random number generator from a pickle file named training_state[i].pkl
    Returns -1, None, None, None if loading failed
    Returns i, model, optimizer, random number generator if loading succeeded'''
    try:
        load_from_filename = os.path.join(directory_name, "training_state{}.pkl".format(i))

        with open(load_from_filename, "rb") as f:
            model, optimizer, rstate = pkl.load(f)
        srng = utils.srng()
        srng.rstate = rstate
        loaded_checkpoint = i
    except Exception:
        loaded_checkpoint = -1
        model, optimizer, srng = None, None, None
    return loaded_checkpoint, model, optimizer, srng
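A sketch of the resume-or-restart pattern this function suggests: scan downwards from the newest candidate index and fall back to a fresh initialisation (max_checkpoints and checkpoint0 are assumptions here, the latter modelled on Example 4):

# Hypothetical: resume from the most recent checkpoint, else start fresh.
loaded = -1
for i in reversed(range(max_checkpoints)):
    loaded, model, optimizer, srng = load_checkpoint(directory_name, i)
    if loaded != -1:
        break
if loaded == -1:
    model, optimizer, srng = checkpoint0(dataset)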
Example 7
def get_units_variances(model, dataset):
    srng = utils.srng()

    x = dataset.minibatchIindex_minibatch_size(0, 500, subdataset='train', srng=srng)

    samples = model.q_samplesIx_srng(x, srng)

    means = []
    for layer, x in zip(model.q_layers, samples):
        mean, _ = layer.mean_sigmaIx(x)
        means.append(mean)

    mean_fun = theano.function([], means)
    mean_vals = mean_fun()

    vars_of_means = [np.var(mean_val, axis=0) for mean_val in mean_vals]

    return vars_of_means
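These per-unit variances of the posterior means are the quantity the IWAE paper uses to count "active" latent units: a unit counts as active when the variance of its posterior mean across inputs exceeds a small cutoff. A sketch, with the 0.01 threshold taken from Burda et al. but best treated as an assumption:

import numpy as np

vars_of_means = get_units_variances(model, dataset)
# A unit is 'active' if its posterior mean varies appreciably across inputs.
active_per_layer = [int(np.sum(v > 0.01)) for v in vars_of_means]
print("active units per stochastic layer:", active_per_layer)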
Example 8
def get_units_variances(model, dataset):
    srng = utils.srng()

    x = dataset.minibatchIindex_minibatch_size(0,
                                               500,
                                               subdataset='train',
                                               srng=srng)

    samples = model.q_samplesIx_srng(x, srng)

    means = []
    for layer, x in zip(model.q_layers, samples):
        mean, _ = layer.mean_sigmaIx(x)
        means.append(mean)

    mean_fun = theano.function([], means)
    mean_vals = mean_fun()

    vars_of_means = [np.var(mean_val, axis=0) for mean_val in mean_vals]

    return vars_of_means
Example 9
def measure_marginal_log_likelihood(model,
                                    dataset,
                                    subdataset,
                                    seed=123,
                                    minibatch_size=20,
                                    num_samples=50):
    print "Measuring {} log likelihood".format(subdataset)
    srng = utils.srng(seed)
    test_x = dataset.data[subdataset]
    n_examples = test_x.get_value(borrow=True).shape[0]

    if n_examples % minibatch_size == 0:
        num_minibatches = n_examples // minibatch_size
    else:
        num_minibatches = n_examples // minibatch_size + 1

    index = T.lscalar('i')
    minibatch = dataset.minibatchIindex_minibatch_size(index,
                                                       minibatch_size,
                                                       subdataset=subdataset,
                                                       srng=srng)

    log_marginal_likelihood_estimate = model.log_marginal_likelihood_estimate(
        minibatch, num_samples, srng)

    get_log_marginal_likelihood = theano.function(
        [index], T.sum(log_marginal_likelihood_estimate))

    pbar = progressbar.ProgressBar(maxval=num_minibatches).start()
    sum_of_log_likelihoods = 0.
    for i in range(num_minibatches):
        summand = get_log_marginal_likelihood(i)
        sum_of_log_likelihoods += summand
        pbar.update(i)
    pbar.finish()

    marginal_log_likelihood = sum_of_log_likelihoods / n_examples

    return marginal_log_likelihood
Example 10
    def checkpoint0(dataset):
        if dataset == 'MNIST':
            continuous = True
        else:
            raise RuntimeError('Case not implemented')

        x_train = load_dataset_from_name(dataset)  # assumed to return the [NxP] training matrix used below

        model = VA(
            numberOfInducingPoints, # Number of inducing points in sparse GP
            batchSize,              # Size of mini batch
            dimX,                   # Dimensionality of the latent co-ordinates
            dimZ,                   # Dimensionality of the latent variables
            x_train,                   # [NxP] matrix of observations
            kernelType=kernelType,
            encoderType_qX=encoderType_qX,  # 'FreeForm', 'MLP', 'Kernel'.
            encoderType_rX=encoderType_rX,  # 'FreeForm', 'MLP', 'Kernel', 'NoEncoding'.
            encoderType_ru=encoderType_ru,  # 'FreeForm', 'MLP', 'NoEncoding'
            Xu_optimise=Xu_optimise,
            numHiddenUnits_encoder=numHiddenUnits_encoder,
            numHiddentUnits_decoder=numHiddentUnits_decoder,
            continuous=continuous
        )

        model.construct_L_using_r()

        model.setKernelParameters(0.01, 5 * np.ones((2,)),
                                  1e-100, 0.5,
                                  [1e-10, 1e-10], [10, 10])

        model.randomise()

        model.constructUpdateFunction()

        srng = utils.srng()

        return model, srng