def __init__(self):
        """Build a dense RBM trainer (784 visible, 500 hidden units) over MNIST.

        NOTE(review): `dataset` (passed to load_data below) is not defined in
        this method and is not visible in this file -- presumably a
        module-level name; confirm before relying on this code.
        """
        self.mnist = MNIST.full()  # full dataset; first_10k() was an alternative

        datasets = load_data(dataset)

        # datasets[0] is the training split, datasets[2] the test split;
        # datasets[1] (validation) is not used here.
        train_set_x, train_set_y = datasets[0]
        test_set_x, test_set_y = datasets[2]

        batch_size = 100  # size of the minibatch

        # compute number of minibatches for training, validation and testing
        # (this file uses Python 2 print syntax, so / here is integer
        # division -- any partial final batch is dropped)
        n_train_batches = train_set_x.value.shape[0] / batch_size

        # allocate symbolic variables for the data
        index = T.lscalar()  # index to a [mini]batch
        x = T.matrix("x")  # the data is presented as rasterized images

        # fixed seeds so runs are reproducible
        rng = numpy.random.RandomState(123)
        theano_rng = RandomStreams(rng.randint(2 ** 30))

        # initialize storage for the persistent chain (state = hidden layer of chain)
        persistent_chain = theano.shared(numpy.zeros((batch_size, 500)))

        # construct the RBM class: 28x28 rasterized images -> 500 hidden units
        self.rbm = RBM(input=x, n_visible=28 * 28, n_hidden=500, numpy_rng=rng, theano_rng=theano_rng)

        # get the cost and the gradient corresponding to one step of CD

        self.init_series()
    def __init__(self, state):
        self.state = state

        if TEST_CONFIG:
            self.mnist = MNIST.first_1k()
            print "Test config, so loaded MNIST first 1000"
        else:
            self.mnist = MNIST.full()#first_10k()
            print "Loaded MNIST full"

        self.cp = ConvolutionParams( \
                    num_filters=state.num_filters,
                    num_input_planes=1,
                    height_filters=state.filter_size,
                    width_filters=state.filter_size)

        self.image_size = (28,28)

        self.minibatch_size = state.minibatch_size

        self.lr = state.learning_rate
        self.sparsity_lambda = state.sparsity_lambda
        # about 1/num_filters, so only one filter active at a time
        # 40 * 0.05 = ~2 filters active for any given pixel
        self.sparsity_p = state.sparsity_p

        self.crbm = CRBM( \
                    minibatch_size=self.minibatch_size,
                    image_size=self.image_size,
                    conv_params=self.cp,
                    learning_rate=self.lr,
                    sparsity_lambda=self.sparsity_lambda,
                    sparsity_p=self.sparsity_p)
        
        self.num_epochs = state.num_epochs

        self.init_series()
Пример #3
0
def ais_data(fname, do_exact=True):

    rbm_params = load_rbm_params(fname)

    # load data to set visible biases to ML solution
    from pylearn.datasets import MNIST
    dataset = MNIST.train_valid_test()
    data = numpy.asarray(dataset.train.x, dtype=config.floatX)

    # run ais using B=0 model with ML visible biases
    t1 = time.time()
    (logz, log_var_dz), aisobj = rbm_tools.rbm_ais(rbm_params, n_runs=100, seed=123, data=data)
    print 'AIS logZ         : %f' % logz
    print '    log_variance : %f' % log_var_dz
    print 'Elapsed time: ', time.time() - t1

    if do_exact:
        exact_logz = compute_logz(rbm_params)
        print 'Exact logZ = %f' % exact_logz
        numpy.testing.assert_almost_equal(exact_logz, logz, decimal=0)
Пример #4
0
    def __init__(self, which_set, center = False):
        """Load the requested MNIST split as a dense design matrix.

        which_set: 'train' (60000 examples) or 'test' (10000 examples);
            any other value raises KeyError.
        center: unsupported -- passing True trips an assertion.
        """
        # pylearn's loader insists on a validation split; request an empty one.
        data = i_hate_python.train_valid_test(ntrain=60000, nvalid=0, ntest=10000)

        split_lookup = {'train': data.train.x, 'test': data.test.x}
        X = N.cast['float32'](split_lookup[which_set])

        if center:
            assert False  # centering is not implemented

        # 28x28 grayscale images, a single channel.
        converter = dense_design_matrix.DefaultViewConverter((28, 28, 1))
        super(MNIST, self).__init__(X=X, view_converter=converter)

        # Fail fast if the loader produced NaNs.
        assert not N.any(N.isnan(self.X))
Пример #5
0
    def __init__(self, which_set, center = False):
        """Build the MNIST dense design matrix for `which_set`.

        which_set: 'train' or 'test'; any other value raises KeyError via
            the Xs lookup below.
        center: not supported -- passing True fails the assertion below.
        """
        #dear pylearn.datasets.MNIST: there is no such thing as the MNIST validation set. quit pretending that there is.
        orig = i_hate_python.train_valid_test(ntrain=60000,nvalid=0,ntest=10000)

        # Map split name -> raw design matrix from the pylearn loader.
        Xs = {
                'train' : orig.train.x,
                'test'  : orig.test.x
            }

        X = N.cast['float32'](Xs[which_set])

        if center:
            assert False  # centering is not implemented

        # 28x28 single-channel images.
        view_converter = dense_design_matrix.DefaultViewConverter((28,28,1))

        super(MNIST,self).__init__(X = X, view_converter = view_converter)

        # Fail fast if the loaded data contains NaNs.
        assert not N.any(N.isnan(self.X))
Пример #6
0
def ais_data(fname, do_exact=True):
    """Estimate the RBM log partition function with AIS and print results.

    fname: path of the saved RBM parameters (read by load_rbm_params).
    do_exact: when True, also compute the exact log Z and assert the AIS
        estimate roughly agrees with it (decimal=0 precision).
    """
    rbm_params = load_rbm_params(fname)

    # load data to set visible biases to ML solution
    from pylearn.datasets import MNIST
    dataset = MNIST.train_valid_test()
    data = numpy.asarray(dataset.train.x, dtype=config.floatX)

    # run ais using B=0 model with ML visible biases
    t1 = time.time()
    (logz, log_var_dz), aisobj = rbm_tools.rbm_ais(rbm_params,
                                                   n_runs=100,
                                                   seed=123,
                                                   data=data)
    print 'AIS logZ         : %f' % logz
    print '    log_variance : %f' % log_var_dz
    print 'Elapsed time: ', time.time() - t1

    if do_exact:
        exact_logz = compute_logz(rbm_params)
        print 'Exact logZ = %f' % exact_logz
        numpy.testing.assert_almost_equal(exact_logz, logz, decimal=0)