Example #1
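Neither example lists its imports. A minimal header that would make them runnable, assuming the standard deeplearning.net tutorial layout (the exact module paths for load_data, cA, and RBM are assumptions; the remaining helpers such as convert_many_to_Hidden, raw_Example, the interpolation functions, and classify_with_SVM must be defined elsewhere in the project):

import time

import numpy
import PIL.Image
import theano
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams

from logistic_sgd import load_data  # MNIST loader from the tutorials (assumed path)
from cA import cA                   # contractive autoencoder class (assumed path)
from rbm import RBM                 # RBM class, used in Example #1 (assumed path)
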
def main(learning_rate=0.01, training_epochs=25,
            dataset='./data/mnist.pkl.gz',
            batch_size=10, contraction_level=.1):
    ## Define the logistic (sigmoid) function
    xi = T.dmatrix('xi')
    s = 1 / (1 + T.exp(-xi))
    logistic = theano.function([xi], s)

    ## Import the dataset
    datasets = load_data(dataset)
    train_set_x, train_set_y = datasets[0]
    trainx = train_set_x.get_value(borrow=True)
    trainy = train_set_y.owner.inputs[0].get_value(borrow=True)

    # compute the number of minibatches for training
    n_train_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size

    # allocate symbolic variables for the data
    index = T.lscalar() # index to a [mini]batch
    x = T.matrix('x') # the data is presented as rasterized images

    ####################################
    # BUILDING THE FIRST LAYER MODEL #
    ####################################

    rng = numpy.random.RandomState(123)

    ca = cA(numpy_rng=rng, input=x,
            n_visible=28 * 28, n_hidden=1000, n_batchsize=batch_size)

    cost, updates = ca.get_cost_updates(contraction_level=contraction_level,
                                        learning_rate=learning_rate)

    # Compile the training function: `givens` substitutes the minibatch slice
    # of the shared training set selected by `index` for the symbolic input x
    train_ca = theano.function([index], [T.mean(ca.L_rec), ca.L_jacob],
                               updates=updates,
                               givens={x: train_set_x[index * batch_size:
                                                      (index + 1) * batch_size]})

    start_time = time.clock()

    ############
    # TRAINING #
    ############

    # go through training epochs
    for epoch in xrange(training_epochs):
        # go through the training set
        c = []
        for batch_index in xrange(n_train_batches):
            c.append(train_ca(batch_index))

        c_array = numpy.vstack(c)
        print 'Training epoch %d, reconstruction cost ' % epoch, numpy.mean(
            c_array[:, 0]), ' jacobian norm ', numpy.mean(numpy.sqrt(c_array[:, 1]))

    end_time = time.clock()

    training_time = (end_time - start_time)
    print 'First-layer training took %.2f minutes' % (training_time / 60.)

    
    #### Extract the weights from the first layer ####
    W = ca.W.get_value(borrow=True)
    b = ca.b.get_value(borrow=True)
    W_prime = ca.W_prime.owner.inputs[0].get_value(borrow=True)
    b_prime = ca.b_prime.get_value(borrow=True)
   
    #### Compute the output after the first layer ####
    trainxHidden = convert_many_to_Hidden(trainx, W=W, b=b, logistic=logistic)

    ####################################
    # BUILDING THE SECOND LAYER MODEL #
    ####################################
    x_first = T.matrix('x_first')
    shared_x = theano.shared(trainxHidden, borrow=True)
    theano_rng = RandomStreams(rng.randint(2 ** 30))  # random stream for Gibbs sampling
    # Persistent chain for PCD: holds the Gibbs chain state between updates
    persistent_chain = theano.shared(numpy.zeros((batch_size, 1024),
                                                 dtype=theano.config.floatX),
                                     borrow=True)
    # n_visible matches the 1000 hidden units produced by the first layer
    rbm2 = RBM(input=x_first, n_visible=1000,
               n_hidden=1024, numpy_rng=rng, theano_rng=theano_rng)
    # PCD-15: persistent contrastive divergence with 15 Gibbs steps per update
    cost, updates = rbm2.get_cost_updates(lr=learning_rate,
                                          persistent=persistent_chain, k=15)
    train_rbm2 = theano.function([index], cost,
           updates=updates,
           givens={x_first: shared_x[index * batch_size:
                                  (index + 1) * batch_size]},
           name='train_rbm2')
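
Example #1 compiles train_rbm2 but never runs it. A minimal sketch of the second-layer training loop, mirroring the cA loop above (reusing training_epochs and n_train_batches for the RBM is an assumption):

    ############
    # TRAINING #
    ############

    # Sketch: train the second-layer RBM on the first layer's hidden codes
    for epoch in xrange(training_epochs):
        c = []
        for batch_index in xrange(n_train_batches):
            c.append(train_rbm2(batch_index))
        print 'Training epoch %d, cost is ' % epoch, numpy.mean(c)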
Example #2
def main(learning_rate=0.01, training_epochs=25,
            dataset='./data/mnist.pkl.gz',
            batch_size=10, contraction_level=.1):
    ## Define the logistic (sigmoid) function
    xi = T.dmatrix('xi')
    s = 1 / (1 + T.exp(-xi))
    logistic = theano.function([xi], s)

    ## Import the dataset
    datasets = load_data(dataset)
    train_set_x, train_set_y = datasets[0]
    trainx = train_set_x.get_value(borrow=True)
    trainy = train_set_y.owner.inputs[0].get_value(borrow=True)
    # The test set is needed by classify_with_SVM at the end of this example
    test_set_x, test_set_y = datasets[2]
    testx = test_set_x.get_value(borrow=True)
    testy = test_set_y.owner.inputs[0].get_value(borrow=True)

    # compute the number of minibatches for training
    n_train_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size

    # allocate symbolic variables for the data
    index = T.lscalar() # index to a [mini]batch
    x = T.matrix('x') # the data is presented as rasterized images

    ####################################
    # BUILDING THE FIRST LAYER MODEL #
    ####################################

    rng = numpy.random.RandomState(123)

    ca = cA(numpy_rng=rng, input=x,
            n_visible=28 * 28, n_hidden=1000, n_batchsize=batch_size)

    cost, updates = ca.get_cost_updates(contraction_level=contraction_level,
                                        learning_rate=learning_rate)

    train_ca = theano.function([index], [T.mean(ca.L_rec), ca.L_jacob],
                               updates=updates,
                               givens={x: train_set_x[index * batch_size:
                                                      (index + 1) * batch_size]})

    start_time = time.clock()

    ############
    # TRAINING #
    ############

    # go through training epochs
    for epoch in xrange(training_epochs):
        # go through the training set
        c = []
        for batch_index in xrange(n_train_batches):
            c.append(train_ca(batch_index))

        c_array = numpy.vstack(c)
        print 'Training epoch %d, reconstruction cost ' % epoch, numpy.mean(
            c_array[:, 0]), ' jacobian norm ', numpy.mean(numpy.sqrt(c_array[:, 1]))

    end_time = time.clock()

    training_time = (end_time - start_time)
    print 'First-layer training took %.2f minutes' % (training_time / 60.)

    
    #### Extract the weights from the first layer ####
    W = ca.W.get_value(borrow=True)
    b = ca.b.get_value(borrow=True)
    W_prime = ca.W_prime.owner.inputs[0].get_value(borrow=True)
    b_prime = ca.b_prime.get_value(borrow=True)
   
    #### Compute the output after the first layer ####
    trainxHidden = convert_many_to_Hidden(trainx, W=W, b=b, logistic=logistic)

    ####################################
    # BUILDING THE SECOND LAYER MODEL #
    ####################################
    x_first = T.matrix('x_first')
    shared_x = theano.shared(trainxHidden, borrow=True)
    # Stack a second contractive autoencoder on the first layer's hidden codes
    ca2 = cA(numpy_rng=rng, input=x_first,
             n_visible=1000, n_hidden=1000, n_batchsize=batch_size)
    cost, updates = ca2.get_cost_updates(contraction_level=contraction_level,
                                         learning_rate=learning_rate)

    train_ca2 = theano.function([index], [T.mean(ca2.L_rec), ca2.L_jacob],
                                updates=updates,
                                givens={x_first: shared_x[index * batch_size:
                                                          (index + 1) * batch_size]})

    ############
    # TRAINING #
    ############

    for epoch in xrange(training_epochs):
        # go through the training set
        c = []
        for batch_index in xrange(n_train_batches):
            c.append(train_ca2(batch_index))

        c_array = numpy.vstack(c)
        print 'Training epoch %d, reconstruction cost ' % epoch, numpy.mean(
            c_array[:, 0]), ' jacobian norm ', numpy.mean(numpy.sqrt(c_array[:, 1]))

    ### Extract the weights from the second layer ###
    W2 = ca2.W.get_value(borrow=True)
    b2 = ca2.b.get_value(borrow=True)
    W_prime2 = ca2.W_prime.owner.inputs[0].get_value(borrow=True)
    b_prime2 = ca2.b_prime.get_value(borrow=True)

    ### Calculate the output after the second layer ###
    trainxHidden2 = convert_many_to_Hidden(trainxHidden, W=W2, b=b2, logistic=logistic)

    ### Interpolations ###
    # Compare interpolation in raw pixel space (layer 0) with interpolation in
    # the first- and second-layer feature spaces, decoded back to pixels
    # through W_prime / b_prime (and W_prime2 / b_prime2 for the second layer)
    arr0, arrn0, arrInterp0 = raw_Example(trainx)
    arr1, arrn1 = K_nearest_Interp_One_Layer(trainxHidden, W_prime=W_prime,
                                             b_prime=b_prime, logistic=logistic)
    arrInterp1 = interp_first(W=W, W_prime=W_prime, b=b, b_prime=b_prime,
                              trainx=trainx, logistic=logistic)
    arr2, arrn2 = K_nearest_Interp_Two_Layer(trainxHidden2, trainx, logistic=logistic,
                                             W_prime=W_prime, W_prime2=W_prime2,
                                             b_prime=b_prime, b_prime2=b_prime2)
    arrInterp2 = interp_second(trainxHidden2, trainx, W_prime=W_prime,
                               W_prime2=W_prime2, b_prime=b_prime, b_prime2=b_prime2,
                               logistic=logistic)
    arr = numpy.concatenate((arr0, arr1, arr2), axis=0)
    arrn = numpy.concatenate((arrn0, arrn1, arrn2), axis=0)
    arrInterp = numpy.concatenate((arrInterp0, arrInterp1, arrInterp2), axis=0)
    image_arr = PIL.Image.fromarray(arr)
    image_arrn = PIL.Image.fromarray(arrn)
    image_arrInterp = PIL.Image.fromarray(arrInterp)
    image_arr.show()
    image_arrn.show()
    image_arrInterp.show()
    classify_with_SVM(trainx, trainy, testx, testy, W=W,
                      b=b, W2=W2, b2=b2, logistic=logistic)
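
Neither example defines convert_many_to_Hidden or classify_with_SVM. From their call sites, plausible reconstructions look roughly like the following; both are hypothetical sketches, not the authors' code (the scikit-learn SVM choice in particular is an assumption):

def convert_many_to_Hidden(x, W, b, logistic):
    # Hypothetical: propagate inputs through one layer, i.e. sigmoid(x.W + b),
    # reusing the compiled `logistic` Theano function
    return logistic(numpy.dot(x, W) + b)

def classify_with_SVM(trainx, trainy, testx, testy, W, b, W2, b2, logistic):
    # Hypothetical: map raw pixels through both learned layers, then fit an
    # SVM on the resulting features and report test accuracy
    from sklearn import svm
    train_feat = logistic(numpy.dot(logistic(numpy.dot(trainx, W) + b), W2) + b2)
    test_feat = logistic(numpy.dot(logistic(numpy.dot(testx, W) + b), W2) + b2)
    clf = svm.SVC()
    clf.fit(train_feat, trainy)
    print 'SVM test accuracy:', clf.score(test_feat, testy)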