write_file = open(parapath, 'wb')
    for parai in paramlist:
        cPickle.dump(parai.get_value(borrow=True), write_file, -1)
    write_file.close()


###
def show_params():
    parapath = '/home/yr/...'
    f = open(parapath, 'rb')
    w1 = cPickle.load(f)
    b1 = cPickle.load(f)
    w2 = cPickle.load(f)
    b2 = cPickle.load(f)

    print np.shape(w1), np.shape(b1)
    print np.shape(w2), np.shape(b2)
    f.close()


#####
if __name__ == '__main__':
    # Build the training dataset [x, y] from the two image directories.
    train_x, train_y = im2arr(datapath0, datapath1)
    train_set = load_data(train_x, train_y)
    # Build the test dataset the same way from the held-out directories.
    test_x, test_y = im2arr(datapath00, datapath11)
    test_set = load_data(test_x, test_y)

    # No separate validation split: the training set doubles as the
    # validation set in the [train, valid, test] triple.
    evaluate_lenet5([train_set, train_set, test_set])
    #show_params()
# --- Exemplo n.º 2 ---
def test_dA(learning_rate=0.1,
            training_epochs=5,
            dataset=datapath,
            batch_size=20,
            output_folder='/home/yr/theanoExercise/dA_plots'):
    """
    This demo is tested on MNIST

    :type learning_rate: float
    :param learning_rate: learning rate used for training the DeNosing
                          AutoEncoder

    :type training_epochs: int
    :param training_epochs: number of epochs used for training

    :type dataset: string
    :param dataset: path to the picked dataset

    """
    datasets = load_data(dataset)
    train_set_x, train_set_y = datasets[0]

    # compute number of minibatches for training, validation and testing
    n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size

    # allocate symbolic variables for the data
    index = T.lscalar()  # index to a [mini]batch
    x = T.matrix('x')  # the data is presented as rasterized images
    '''
    if not os.path.isdir(output_folder):
        os.makedirs(output_folder)
    os.chdir(output_folder)
    '''
    ####################################
    # BUILDING THE MODEL NO CORRUPTION #
    ####################################

    rng = numpy.random.RandomState(123)
    theano_rng = RandomStreams(rng.randint(2**30))

    da = dA(numpy_rng=rng,
            theano_rng=theano_rng,
            input=x,
            n_visible=28 * 28,
            n_hidden=2000)

    cost, updates = da.get_cost_updates(corruption_level=0.,
                                        learning_rate=learning_rate)

    train_da = theano.function(
        [index],
        cost,
        updates=updates,
        givens={x: train_set_x[index * batch_size:(index + 1) * batch_size]})

    start_time = time.clock()

    ############
    # TRAINING #
    ############

    # go through training epochs
    for epoch in xrange(training_epochs):
        # go through trainng set
        c = []
        for batch_index in xrange(n_train_batches):
            c.append(train_da(batch_index))
            #print batch_index
        print 'Training epoch %d, cost ' % epoch, numpy.mean(c)

    end_time = time.clock()

    training_time = (end_time - start_time)

    print >> sys.stderr, ('The no corruption code for file ' +
                          os.path.split(__file__)[1] + ' ran for %.2fm' %
                          ((training_time) / 60.))

    ####save w
    import cPickle
    params = '/home/yr/theanoExercise/autoEncoder/para3'
    write_file = open(params, 'wb')
    cPickle.dump(da.W.get_value(borrow=True), write_file, -1)
    write_file.close()

    #####################################
    # BUILDING THE MODEL CORRUPTION 30% #
    #####################################

    rng = numpy.random.RandomState(123)
    theano_rng = RandomStreams(rng.randint(2**30))

    da = dA(numpy_rng=rng,
            theano_rng=theano_rng,
            input=x,
            n_visible=28 * 28,
            n_hidden=2000)

    cost, updates = da.get_cost_updates(corruption_level=0.3,
                                        learning_rate=learning_rate)

    train_da = theano.function(
        [index],
        cost,
        updates=updates,
        givens={x: train_set_x[index * batch_size:(index + 1) * batch_size]})

    start_time = time.clock()

    ############
    # TRAINING #
    ############

    # go through training epochs
    for epoch in xrange(training_epochs):
        # go through trainng set
        c = []
        for batch_index in xrange(n_train_batches):
            c.append(train_da(batch_index))

        print 'Training epoch %d, cost ' % epoch, numpy.mean(c)

    end_time = time.clock()

    training_time = (end_time - start_time)

    print >> sys.stderr, ('The 30% corruption code for file ' +
                          os.path.split(__file__)[1] + ' ran for %.2fm' %
                          (training_time / 60.))

    ####save w
    import cPickle
    params = '/home/yr/theanoExercise/autoEncoder/para4'
    write_file = open(params, 'wb')
    cPickle.dump(da.W.get_value(borrow=True), write_file, -1)
    write_file.close()
### 
def show_params():
    parapath='/home/yr/...'
    f=open(parapath,'rb')
    w1=cPickle.load(f)
    b1=cPickle.load(f)
    w2=cPickle.load(f)
    b2=cPickle.load(f)

    print np.shape(w1),np.shape(b1)
    print np.shape(w2),np.shape(b2)
    f.close()
#####
if __name__ == '__main__':
    # Training split: convert the image folders into arrays, then wrap
    # them as a dataset pair [x, y].
    xs, ys = im2arr(datapath0, datapath1)
    train_set = load_data(xs, ys)
    # Test split, built identically from the other folders.
    xs, ys = im2arr(datapath00, datapath11)
    test_set = load_data(xs, ys)

    # The training set is reused as the validation set.
    evaluate_lenet5([train_set, train_set, test_set])
    #show_params()


	
			




# --- Exemplo n.º 4 ---
def test_dA(learning_rate=0.1, training_epochs=5,
            dataset=datapath,
            batch_size=20, output_folder='/home/yr/theanoExercise/dA_plots'):

    """
    This demo is tested on MNIST

    :type learning_rate: float
    :param learning_rate: learning rate used for training the DeNosing
                          AutoEncoder

    :type training_epochs: int
    :param training_epochs: number of epochs used for training

    :type dataset: string
    :param dataset: path to the picked dataset

    """
    datasets = load_data(dataset)
    train_set_x, train_set_y = datasets[0]

    # compute number of minibatches for training, validation and testing
    n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size

    # allocate symbolic variables for the data
    index = T.lscalar()    # index to a [mini]batch
    x = T.matrix('x')  # the data is presented as rasterized images

    '''
    if not os.path.isdir(output_folder):
        os.makedirs(output_folder)
    os.chdir(output_folder)
    '''
    ####################################
    # BUILDING THE MODEL NO CORRUPTION #
    ####################################

    rng = numpy.random.RandomState(123)
    theano_rng = RandomStreams(rng.randint(2 ** 30))

    da = dA(
        numpy_rng=rng,
        theano_rng=theano_rng,
        input=x,
        n_visible=28 * 28,
        n_hidden=2000
    )

    cost, updates = da.get_cost_updates(
        corruption_level=0.,
        learning_rate=learning_rate
    )

    train_da = theano.function(
        [index],
        cost,
        updates=updates,
        givens={
            x: train_set_x[index * batch_size: (index + 1) * batch_size]
        }
    )

    start_time = time.clock()

    ############
    # TRAINING #
    ############

    # go through training epochs
    for epoch in xrange(training_epochs):
        # go through trainng set
        c = []
        for batch_index in xrange(n_train_batches):
            c.append(train_da(batch_index))
            #print batch_index
        print 'Training epoch %d, cost ' % epoch, numpy.mean(c)

    end_time = time.clock()

    training_time = (end_time - start_time)

    print >> sys.stderr, ('The no corruption code for file ' +
                          os.path.split(__file__)[1] +
                          ' ran for %.2fm' % ((training_time) / 60.))
   

    ####save w
    import cPickle
    params='/home/yr/theanoExercise/autoEncoder/para3'
    write_file=open(params,'wb')
    cPickle.dump(da.W.get_value(borrow=True),write_file,-1)
    write_file.close()
                
	
    #####################################
    # BUILDING THE MODEL CORRUPTION 30% #
    #####################################

    rng = numpy.random.RandomState(123)
    theano_rng = RandomStreams(rng.randint(2 ** 30))

    da = dA(
        numpy_rng=rng,
        theano_rng=theano_rng,
        input=x,
        n_visible=28 * 28,
        n_hidden=2000
    )

    cost, updates = da.get_cost_updates(
        corruption_level=0.3,
        learning_rate=learning_rate
    )

    train_da = theano.function(
        [index],
        cost,
        updates=updates,
        givens={
            x: train_set_x[index * batch_size: (index + 1) * batch_size]
        }
    )

    start_time = time.clock()

    ############
    # TRAINING #
    ############

    # go through training epochs
    for epoch in xrange(training_epochs):
        # go through trainng set
        c = []
        for batch_index in xrange(n_train_batches):
            c.append(train_da(batch_index))

        print 'Training epoch %d, cost ' % epoch, numpy.mean(c)

    end_time = time.clock()

    training_time = (end_time - start_time)

    print >> sys.stderr, ('The 30% corruption code for file ' +
                          os.path.split(__file__)[1] +
                          ' ran for %.2fm' % (training_time / 60.))
    
    ####save w
    import cPickle
    params='/home/yr/theanoExercise/autoEncoder/para4'
    write_file=open(params,'wb')
    cPickle.dump(da.W.get_value(borrow=True),write_file,-1)
    write_file.close()