np_rng=rng)
          
          
#%%============================================================================
# Training the RBM
#==============================================================================


# --- bookkeeping for the epoch loop below ------------------------------------
start_time = timeit.default_timer()  # wall-clock start of RBM training
accuracies = []  # NOTE(review): presumably collects the per-epoch test accuracy
                 # computed below — the append is not visible in this chunk; confirm.
argmax_acc = 0   # NOTE(review): presumably the best accuracy (or the epoch index
                 # achieving it) seen so far — usage not visible here; confirm.
for epoch in xrange(training_epochs):
    epoch_time = timeit.default_timer()
    mean_cost = []
    for batch_index, batch in enumerate(batches):
        rbm.update(batch, persistent=True, k=k)
        sys.stdout.write("\rEpoch advancement: %d%%" % (100*float(batch_index)/len(batches)))
        sys.stdout.flush() 
    # Training Logistic regression
    sys.stdout.write("\rTraining softmax...")
    sm_time = timeit.default_timer()
    softmax_classifier = LogisticRegression(penalty='l1', 
                                            C=1.0, 
                                            solver='lbfgs', 
                                            multi_class='multinomial')
    softmax_classifier.fit(rbm.propup(train_set[:,n_labels:], np.ones((len(train_set),n_hidden))),
                           train_labels)
    sys.stdout.write('\rSoftmax trained in %f minutes.\n' % ((timeit.default_timer()-sm_time) / 60.))
    sys.stdout.write("Evaluating accuracy...")
    cv_time = timeit.default_timer()
    acc = softmax_classifier.score(rbm.propup(test_set[:,n_labels:], np.ones((len(test_set),n_hidden))), 
# =============================================================================
# 示例 #2 (Example #2): a second, alternative training-loop variant follows.
# (Extraction artifact: the statement on the preceding line was truncated
# mid-call by the page scraper; this marker replaces the raw separator text.)
# =============================================================================
    # Training the RBM
    #==========================================================================    
    # Model-selection bookkeeping (the enclosing scope — likely a search over
    # hyperparameter candidates — is not visible in this chunk).
    max_score = -np.inf  # best score seen so far across candidate models
    # NOTE(review): despite the name, argmax_score is initialised with an RBM
    # *instance* (the arg-max model, not a score) — confirm naming downstream.
    argmax_score = RBM(n_visible=n_visible,
                       n_labels=n_labels,
                       n_hidden=n_hidden, 
                       dropout_rate=dropout_rate,
                       batch_size=batch_size,
                       np_rng=rng)

    start_time = timeit.default_timer()  # wall-clock start of this training run
    for epoch in xrange(training_epochs):
        epoch_time = timeit.default_timer()
        mean_cost = []
        for batch_index in xrange(n_train_batches):
            rbm.update(np_train_set[batch_index*batch_size:(batch_index+1)*batch_size,:], persistent=True, k=k)
            sys.stdout.write("\rEpoch advancement: %d%%" % (100*float(batch_index)/n_train_batches))
            sys.stdout.flush()
        sys.stdout.write("\rEvaluating accuracy...")
        sys.stdout.flush()
        cv_time = timeit.default_timer()
        acc = rbm.cv_accuracy(np_test_set)
        sys.stdout.write('''\rEpoch %i took %f minutes, 
                         accuracy (computed in %f minutes) is %f.\n'''
            % (epoch, 
               ((cv_time-epoch_time) / 60.), 
               ((timeit.default_timer()-cv_time) / 60.),
               acc))
            
        if scoring=='cost':
            score = np.mean(mean_cost)