Code example #1
File: run_nnet.py    Project: BenedicteLC/MLProj3
def test_net(train_set, train_labels, valid_set, valid_labels, test_set, learning_rate,
             decrease_constant, size, l2, l1, function):
    """
    Train and validate the neural net with
    a given set of parameters.
    Return the best model's output on the test set.
    """
    neuralNet = NeuralNetwork(lr=learning_rate, dc=decrease_constant, sizes=size, L2=l2, L1=l1,
                     seed=5678, tanh=function, n_epochs=10)
    
    n_classes = 10
    
    print "Training..."
    # Early stopping code
    best_val_error = np.inf # Begin with infinite error
    best_it = 0 # Iteration of the best neural net so far wrt valid error
    look_ahead = 5
    n_incr_error = 0
    for current_stage in range(1,500+1,1):
        
        # Stop training when the net has not improved for `look_ahead` epochs.
        if n_incr_error >= look_ahead:
            break
        neuralNet.n_epochs = current_stage
        neuralNet.train(train_set, train_labels, n_classes)
        n_incr_error += 1
        
        outputs, errors, accuracy = neuralNet.test(train_set, train_labels)
        print 'Epoch',current_stage,'|',
        print 'Training accuracy: ' + '%.3f'%accuracy+',', ' |',
        outputs, errors, accuracy = neuralNet.test(valid_set, valid_labels)
        print 'Validation accuracy: ' + '%.3f'%accuracy
        
        # Check if this model is better than the previous:
        error = 1.0 - accuracy
        if error < best_val_error:
            best_val_error = error
            best_it = current_stage
            n_incr_error = 0
            best_model = copy.deepcopy(neuralNet) # Save the model.
    
    # TODO: clear the train and valid sets to free memory.
    # Evaluate the best model on the test set.
    outputs = best_model.predict(test_set)
    return outputs
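The pattern shared by these examples is patience-based early stopping: train one more epoch at a time, keep the model with the lowest validation error, and stop once `look_ahead` epochs pass without improvement. Below is a minimal, generic sketch of that loop; `train_one_epoch` and `validation_error` are hypothetical callables standing in for `neuralNet.train(...)` and `1 - accuracy` on the validation set, not functions from the project.

import copy
import numpy as np

def fit_with_early_stopping(model, train_one_epoch, validation_error,
                            max_epochs=500, look_ahead=5):
    # Patience-based early stopping: snapshot the best model so far and stop
    # after `look_ahead` epochs without a new best validation error.
    best_val_error = np.inf
    best_model = None
    n_incr_error = 0  # epochs since the last improvement
    for epoch in range(1, max_epochs + 1):
        if n_incr_error >= look_ahead:
            break
        train_one_epoch(model)
        n_incr_error += 1
        error = validation_error(model)
        if error < best_val_error:
            best_val_error = error
            n_incr_error = 0
            best_model = copy.deepcopy(model)
    return best_model, best_val_error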
Code example #2
File: run_nnet.py    Project: BenedicteLC/MLProj3
def validate_net(train_set, train_labels, valid_set, valid_labels, learning_rate,
                 decrease_constant, size, l2, l1, function):
    """
    Train and validate the neural net with
    a given set of parameters.
    Return the best validation accuracy and the
    corresponding training accuracy.
    """
    neuralNet = NeuralNetwork(lr=learning_rate, dc=decrease_constant, sizes=size, L2=l2, L1=l1,
                     seed=5678, tanh=function, n_epochs=10)
    
    n_classes = 10
    
    print "Training..."
    # Early stopping code @Hugo Larochelle (partially)
    best_val_error = np.inf # Begin with infinite error
    best_it = 0 # Iteration of the best neural net so far wrt valid error
    look_ahead = 5
    n_incr_error = 0
    for current_stage in range(1,500+1,1):
        
        # Stop training when the net has not improved for `look_ahead` epochs.
        if n_incr_error >= look_ahead:
            break
        neuralNet.n_epochs = current_stage
        neuralNet.train(train_set, train_labels, n_classes)
        n_incr_error += 1
        
        outputs, errors, train_accuracy = neuralNet.test(train_set, train_labels)
        print 'Epoch',current_stage,'|',
        print 'Training accuracy: ' + '%.3f'%train_accuracy+',', ' |',
        outputs, errors, valid_accuracy = neuralNet.test(valid_set, valid_labels)
        print 'Validation accuracy: ' + '%.3f'%valid_accuracy
        
        # Check if this model is better than the previous:
        error = 1.0 - valid_accuracy
        if error < best_val_error:
            best_val_error = error
            best_train_accuracy = train_accuracy            
            n_incr_error = 0
    
    return 1 - best_val_error, best_train_accuracy
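Since `validate_net` returns the validation accuracy obtained for one hyperparameter setting, it lends itself to a simple grid search. The driver below is a hypothetical sketch, not code from the project; the data variables and the candidate values are placeholders.

best_acc, best_setting = 0.0, None
for lr in [0.1, 0.01, 0.001]:            # candidate learning rates (placeholders)
    for l2 in [0.0, 0.001]:              # candidate L2 penalties (placeholders)
        # size and function are assumed to be the hidden-layer sizes and the
        # tanh flag passed through to NeuralNetwork; values here are placeholders.
        valid_acc, train_acc = validate_net(train_set, train_labels,
                                            valid_set, valid_labels,
                                            learning_rate=lr, decrease_constant=0.0,
                                            size=[100], l2=l2, l1=0.0, function=True)
        if valid_acc > best_acc:
            best_acc, best_setting = valid_acc, (lr, l2)
print("Best validation accuracy: %.3f with (lr, l2) = %s" % (best_acc, best_setting))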
Code example #3
                         parameter_initialization=(pretrained_bs,pretrained_Ws))

print "Fine-tuning..."
# Early stopping code
best_val_error = np.inf
best_it = 0
str_header = 'best_it\t'
look_ahead = 5
n_incr_error = 0
for stage in range(1,500+1,1):
    if n_incr_error >= look_ahead:
        break
    myObject.n_epochs = stage
    myObject.train(trainset)
    n_incr_error += 1
    outputs, costs = myObject.test(trainset)
    errors = np.mean(costs,axis=0)
    print 'Epoch',stage,'|',
    print 'Training errors: classif=' + '%.3f'%errors[0]+',', 'NLL='+'%.3f'%errors[1] + ' |',
    outputs, costs = myObject.test(validset)
    errors = np.mean(costs,axis=0)
    print 'Validation errors: classif=' + '%.3f'%errors[0]+',', 'NLL='+'%.3f'%errors[1]
    error = errors[0]
    if error < best_val_error:
        best_val_error = error
        best_it = stage
        n_incr_error = 0
        best_model = copy.deepcopy(myObject)

outputs_tr,costs_tr = best_model.test(trainset)
columnCount = len(costs_tr.__iter__().next())
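For reference, `test()` in this example is assumed to return one (classification error, NLL) row per example, so `np.mean(costs, axis=0)` collapses those rows into the two per-epoch figures that get printed. A small illustration with made-up values:

import numpy as np

costs = np.array([[0.0, 0.21],   # example 1: correct, NLL 0.21
                  [1.0, 1.35],   # example 2: misclassified, NLL 1.35
                  [0.0, 0.08]])  # example 3: correct, NLL 0.08
errors = np.mean(costs, axis=0)  # column-wise mean -> [0.333, 0.547]
print('classif=%.3f, NLL=%.3f' % (errors[0], errors[1]))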
Code example #4
        nnet = NeuralNetwork()
        nnet.train(file_name, model_file, epochs=10000)
    elif model == 'best':
        best = Best()
        best.train(file_name, model_file, epochs=10000)
    else:
        print 'Specified model not found!!'
else:
    if model in ('nearest', 'nnet', 'best'):
        if model_file.endswith('.txt'):
            model_file = model_file + '.npy'

    if model == 'nearest':
        knn = Knn()
        knn.test(file_name, model_file)
    elif model == 'nnet':
        nnet = NeuralNetwork()
        nnet.test(file_name, model_file)
    elif model == 'best':
        best = Best()
        best.test(file_name, model_file)
    elif model == 'adaboost':
        trial = adaboost.Adaboost(None, file_name)
        trial.testing = file_name
        test_rotation = trial.prepare_data(trial.testing)[1]
        learners = [trial.learner1, trial.learner2]
        print "Accuracy on test set: ", trial.get_accuarcy(
            test_rotation, trial.test(file_name, model_file)), " %"
    else:
        print 'Specified model not found!!'
Code example #5
    def test(self, file_name, model_file, output_file="best_output.txt"):
        nnet = NeuralNetwork()
        nnet.test(file_name, model_file, output_file)