Example #1
################################################################################
# TODO:                                                                        #
# Use the validation set to set the learning rate and regularization strength. #
# This should be identical to the validation that you did for the SVM; save    #
# the best trained softmax classifier in best_softmax.                         #
################################################################################
params = [(x, y) for x in learning_rates for y in regularization_strengths]
for lrate, regular in params:
    softmax = Softmax()
    loss_hist = softmax.train(X_train,
                              y_train,
                              learning_rate=lrate,
                              reg=regular,
                              num_iters=700,
                              verbose=True)
    y_train_pred = softmax.predict(X_train)
    accuracy_train = np.mean(y_train == y_train_pred)
    y_val_pred = softmax.predict(X_val)
    accuracy_val = np.mean(y_val == y_val_pred)
    results[(lrate, regular)] = (accuracy_train, accuracy_val)
    if (best_val < accuracy_val):
        best_val = accuracy_val
        best_softmax = softmax
################################################################################
#                              END OF YOUR CODE                                #
################################################################################

# Print out results.
for lr, reg in sorted(results):
    train_accuracy, val_accuracy = results[(lr, reg)]
    print('lr %e reg %e train accuracy: %f val accuracy: %f' %
          (lr, reg, train_accuracy, val_accuracy))

Example #2
best_softmax = None

################################################################################
# Softmax
################################################################################
for _l in learning_rates:
    for _r in regularization_strengths:
        softmax = Softmax()
        loss_hist = softmax.train(X2_train_feats,
                                  y_train,
                                  learning_rate=_l,
                                  reg=_r,
                                  num_iters=1500,
                                  verbose=False)

        y_train_pred = softmax.predict(X2_train_feats)
        train_accuracy = np.mean(y_train == y_train_pred)

        y_val_pred = softmax.predict(X2_val_feats)
        val_accuracy = np.mean(y_val == y_val_pred)
        print("train accuracy:{0}, val accuracy : {1}".format(
            train_accuracy, val_accuracy))

        results[(_l, _r)] = (train_accuracy, val_accuracy)
        if (val_accuracy > best_val):
            best_val = val_accuracy
            best_softmax = softmax

# Print out results.
for lr, reg in sorted(results):
    train_accuracy, val_accuracy = results[(lr, reg)]
    print('lr %e reg %e train accuracy: %f val accuracy: %f' %
          (lr, reg, train_accuracy, val_accuracy))
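Unlike the other snippets, Example #2 trains on X2_train_feats instead of raw pixels. In the course's features notebook those matrices come from a pipeline like the sketch below; the function names from cs231n.features are assumptions based on the standard assignment code, so check your local copy:

from cs231n.features import extract_features, hog_feature, color_histogram_hsv

# Assumed feature pipeline: HOG plus an HSV color histogram per image.
num_color_bins = 10
feature_fns = [hog_feature, lambda img: color_histogram_hsv(img, nbin=num_color_bins)]
X2_train_feats = extract_features(X_train, feature_fns, verbose=True)
X2_val_feats = extract_features(X_val, feature_fns)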
Example #3
################################################################################
# Your code
################################################################################
# sample a few values across each range (np.linspace(a, b, 1) yields only a,
# which would make the grid search below degenerate)
lr_sample = np.linspace(learning_rates[0], learning_rates[1], 5)
reg_sample = np.linspace(regularization_strengths[0], regularization_strengths[1], 5)

tic = time.time()

for s1 in lr_sample:
    for s2 in reg_sample:
        soft = Softmax()
        # train a Softmax model for the current (learning rate, regularization) pair
        soft.train(X_train, y_train, learning_rate=s1, reg=s2, num_iters=1500, verbose=False)
        y_train_predicted = soft.predict(X_train)
        # accuracy of the trained model on the training set
        train_accuracy = np.mean(y_train_predicted == y_train)
        # evaluate the trained model on the held-out validation set
        y_val_predicted = soft.predict(X_val)
        # accuracy of the trained model on the validation set
        val_accuracy = np.mean(y_val_predicted == y_val)
        
        results[(s1, s2)] = (train_accuracy, val_accuracy)
        if val_accuracy > best_val:
            best_val = val_accuracy
            best_softmax = soft
toc = time.time()
print('\n===================================================================================')
# Print out results.
for lr, reg in sorted(results):
    train_accuracy, val_accuracy = results[(lr, reg)]
    print('lr %e reg %e train accuracy: %f val accuracy: %f' %
          (lr, reg, train_accuracy, val_accuracy))
Example #4
File: sofe.py  Project: eejackliu/my1
learning_rates = [1e-7, 5e-7]
regularization_strengths = [5e4, 1e8]

################################################################################
# TODO:                                                                        #
# Use the validation set to set the learning rate and regularization strength. #
# This should be identical to the validation that you did for the SVM; save    #
# the best trained softmax classifier in best_softmax.                         #
################################################################################

for i in learning_rates:
    for j in regularization_strengths:
        soft = Softmax()
        soft.train(X_train, y_train, learning_rate=i, reg=j, num_iters=1500, verbose=True)
        pred = soft.predict(X_val)
        val_accuracy = np.mean(pred == y_val)
        pred_train = soft.predict(X_train)
        train_accuracy = np.mean(pred_train == y_train)
        results[(i, j)] = (train_accuracy, val_accuracy)
        if val_accuracy > best_val:
            best_softmax = soft
            best_val = val_accuracy

################################################################################
#                              END OF YOUR CODE                                #
################################################################################

# Print out results.
for lr, reg in sorted(results):
    train_accuracy, val_accuracy = results[(lr, reg)]
    print('lr %e reg %e train accuracy: %f val accuracy: %f' %
          (lr, reg, train_accuracy, val_accuracy))
Example #5
# same ranges as for the SVM
learning_rates = [1e-7, 5e-8]
regularization_strengths = [5e4, 5e5]
################################################################################
# TODO:                                                                        #
# Use the validation set to set the learning rate and regularization strength. #
# This should be identical to the validation that you did for the SVM; save    #
# the best trained softmax classifier in best_softmax.                         #
################################################################################
import itertools
for lr, reg in itertools.product(learning_rates, regularization_strengths):
    sm = Softmax()
    sm.train(X_train, y_train, lr, reg, num_iters=1500)

    y_pred = sm.predict(X_train)
    train_accuracy = np.mean(y_pred == y_train)

    y_pred = sm.predict(X_val)
    val_accuracy = np.mean(y_pred == y_val)

    results[(lr, reg)] = (train_accuracy, val_accuracy)
    if val_accuracy > best_val:
        best_val = val_accuracy
        best_softmax = sm

################################################################################
#                              END OF YOUR CODE                                #
################################################################################

# Print out results.
for lr, reg in sorted(results):
    train_accuracy, val_accuracy = results[(lr, reg)]
    print('lr %e reg %e train accuracy: %f val accuracy: %f' %
          (lr, reg, train_accuracy, val_accuracy))
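Example #5's itertools.product is equivalent to the nested loops used in the other snippets; a quick illustration with the ranges above:

import itertools

# product() yields every (lr, reg) combination, same as two nested for-loops:
pairs = list(itertools.product([1e-7, 5e-8], [5e4, 5e5]))
# -> [(1e-07, 50000.0), (1e-07, 500000.0), (5e-08, 50000.0), (5e-08, 500000.0)]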
Example #6
import h5py
import numpy as np
from numpy import loadtxt
# softmax_loss_vectorized is needed for the loss check below
from cs231n.classifiers import Softmax, softmax_loss_vectorized
h5f = h5py.File('img_data.h5','r')
X = h5f['dataset_1'][:]
h5f.close()
y = loadtxt("y_labels.txt", dtype=np.uint8, delimiter="\n", unpack=False)

#X_train = np.zeros((27116,196608))
#y_train = np.zeros(27116)
#X_val = np.zeros((5000,196608))
#y_val = np.zeros(5000)

X_train = X[8000:35117, :]
y_train = y[8000:35117]
X_val = X[3000:8000, :]
y_val = y[3000:8000]
# Generate a random softmax weight matrix and use it to compute the loss.
W = np.random.randn(196608, 5) * 0.0001
loss_vectorized, grad_vectorized = softmax_loss_vectorized(W, X_train, y_train, 0.00001)
softmax = Softmax()
loss_hist = softmax.train(X_train, y_train, learning_rate=1e-7, reg=5e4,
                          num_iters=1500, verbose=False)
y_train_pred = softmax.predict(X_train)
training_accuracy = np.mean(y_train == y_train_pred)
y_val_pred = softmax.predict(X_val)
val_accuracy = np.mean(y_val == y_val_pred)
print('training accuracy: %f' % training_accuracy)
print('validation accuracy: %f' % val_accuracy)
Example #7
regularization_strengths = [5e-4, 1e-8]

################################################################################
# TODO:                                                                        #
# Use the validation set to set the learning rate and regularization strength. #
# This should be identical to the validation that you did for the SVM; save    #
# the best trained softmax classifier in best_softmax.                         #
################################################################################
for lr_use in learning_rates:
    for reg_use in regularization_strengths:
        sfmx = Softmax()
        loss_hist = sfmx.train(X_dev, y_dev, learning_rate=lr_use, reg=reg_use,
                               num_iters=1500, verbose=True)
        y_train_pred = sfmx.predict(X_dev)
        y_val_pred = sfmx.predict(X_val)

        # compare against y_dev, since the model is trained and evaluated on X_dev
        acc_train = np.mean(y_dev == y_train_pred)
        acc_val = np.mean(y_val == y_val_pred)
        if acc_val > best_val:
            best_lr = lr_use
            best_reg = reg_use
            best_val = acc_val
            # the TODO asks for the best model to be saved in best_softmax
            best_softmax = sfmx
        results[(lr_use, reg_use)] = (acc_train, acc_val)
################################################################################
#                              END OF YOUR CODE                                #
################################################################################
    
Example #8
################################################################################
# TODO:                                                                        #
# Use the validation set to set the learning rate and regularization strength. #
# This should be identical to the validation that you did for the SVM; save    #
# the best trained softmax classifier in best_softmax.                         #
################################################################################

for lr in np.linspace(learning_rates[0], learning_rates[1], 5):
    for rs in np.linspace(regularization_strengths[0],
                          regularization_strengths[1], 5):

        softmax = Softmax()
        loss_hist = softmax.train(X_train,
                                  y_train,
                                  learning_rate=lr,
                                  reg=rs,
                                  num_iters=1500,
                                  verbose=True)
        train_accuracy = np.mean(y_train == softmax.predict(X_train))
        val_accuracy = np.mean(y_val == softmax.predict(X_val))
        if best_val < val_accuracy:
            best_val = val_accuracy
            best_softmax = softmax

        results[(lr, rs)] = train_accuracy, val_accuracy

################################################################################
#                              END OF YOUR CODE                                #
################################################################################

# Print out results.
for lr, reg in sorted(results):
    train_accuracy, val_accuracy = results[(lr, reg)]
    print('lr %e reg %e train accuracy: %f val accuracy: %f' %
          (lr, reg, train_accuracy, val_accuracy))
Example #9
# (training_accuracy, validation_accuracy). The accuracy is simply the fraction
# of data points that are correctly classified.
results = {}
best_val = -1  # The highest validation accuracy that we have seen so far.
best_softmax = None  # The Softmax object that achieved the highest validation rate.

for lr in learning_rates:
    for reg in regularization_strengths:
        sm = Softmax()
        loss_hist = sm.train(X_train,
                             y_train,
                             learning_rate=lr,
                             reg=reg,
                             num_iters=1500,
                             verbose=False)
        y_train_pred = sm.predict(X_train)
        tr_acc = np.mean(y_train == y_train_pred)
        #print('training accuracy: %f' % (tr_acc,))
        y_val_pred = sm.predict(X_val)
        val_acc = np.mean(y_val == y_val_pred)
        #print('validation accuracy: %f' % (val_acc,))
        results[(lr, reg)] = (tr_acc, val_acc)
        if val_acc > best_val:
            best_val = val_acc
            best_softmax = sm

# Print out results.
for lr, reg in sorted(results):
    train_accuracy, val_accuracy = results[(lr, reg)]
    print('lr %e reg %e train accuracy: %f val accuracy: %f' %
          (lr, reg, train_accuracy, val_accuracy))
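Every snippet above relies on Softmax.train, which cs231n's Softmax inherits from LinearClassifier. For orientation, here is a minimal sketch of that SGD loop; the loss(X_batch, y_batch, reg) interface and the model.W update are assumptions based on the course's LinearClassifier, not code taken from any snippet:

import numpy as np

def train_sketch(model, X, y, learning_rate, reg, num_iters, batch_size=200):
    # Sample a minibatch, ask the model for (loss, gradient), take an SGD step.
    num_train = X.shape[0]
    loss_history = []
    for _ in range(num_iters):
        idx = np.random.choice(num_train, batch_size, replace=True)
        loss, grad = model.loss(X[idx], y[idx], reg)  # assumed interface
        loss_history.append(loss)
        model.W -= learning_rate * grad  # vanilla gradient descent update
    return loss_history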
Example #10
################################################################################
# TODO:                                                                        #
# Use the validation set to set the learning rate and regularization strength. #
# This should be identical to the validation that you did for the SVM; save    #
# the best trained softmax classifier in best_softmax.                         #
################################################################################
print('-------------_*****--------------')
print('Tuning Parameters: ')
for lr in np.arange(0.0000001, 0.0000005, 0.00000005):
    for reg in np.arange(5e4, 1e8, 20000):
        sm_iter = Softmax()
        sm_iter.train(X_train,
                      y_train,
                      learning_rate=lr,
                      reg=reg,
                      num_iters=3000,
                      verbose=True)
        y_val_pred_iter = sm_iter.predict(X_val)
        y_train_pred_iter = sm_iter.predict(X_train)
        val_acc = np.mean(y_val == y_val_pred_iter)
        train_acc = np.mean(y_train == y_train_pred_iter)
        results[(lr, reg)] = (train_acc, val_acc)  # tuple mapping
        print('Validation accuracy: %f' % val_acc)
        if (val_acc > best_val):
            best_val = val_acc
            best_softmax = sm_iter
            print('Best so far: %f' % val_acc)
################################################################################
#                              END OF YOUR CODE                                #
################################################################################

# Print out results.
for lr, reg in sorted(results):
    train_accuracy, val_accuracy = results[(lr, reg)]
    print('lr %e reg %e train accuracy: %f val accuracy: %f' %
          (lr, reg, train_accuracy, val_accuracy))
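Example #10's np.arange(5e4, 1e8, 20000) grid enumerates roughly five thousand regularization values, each requiring a 3000-iteration training run. A cheaper alternative (not part of the original snippet) is random search over log space, e.g.:

import numpy as np

# Hypothetical random search: sampling exponents uniformly spreads trials
# evenly across orders of magnitude instead of over a huge linear grid.
for _ in range(10):
    lr = 10 ** np.random.uniform(-7, -5)   # learning rate in [1e-7, 1e-5]
    reg = 10 ** np.random.uniform(4, 8)    # regularization in [1e4, 1e8]
    # ...then train and evaluate exactly as in the loop above...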
Example #11
learning_rates = [1e-7, 5e-7, 1e-6, 5e-6, 1e-5, 5e-5]
regularization_strengths = [1e4, 2e4, 2.5e4, 3e4, 4e4, 5e4]

import itertools

for learn, regul in itertools.product(learning_rates,
                                      regularization_strengths):
    # Instantiate a fresh model for each setting; reusing a single Softmax()
    # across iterations would leave best_softmax pointing at whatever weights
    # the final training run produced.
    softmax_model = Softmax()
    loss_hist = softmax_model.train(X_train,
                                    y_train,
                                    learning_rate=learn,
                                    reg=regul,
                                    num_iters=150,
                                    verbose=False)
    y_train_pred = softmax_model.predict(X_train)
    y_val_pred = softmax_model.predict(X_val)
    train_accur = np.mean(y_train == y_train_pred)
    val_accur = np.mean(y_val == y_val_pred)
    results[learn, regul] = train_accur, val_accur
    if val_accur > best_val:
        best_val = val_accur
        best_softmax = softmax_model

for lr, reg in sorted(results):
    train_accuracy, val_accuracy = results[(lr, reg)]
    print('lr %e reg %e train accuracy: %f val accuracy: %f' %
          (lr, reg, train_accuracy, val_accuracy))

print('best validation accuracy achieved during cross-validation: %f' %
      best_val)
Example #12
import matplotlib.pyplot as plt  # needed for the loss curve below

X_test = np.reshape(X_test, (X_test.shape[0], -1))  # flatten each image to 1-D
X_test = np.hstack([X_test, np.ones([X_test.shape[0], 1])])  # bias trick: append a column of ones
num_class = 10
W = np.random.randn(X_train.shape[1], num_class) * 0.001
# check the numerical gradient against the analytic gradient
from cs231n.classifiers import softmax_loss_naive, softmax_loss_vectorized
loss, grad = softmax_loss_naive(W, X_train, y_train, 0.5)
from cs231n.gradient_check import grad_check_sparse
f = lambda w: softmax_loss_vectorized(w, X_train, y_train, 0.5)[0]
grad_check_sparse(f, W, grad)
from cs231n.classifiers import Softmax
classifer = Softmax()
loss_hist = classifer.train(X_train,
                            y_train,
                            verbose=True,
                            num_iters=5000,
                            batch_size=100)
plt.plot(loss_hist)
plt.xlabel('Step')
plt.ylabel('Loss')
plt.show()
# test (generalization) accuracy
y_pred = classifer.predict(X_test)
accuracy = np.mean(y_pred == y_test)
print("Test accuracy:", accuracy)

# from cs231n.classifiers import KNearestNeighbor, KNN_test, KNN_train
# classifer = KNearestNeighbor()
# KNN_train(classifer, X_train, y_train)  # train
# KNN_test(classifer, X_test, y_test)  # test accuracy
################################################################################
# TODO:                                                                        #
# Use the validation set to set the learning rate and regularization strength. #
# This should be identical to the validation that you did for the SVM; save    #
# the best trained softmax classifier in best_softmax.                         #
################################################################################
for alpha in learning_rates:
    for lam in regularization_strengths:
        softmax = Softmax()
        loss_hist = softmax.train(X_train,
                                  y_train,
                                  learning_rate=alpha,
                                  reg=lam,
                                  num_iters=500,
                                  verbose=True)
        y_tr_pred = softmax.predict(X_train)
        training_accuracy = float(np.mean(y_tr_pred == y_train))
        y_cv_pred = softmax.predict(X_val)
        cv_accuracy = float(np.mean(y_cv_pred == y_val))

        results[(alpha, lam)] = (training_accuracy, cv_accuracy)
        # Getting many NaNs here, most likely from the learning rate /
        # regularization strength rather than a bug (the code runs and the
        # reported accuracies look reasonable).
        if cv_accuracy > best_val:
            best_val = cv_accuracy
            best_softmax = softmax

################################################################################
#                              END OF YOUR CODE                                #
################################################################################
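The NaNs mentioned in Example #12 are usually exp() overflow inside the softmax; subtracting the per-row maximum score before exponentiating avoids them. For reference, a minimal sketch of a vectorized softmax loss and gradient; the shapes (X: (N, D), W: (D, C)) match the snippets above, but the regularization convention (reg vs. 0.5 * reg) varies between implementations:

import numpy as np

def softmax_loss_sketch(W, X, y, reg):
    # Minimal sketch, not the course's exact implementation.
    N = X.shape[0]
    scores = X.dot(W)                            # (N, C) class scores
    scores -= scores.max(axis=1, keepdims=True)  # stability: prevents exp overflow
    probs = np.exp(scores)
    probs /= probs.sum(axis=1, keepdims=True)    # row-wise softmax probabilities
    loss = -np.log(probs[np.arange(N), y]).mean() + reg * np.sum(W * W)
    dscores = probs.copy()
    dscores[np.arange(N), y] -= 1                # d(loss)/d(scores), up to 1/N
    dW = X.T.dot(dscores) / N + 2 * reg * W
    return loss, dW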