# Fit and plot one-vs-rest NN classifiers
y_test_est = np.mat(np.zeros((y_test.shape[0], C)))
nets = []
for c in range(C):
    ann = nl.net.newff(
        [[X[:, 0].min(), X[:, 0].max()], [X[:, 1].min(), X[:, 1].max()]],
        [NHiddenUnits, 1],
        [nl.trans.TanSig(), nl.trans.PureLin()])
    train_error = ann.train(X_train, (y_train == c),
                            goal=learning_goal,
                            epochs=max_epochs,
                            show=round(max_epochs / 8))
    nets.append(ann)
    y_test_est[:, c] = ann.sim(X_test)
    figure(c + 1)
    dbplotf(X_test, (y_test == c).astype(int), lambda x: ann.sim(x), 'auto')

# Plot decision boundary for the ensemble of neural networks
figure(C + 1)


def neval(xval):
    return np.argmax(np.hstack([n.sim(xval) for n in nets]), 1)


dbplotf(X_test, y_test, neval, 'auto')
show()
# Compute error rate
ErrorRate = (np.argmax(y_test_est, 1) != y_test).mean(dtype=float)
print('Error rate (ensemble): {0}%'.format(100 * ErrorRate))
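# Aside (a sketch, not part of the original exercise): neurolab is unmaintained,
# so the same one-vs-rest scheme is shown below with scikit-learn. The dataset
# is synthetic stand-in data and the hyperparameters are illustrative assumptions.
import numpy as np
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split
from sklearn.multiclass import OneVsRestClassifier
from sklearn.neural_network import MLPClassifier

Xd, yd = make_classification(n_samples=300, n_features=2, n_informative=2,
                             n_redundant=0, n_classes=3, n_clusters_per_class=1,
                             random_state=0)
Xd_train, Xd_test, yd_train, yd_test = train_test_split(Xd, yd, random_state=0)

# One binary tanh network per class; OneVsRestClassifier fuses the per-class
# scores by taking the largest, just like the np.argmax over nets above.
ovr = OneVsRestClassifier(MLPClassifier(hidden_layer_sizes=(4,), activation='tanh',
                                        max_iter=2000, random_state=0))
ovr.fit(Xd_train, yd_train)
print('Error rate (sklearn OvR): {0}%'.format(100 * np.mean(ovr.predict(Xd_test) != yd_test)))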
Example #2
# conv2DS (defined in Example #3 below) wraps the data in PyBrain's ClassificationDataSet
DS_train = conv2DS(X_train, y_train)
DS_test = conv2DS(X_test, y_test)

fnn = buildNetwork(DS_train.indim,
                   NHiddenUnits,
                   DS_train.outdim,
                   outclass=SoftmaxLayer,
                   bias=True)
trainer = BackpropTrainer(fnn,
                          dataset=DS_train,
                          momentum=0.1,
                          verbose=True,
                          weightdecay=0.01)
# Train for 50 epochs.
for i in range(50):
    trainer.trainEpochs(1)
ote = fnn.activateOnDataset(DS_test)

ErrorRate = (np.argmax(ote, 1) != y_test.T).mean(dtype=float)
print('Error rate (NN): {0}%'.format(100 * ErrorRate))
figure(1)


def neval(xval):
    return np.argmax(fnn.activateOnDataset(conv2DS(np.asmatrix(xval))), 1)


toolbox_02450.dbplotf(X_test, y_test, neval, 'auto')
show()
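# Note: PyBrain's _convertToOneOfMany() call inside conv2DS one-hot encodes the
# integer targets. The encoding itself amounts to this plain-numpy illustration
# (values chosen arbitrarily):
import numpy as np
y_demo = np.array([0, 2, 1, 2])   # integer class labels
one_hot = np.eye(3)[y_demo]       # row i has a 1 in column y_demo[i], zeros elsewhere
print(one_hot)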
Example #3
#%% Convert data to PyBrain's ClassificationDataSet format.
def conv2DS(Xv, yv=None):
    # If no labels are given (e.g. for grid points passed in by dbplotf),
    # fabricate labels so that every class index occurs at least once.
    if yv is None:
        yv = np.asmatrix(np.ones((Xv.shape[0], 1)))
        for j in range(len(classNames)):
            yv[j] = j
    C = len(np.unique(yv.flatten().tolist()[0]))
    DS = ClassificationDataSet(M, 1, nb_classes=C)  # M = number of attributes (global)
    for i in range(Xv.shape[0]):
        DS.appendLinked(Xv[i, :].tolist()[0], [yv[i].A[0][0]])
    DS._convertToOneOfMany()  # one-hot encode the targets
    return DS

DS_train = conv2DS(X_train,y_train)
DS_test = conv2DS(X_test,y_test)

# A neural network without a hidden layer is equivalent to multinomial logistic
# regression, albeit trained very slowly (see the scikit-learn check after this example)
fnn = buildNetwork(DS_train.indim, DS_train.outdim, outclass=SoftmaxLayer, bias=True)
trainer = BackpropTrainer(fnn, dataset=DS_train, momentum=0.1, verbose=True, weightdecay=0.01)
# Train for 50 epochs.
for i in range(50):
    trainer.trainEpochs(1)
ote = fnn.activateOnDataset(DS_test)

ErrorRate = (np.argmax(ote, 1) != y_test.T).mean(dtype=float)
print('Error rate (NN): {0}%'.format(100 * ErrorRate))
figure(1)
def neval(xval):
    return np.argmax(fnn.activateOnDataset(conv2DS(np.asmatrix(xval))), 1)

dbplotf(X_test, y_test, neval, 'auto')
show()
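# Aside (a sketch): the comment above says a softmax network with no hidden
# layer amounts to multinomial logistic regression. A quick check of that
# equivalence with scikit-learn on a stand-in dataset (not the exercise data):
import numpy as np
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

Xi, yi = load_iris(return_X_y=True)
Xi_train, Xi_test, yi_train, yi_test = train_test_split(Xi, yi, random_state=0)

# A single linear layer followed by softmax -- the zero-hidden-layer network --
# but fit with a fast second-order solver instead of slow backpropagation.
clf = LogisticRegression(max_iter=1000).fit(Xi_train, yi_train)
print('Error rate (logreg): {0}%'.format(100 * np.mean(clf.predict(Xi_test) != yi_test)))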
Example #4
    # Excerpt: body of an outer cross-validation fold; see the loop sketch after this example.
    best_knn = KNeighborsClassifier(n_neighbors=best_param_knn[k])
    best_knn = best_knn.fit(X_train, y_train)
    y_est = best_knn.predict(X_test)
    X_test_outer.append(X_test)
    best_knn_list.append(best_knn)
    mis_classified = np.sum(y_est != y_test)
    relative_error = mis_classified / len(y_test)
    y_ESTKNN.append(y_est)

    y_test_outer.append(y_test)
    err_test_outer.append(relative_error)
    k += 1

print("The generalization error is {0} ".format(np.mean(err_test_outer)))
best_index = np.argmin(err_test_outer)

confmatplot(y_test_outer[best_index], y_ESTKNN[best_index])
show()

figure(1)


def neval(xval):
    # Use the KNN model from the best outer fold, matching X_test_outer[best_index]
    return np.argmax(best_knn_list[best_index].predict_proba(xval), 1)


if k_pca == 2:
    figure()
    dbplotf(X_test_outer[best_index], y_test_outer[best_index], neval, 'auto')
    show()
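# Context (a sketch under assumptions): the excerpt above is the body of an
# outer cross-validation fold. A minimal runnable skeleton of the surrounding
# loop on stand-in data; best_param_knn is a placeholder for the values an
# inner model-selection loop would produce.
import numpy as np
from sklearn.model_selection import KFold

rng = np.random.RandomState(0)
X_all = rng.randn(200, 2)                        # stand-in data, not the exercise's
y_all = (X_all[:, 0] + X_all[:, 1] > 0).astype(int)
best_param_knn = [3] * 10                        # placeholder inner-CV results

X_test_outer, y_test_outer, err_test_outer = [], [], []
best_knn_list, y_ESTKNN = [], []
k = 0
for train_index, test_index in KFold(n_splits=10, shuffle=True, random_state=0).split(X_all):
    X_train, X_test = X_all[train_index], X_all[test_index]
    y_train, y_test = y_all[train_index], y_all[test_index]
    # ... the fold body shown in the excerpt above runs here ...
    k += 1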
Example #5
    # Excerpt: body of an outer cross-validation fold, as in Example #4.
    best_est.append(y_est)
    mis_classified = np.sum(y_est != y_test)
    relative_error = mis_classified / len(y_test)
    gen_err[k] = relative_error
    k += 1
# show confusion matrix for model training using K-fold cross validation
best_index = gen_err.argmin()
figure()
confmatplot(y_test_outer[best_index], best_est[best_index])
print("The mean generalization error is {0}".format(np.mean(gen_err)))

# #show confusion matrix for model testing using Leave one out cross validation
# figure(1)
# Y_est2 = clf_list[best_index].predict(X_test)
# confmatplot(Y_test,Y_est2)
# show()
# Decision boundaries for the multinomial regression model

def nevallog(xval):
    return np.argmax(clf_list[best_index].predict_proba(xval), 1)

if k_pca == 2:
    figure()
    dbplotf(x_test_outer[best_index], y_test_outer[best_index], nevallog, 'auto')
    show()

print(y_train.shape)
#%% Convert data to PyBrain's ClassificationDataSet format.
def conv2DS(Xv, yv=None):
    # If no labels are given (e.g. for grid points passed in by dbplotf),
    # fabricate labels so that every class index occurs at least once.
    if yv is None:
        yv = np.asmatrix(np.ones((Xv.shape[0], 1)))
        for j in range(len(classNames)):
            yv[j] = j
    C = len(np.unique(yv.flatten().tolist()[0]))
    DS = ClassificationDataSet(M, 1, nb_classes=C)  # M = number of attributes (global)
    for i in range(Xv.shape[0]):
        DS.appendLinked(Xv[i, :].tolist()[0], [yv[i].A[0][0]])
    DS._convertToOneOfMany()  # one-hot encode the targets
    return DS

DS_train = conv2DS(X_train,y_train)
DS_test = conv2DS(X_test,y_test)

fnn = buildNetwork(DS_train.indim, NHiddenUnits, DS_train.outdim, outclass=SoftmaxLayer, bias=True)
trainer = BackpropTrainer(fnn, dataset=DS_train, momentum=0.1, verbose=True, weightdecay=0.01)
# Train for 50 epochs.
for i in range(50):
    trainer.trainEpochs(1)
ote = fnn.activateOnDataset(DS_test)

ErrorRate = (np.argmax(ote, 1) != y_test.T).mean(dtype=float)
print('Error rate (NN): {0}%'.format(100 * ErrorRate))
figure(1)
def neval(xval):
    return np.argmax(fnn.activateOnDataset(conv2DS(np.asmatrix(xval))), 1)

toolbox_02450.dbplotf(X_test, y_test, neval, 'auto')
show()
attributeNames = [name[0] for name in mat_data['attributeNames'].squeeze()]
classNames = [name[0][0] for name in mat_data['classNames']]
N, M = X.shape
C = len(classNames)
NHiddenUnits = 4
# These parameters are usually tuned to (1) the specifics of the data and (2) the available computation budget
learning_goal = 0.01  # stop criterion 1 (training MSE to be reached)
max_epochs = 300      # stop criterion 2 (maximum number of training epochs)

# Fit and plot one-vs-rest NN classifiers
y_test_est = np.mat(np.zeros((y_test.shape[0],C)))
nets = []
for c in range(C):
    ann = nl.net.newff([[X[:, 0].min(), X[:, 0].max()], [X[:, 1].min(), X[:, 1].max()]],
                       [NHiddenUnits, 1],
                       [nl.trans.TanSig(), nl.trans.PureLin()])
    train_error = ann.train(X_train, (y_train == c),
                            goal=learning_goal, epochs=max_epochs,
                            show=round(max_epochs / 8))
    nets.append(ann)
    y_test_est[:, c] = ann.sim(X_test)
    figure(c + 1)
    dbplotf(X_test, (y_test == c).astype(int), lambda x: ann.sim(x), 'auto')

# Plot decision boundary for the ensemble of neural networks
figure(C+1)
def neval(xval):
    return np.argmax(np.hstack([n.sim(xval) for n in nets]), 1)

dbplotf(X_test, y_test, neval, 'auto')
show()
# Compute error rate
ErrorRate = (np.argmax(y_test_est, 1) != y_test).mean(dtype=float)
print('Error rate (ensemble): {0}%'.format(100 * ErrorRate))
NHiddenUnits = 2

#%% Convert data to PyBrain's ClassificationDataSet format.
def conv2DS(Xv, yv=None):
    # If no labels are given (e.g. for grid points passed in by dbplotf),
    # fabricate labels so that every class index occurs at least once.
    if yv is None:
        yv = np.asmatrix(np.ones((Xv.shape[0], 1)))
        for j in range(len(classNames)):
            yv[j] = j
    C = len(np.unique(yv.flatten().tolist()[0]))
    DS = ClassificationDataSet(M, 1, nb_classes=C)  # M = number of attributes (global)
    for i in range(Xv.shape[0]):
        DS.appendLinked(Xv[i, :].tolist()[0], [yv[i].A[0][0]])
    DS._convertToOneOfMany()  # one-hot encode the targets
    return DS

DS_train = conv2DS(X_train,y_train)
DS_test = conv2DS(X_test,y_test)

fnn = buildNetwork(DS_train.indim, NHiddenUnits, DS_train.outdim, outclass=SoftmaxLayer, bias=True)
trainer = BackpropTrainer(fnn, dataset=DS_train, momentum=0.1, verbose=True, weightdecay=0.01)
# Train for 50 epochs.
for i in range(50):
    trainer.trainEpochs(1)
ote = fnn.activateOnDataset(DS_test)

ErrorRate = (np.argmax(ote, 1) != y_test.T).mean(dtype=float)
print('Error rate (NN): {0}%'.format(100 * ErrorRate))
figure(1)
def neval(xval):
    return np.argmax(fnn.activateOnDataset(conv2DS(np.asmatrix(xval))), 1)

dbplotf(X_test, y_test, neval, 'auto')
show()