Example #1
from pyxvis.io.data import load_features
from pyxvis.io.plots import show_confusion_matrix
from pyxvis.learning.classifiers import clf_model, define_classifier
from pyxvis.learning.classifiers import train_classifier, test_classifier

(X, d, Xt, dt) = load_features('../data/F2/F2')  # load training and testing data

# Classifier definition
ss_cl = ['dmin', 'svm-rbf(0.1,1)']  # classifier names (see clf_model for the naming scheme)
n = len(ss_cl)
for k in range(n):
    (name, params) = clf_model(ss_cl[k])  # function name and parameters
    clf = define_classifier([name, params])  # classifier definition
    clf = train_classifier(clf, X, d)  # classifier training
    ds = test_classifier(clf, Xt)  # classification of testing
    show_confusion_matrix(dt, ds, ss_cl[k])  # display confusion matrix
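The loop works for any classifier string that clf_model can parse; a minimal sketch that extends it with more candidates and prints plain accuracies instead of confusion matrices (the extra strings 'lda' and 'knn3' are assumptions about pyxvis's naming scheme; check clf_model for the names your version supports):

from sklearn.metrics import accuracy_score

ss_cl = ['dmin', 'lda', 'knn3', 'svm-rbf(0.1,1)']  # 'lda'/'knn3' are assumed names
for s in ss_cl:
    (name, params) = clf_model(s)
    clf = train_classifier(define_classifier([name, params]), X, d)
    print(f'{s}: accuracy = {accuracy_score(dt, test_classifier(clf, Xt)):.4f}')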
Example #2
from sklearn.model_selection import train_test_split

from pyxvis.io.data import load_features
from pyxvis.io.plots import show_confusion_matrix
from pyxvis.learning.classifiers import clf_model
from pyxvis.learning.evaluation import hold_out

# load the complete dataset (full=1 returns all samples without a train/test split)
(X0, d0) = load_features('../data/F2/F2', full=1)

# definition of training and testing data
X, Xt, d, dt = train_test_split(X0, d0, test_size=0.2, stratify=d0)

# definition of the classifier
cl_name = 'svm-rbf(0.1,1)'  # generic name of the classifier
(name, params) = clf_model(cl_name)  # function name and parameters

# Hold-out (train on (X,d), test on (Xt), compare with dt)
ds, acc, _ = hold_out([name, params], X, d, Xt, dt)  # hold out
print(f'{cl_name}: Accuracy = {acc:.4f}')

# display confusion matrix
show_confusion_matrix(dt, ds, 'Testing subset')
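A single hold-out estimate depends on the random split; a minimal cross-validated variant, assuming the object returned by define_classifier follows the scikit-learn estimator API (its fit-style use in Example #1 suggests it does):

from sklearn.model_selection import cross_val_score
from pyxvis.learning.classifiers import define_classifier

clf = define_classifier([name, params])       # same definition as above
scores = cross_val_score(clf, X0, d0, cv=10)  # 10-fold cross-validation
print(f'{cl_name}: CV accuracy = {scores.mean():.4f} +/- {scores.std():.4f}')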
Example #3
import numpy as np

from pyxvis.io.plots import plot_loss, show_confusion_matrix

# nn_definition, nn_forward_propagation, nn_backward_propagation,
# nn_parameters_update and nn_loss_function, as well as the data
# (Xtrain, Ytrain, Xtest, Ytest) and the settings (n_0, nh, n_m, N,
# tmax, alpha, loss_eps), are assumed to be defined earlier in the script.

n           = [n_0]+nh+[n_m]     # nodes of each layer
m           = len(n)-1           # number of layers (excluding the input layer)
ltrain      = np.zeros([tmax,1]) # training loss

# Training
t     = -1
train =  1
W,b   = nn_definition(n,N)                            # (step 1)
while train:
    t         = t+1
    a         = nn_forward_propagation(Xtrain,W,b)    # (step 2)
    dW,db     = nn_backward_propagation(Ytrain,a,W,b) # (step 3)
    W,b       = nn_parameters_update(W,b,dW,db,alpha) # (step 4)
    ltrain[t] = nn_loss_function(a,Ytrain)            # (step 5)
    train     = ltrain[t]>=loss_eps and t<tmax-1  # continue while the loss is above loss_eps and iterations remain

# Loss function on the training subset
plot_loss(ltrain)

# Evaluation on training and testing subsets
a = nn_forward_propagation(Xtrain,W,b)    # output layer is a[m]
show_confusion_matrix(Ytrain,a[m],'Training',categorical=1)  # (ground truth, prediction)
a = nn_forward_propagation(Xtest,W,b)     # output layer is a[m]
show_confusion_matrix(Ytest,a[m],'Testing',categorical=1)
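Step 4 of the loop is an ordinary gradient-descent update; a minimal sketch of what nn_parameters_update is assumed to compute, taking W, b, dW, db as lists of per-layer arrays (the actual pyxvis implementation may store them differently):

def nn_parameters_update(W, b, dW, db, alpha):
    # move each weight matrix and bias vector against its gradient,
    # scaled by the learning rate alpha
    for i in range(len(W)):
        W[i] = W[i] - alpha * dW[i]
        b[i] = b[i] - alpha * db[i]
    return W, b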
Example #4
from sklearn.neural_network import MLPClassifier
from pyxvis.io.plots import plot_features2, show_confusion_matrix, plot_loss
from pyxvis.io.data import load_features

# Load training and testing data
(Xtrain, Ytrain, Xtest, Ytest) = load_features('../data/G4/G4')
plot_features2(Xtrain, Ytrain, 'Training Subset')  # only the training samples are plotted

# Definitions
alpha       = 1e-5     # L2 regularization term (MLPClassifier's alpha is not a learning rate)
nh          = (6,12)   # nodes of the hidden layers
tmax        = 2000     # max number of iterations
solver      = 'adam'   # optimization approach ('lbfgs', 'sgd', 'adam')

# Training
net = MLPClassifier(solver=solver, alpha=alpha, hidden_layer_sizes=nh,
                    random_state=1, max_iter=tmax)
print(Xtrain.shape)  # check the dimensions of the training data
print(Ytrain.shape)
net.fit(Xtrain, Ytrain)

# Evaluation
Ym  = net.predict(Xtrain)
show_confusion_matrix(Ytrain, Ym, 'Training')  # (ground truth, prediction), as in Examples #1 and #2

Ys  = net.predict(Xtest)
show_confusion_matrix(Ytest, Ys, 'Testing')
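plot_loss is imported above but never called. For the 'sgd' and 'adam' solvers, MLPClassifier records the per-iteration training loss in net.loss_curve_ (a documented scikit-learn attribute, not available with 'lbfgs'); assuming plot_loss accepts any sequence of loss values, as its use in Example #3 suggests, the curve can be displayed with:

plot_loss(net.loss_curve_)  # training loss per iteration (sgd/adam only)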
Example #5
import numpy as np

from pyxvis.io.plots import show_confusion_matrix


def computeconfusionMatrix(model, X, y):
    Y_prediction = model.predict(X)  # per-class scores for each sample
    Y_pred_classes = np.argmax(Y_prediction, axis=1)  # one-hot vectors to class indices
    Y_true = np.argmax(y, axis=1)  # one-hot vectors to class indices
    show_confusion_matrix(Y_true, Y_pred_classes, 'CNN')
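A minimal usage sketch: 'model' is any estimator whose predict method returns per-class scores (e.g., a trained Keras CNN), and the labels must be one-hot encoded; both data names below are placeholders:

computeconfusionMatrix(model, Xtest_cnn, Ytest_onehot)  # hypothetical names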