def strategy05(X_train, labels_train, X_test, labels_test, groups):
    '''
    Strategy #5 for the second classifier
    '''

    print('\nRunning strategy #5 of the second classifier...')

    # *** TRAINING DATA DEFINITION ***

    # Step 1: Clean
    #         > Training: 5040 x 82
    s_clean = clean(X_train)
    X_train = X_train[:, s_clean]

    # Step 2: PCA (all 82 components are kept)
    #         > Training: 5040 x 82
    X_train, _, A1, Xm1, _ = pca(X_train, n_components=X_train.shape[1])

    # Step 3: Normalization
    #         > Training: 5040 x 82
    X_train, a, b = normalize(X_train)

    # Step 4: SFS
    #         > Training: 5040 x 80
    s_sfs = sfs(X_train, labels_train, n_features=80, method="fisher")
    X_train = X_train[:, s_sfs]
    X_train_sfs80 = X_train.copy()

    # Step 5: PCA
    #         > Training: 5040 x 10
    X_train, _, A2, Xm2, _ = pca(X_train, n_components=10)

    # Step 6: SFS over the concatenation of the 10 PCA components and the
    #         80 SFS features (5040 x 90 candidates)
    #         > Training: 5040 x 20
    X_train = np.concatenate((X_train, X_train_sfs80), axis=1)
    s_sfs2 = sfs(X_train, labels_train, n_features=20, method="fisher")
    X_train = X_train[:, s_sfs2]

    # *** TESTING DATA DEFINITION ***

    X_test = X_test[:, s_clean]  # Step 1: clean
    X_test = np.matmul(X_test - Xm1, A1)  # Step 2: PCA
    X_test = X_test * a + b  # Step 3: normalization
    X_test = X_test[:, s_sfs]  # Step 4: SFS
    X_test_sfs80 = X_test.copy()
    X_test = np.matmul(X_test - Xm2, A2)  # Step 5: PCA
    X_test = np.concatenate((X_test, X_test_sfs80), axis=1)
    X_test = X_test[:, s_sfs2]  # Step 6: SFS

    # *** TRAIN ON THE TRAINING DATA AND EVALUATE ON THE TESTING DATA ***

    return classifier_tests(X_train, labels_train, X_test, labels_test, groups)
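
Note that every testing-set transform above reuses parameters fitted on the training data only (s_clean, A1, Xm1, a, b, s_sfs, A2, Xm2, s_sfs2); nothing is re-estimated from X_test, which keeps the evaluation free of train/test leakage. The same pattern recurs in all the strategies below.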
Example #2
def strategy02(X_train, labels_train, X_test, labels_test):
    '''
    Strategy #2 for the first classifier
    '''

    print('\nRunning strategy #2 of the first classifier...')

    # *** TRAINING DATA DEFINITION ***

    # Step 1: Clean
    #         > Training: 5040 x 82
    s_clean = clean(X_train)
    X_train = X_train[:, s_clean]

    # Step 2: PCA with 70 components
    #         > Training: 5040 x 70
    X_train, _, A, Xm, _ = pca(X_train, n_components=70)

    # Step 3: Normalization
    #         > Training: 5040 x 70
    X_train, a, b = normalize(X_train)

    # Step 4: SFS
    #         > Training: 5040 x 20
    s_sfs = sfs(X_train, labels_train, n_features=20, method="fisher")
    X_train = X_train[:, s_sfs]

    # *** TESTING DATA DEFINITION ***
    X_test = X_test[:, s_clean]  # Step 1: Clean
    X_test = np.matmul(X_test - Xm, A)  # Step 2: PCA
    X_test = X_test * a + b  # Step 3: Normalization
    X_test = X_test[:, s_sfs]  # Step 4: SFS

    return classifier_tests(X_train, labels_train, X_test, labels_test)
Example #3
def strategy01(X_train, labels_train, X_test, labels_test):
    '''
    Strategy #1 for the first classifier
    '''

    print('\nRunning strategy #1 of the first classifier...')

    # *** TRAINING DATA DEFINITION ***

    # Step 1: Clean the data
    #   > Training: 5040 x 82
    s_clean = clean(X_train)
    X_train = X_train[:, s_clean]

    # Step 2: Mean-std normalization of the data
    X_train, a, b = normalize(X_train)

    # Step 3: Feature selection
    # The Fisher criterion is used here
    #   > Training: 5040 x 50
    s_sfs = sfs(X_train, labels_train, n_features=50, method="fisher")
    X_train = X_train[:, s_sfs]

    # Step 4: PCA
    #         > Training: 5040 x 50
    X_train, _, A, Xm, _ = pca(X_train, n_components=50)

    # *** TESTING DATA DEFINITION ***

    X_test = X_test[:, s_clean]  # Step 1: Clean
    X_test = X_test * a + b  # Step 2: Normalization
    X_test = X_test[:, s_sfs]  # Step 3: SFS
    X_test = np.matmul(X_test - Xm, A)  # Step 4: PCA

    return classifier_tests(X_train, labels_train, X_test, labels_test)
Example #4
def PCA(x_train, x_test, x_val, n_components):
    """
    Realiza la transformación PCA de los datos a tan solo 'n_components' características.
    n_components:   número de características.
    """
    x_train, _, A, Xm, _ = pca(x_train, n_components=n_components)
    x_test = np.matmul(x_test - Xm, A)
    x_val = np.matmul(x_val - Xm, A)
    return x_train, x_test, x_val
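
A minimal usage sketch for this helper, with synthetic data standing in for real feature matrices (the shapes and values are illustrative assumptions):

import numpy as np

rng = np.random.default_rng(0)
x_train = rng.normal(size=(100, 82))  # hypothetical: 100 samples, 82 features
x_test = rng.normal(size=(20, 82))
x_val = rng.normal(size=(20, 82))

x_train, x_test, x_val = PCA(x_train, x_test, x_val, n_components=10)
print(x_train.shape, x_test.shape, x_val.shape)  # (100, 10) (20, 10) (20, 10)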
Example #5
def sfspca_old(X, d, m1, m2, cc, ex, m3):
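    # Select m1 features with Fisher-based SFS, project them onto m2 principal
    # components, optionally concatenate the selected features with the PCA
    # components (cc == 1), then pick m3 final features by exhaustive search
    # (ex == 1) or plain SFS.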
    s1 = sfsfisher(X, d, m1)
    X = X[:, s1]
    Y, _, _, _, _ = pca(X, n_components=m2)
    if cc == 1:
        Y = np.concatenate((X, Y), axis=1)
    if ex == 1:
        s2 = exsearch(Y, d, m3)
    else:
        s2 = sfs(Y, d, m3)
    X = Y[:, s2]
    return X
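
The _old suffix suggests this helper was superseded: sfspca in Example #7 below is the guarded variant, which skips the PCA and final-selection steps when m2 or m3 is 0.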
Example #6
def WinnerStrategy(X_train, labels_train, X_test, labels_test):
    '''
    Strategy #1 with neural networks,
    rewritten to collect statistics
    '''

    # *** TRAINING DATA DEFINITION ***

    # Step 1: Clean the data
    #   > Training: 5040 x 82
    s_clean = clean(X_train)
    X_train = X_train[:, s_clean]

    # Step 2: Mean-std normalization of the data
    X_train, a, b = normalize(X_train)

    # Step 3: Feature selection
    # The Fisher criterion is used here
    #   > Training: 5040 x 50
    s_sfs = sfs(X_train, labels_train, n_features=50, method="fisher")
    X_train = X_train[:, s_sfs]

    # Step 4: PCA
    #         > Training: 5040 x 50
    X_train, _, A, Xm, _ = pca(X_train, n_components=50)

    # *** TESTING DATA DEFINITION ***

    X_test = X_test[:, s_clean]  # Step 1: Clean
    X_test = X_test * a + b  # Step 2: Normalization
    X_test = X_test[:, s_sfs]  # Step 3: SFS
    X_test = np.matmul(X_test - Xm, A)  # Step 4: PCA

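    # Assumes a module-level import not shown in this excerpt:
    # from sklearn.neural_network import MLPClassifier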
    classifier = MLPClassifier(alpha=1, max_iter=1000, random_state=2)

    results = {}

    # Classify the testing samples
    classifier.fit(X_train, labels_train)
    Y_pred = classifier.predict(X_test)
    accuracy = performance(Y_pred, labels_test)

    results['Accuracy'] = accuracy * 100
    results['Y_pred'] = Y_pred
    results['labels_test'] = labels_test

    return results
Example #7
def sfspca(bcl, X, d, m1, m2, cc, ex, m3):
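    # Guarded variant of sfspca_old: the first selection uses fsel with the
    # classifier spec bcl, m2 == 0 skips the PCA projection, and m3 == 0
    # skips the final selection step.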
    s1 = fsel(bcl, X, d, m1)
    X = X[:, s1]
    if m2 > 0:
        Y, _, _, _, _ = pca(X, n_components=m2)
    else:
        Y = X
    if cc == 1:
        Y = np.concatenate((X, Y), axis=1)
    if m3 > 0:
        if ex == 1:
            s2 = exsearch(Y, d, m3)
        else:
            s2 = fsel(bcl, Y, d, m3)
        X = Y[:, s2]
    else:
        X = Y
    return X
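
A call sketch under stated assumptions: bcl takes the [name, params] form produced by fse_model (as in Examples #10 and #11), and X, d are an already cleaned and normalized feature matrix and label vector:

name, params = fse_model('LDA')  # hypothetical model choice
X_new = sfspca([name, params], X, d, m1=40, m2=10, cc=1, ex=0, m3=15)
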
Example #8
def strategy03(X_train, labels_train, X_test, labels_test, groups):
    '''
    Strategy #3 for the second classifier
    '''

    print('\nRunning strategy #3 of the second classifier...')

    # *** TRAINING DATA DEFINITION ***

    # Step 1: Clean
    #         > Training: 5040 x 82
    s_clean = clean(X_train)
    X_train = X_train[:, s_clean]

    # Step 2: Normalization
    #         > Training: 5040 x 82
    X_train, a, b = normalize(X_train)

    # Step 3: SFS
    #         > Training: 5040 x 80
    s_sfs = sfs(X_train, labels_train, n_features=80, method="fisher")
    X_train = X_train[:, s_sfs]

    # Step 4: PCA
    #         > Training: 5040 x 20
    X_train, _, A, Xm, _ = pca(X_train, n_components=20)

    # Step 5: SFS
    #         > Training: 5040 x 15
    s_sfs2 = sfs(X_train, labels_train, n_features=15, method="fisher")
    X_train = X_train[:, s_sfs2]

    # *** TESTING DATA DEFINITION ***

    X_test = X_test[:, s_clean]  # Step 1: Clean
    X_test = X_test * a + b  # Step 2: Normalization
    X_test = X_test[:, s_sfs]  # Step 3: SFS
    X_test = np.matmul(X_test - Xm, A)  # Step 4: PCA
    X_test = X_test[:, s_sfs2]  # Step 5: SFS

    # *** TRAIN ON THE TRAINING DATA AND EVALUATE ON THE TESTING DATA ***

    return classifier_tests(X_train, labels_train, X_test, labels_test, groups)
Example #9
# Step 2-Training: Clean
#         > Training: 211 x 387
s_clean = clean(X_train)
X_train = X_train[:, s_clean]

# Step 3-Training: Normalization
#         > Training: 211 x 387
X_train, a, b = normalize(X_train)

# Step 4-Training: SFS
#         > Training: 211 x 40
s_sfs = sfs(X_train, d_train, n_features=40, method="fisher", show=True)
X_train = X_train[:, s_sfs]

# Step 5-Training: PCA
#         > Training: 211 x 10
X_train, _, A, Xm, _ = pca(X_train, n_components=10)

# *** TESTING DATA DEFINITION ***

X_test = X_test[:, s_clean]  # Step 2: clean
X_test = X_test * a + b  # Step 3: normalization
X_test = X_test[:, s_sfs]  # Step 4: SFS
X_test = np.matmul(X_test - Xm, A)  # Step 5: PCA

# *** TRAIN ON THE TRAINING DATA AND EVALUATE ON THE TESTING DATA ***

knn = KNN(n_neighbors=1)
knn.fit(X_train, d_train)
Y_pred = knn.predict(X_test)
accuracy = performance(Y_pred, d_test)
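
The fragment stops after computing the accuracy; a one-line report such as the following is a natural ending (the formatting is an assumption):

print(f'Testing accuracy: {accuracy * 100:.2f}%')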
Example #10
from pybalu.feature_transformation import pca
from pyxvis.features.extraction import extract_features_labels
from pyxvis.features.selection import fse_model, fse_sbs, clean_norm, clean_norm_transform
from pyxvis.io.plots import plot_features3, print_confusion
from sklearn.neighbors import KNeighborsClassifier as knn

# Training-Data
path = '../images/threatobjects/'
fx = ['basicgeo', 'ellipse', 'hugeo', 'flusser', 'fourierdes', 'gupta']
X, d = extract_features_labels(fx,
                               path + 'train',
                               'jpg',
                               segmentation='bimodal')
X, sclean, a, b = clean_norm(X)
(name, params) = fse_model('LDA')
ssbs = fse_sbs([name, params], X, d, 20)
X = X[:, ssbs]
Ypca, _, _, _, _ = pca(X, n_components=3)
plot_features3(Ypca, d, 'PCA - Threat Objects', view=(-160, 120))

# Testing-Data
Xt, dt = extract_features_labels(fx,
                                 path + 'test',
                                 'jpg',
                                 segmentation='bimodal')
Xt = clean_norm_transform(Xt, sclean, a, b)
Xt = Xt[:, ssbs]

# Classification and Evaluation
clf = knn(n_neighbors=5)
clf.fit(X, d)
ds = clf.predict(Xt)
print_confusion(dt, ds)
Example #11
import numpy as np
from pybalu.feature_transformation import pca
from pybalu.feature_selection import exsearch  # import path assumed for the exhaustive search used below
from pybalu.feature_analysis import jfisher
from pyxvis.features.extraction import extract_features_labels
from pyxvis.features.selection import fsel, fse_model, clean_norm, clean_norm_transform
from pyxvis.io.plots import plot_features3, print_confusion
from sklearn.neighbors import KNeighborsClassifier as KNN

# Training-Data
path = '../images/fishbones/'
fx = ['basicint', 'gabor-ri', 'lbp-ri', 'haralick-2', 'fourier', 'dct', 'hog']
X, d = extract_features_labels(fx, path + 'train', 'jpg')
X, sclean, a, b = clean_norm(X)
(name, params) = fse_model('QDA')
ssfs = fsel([name, params], X, d, 15, cv=5, show=1)
X = X[:, ssfs]
Ypca, _, A, Mx, _ = pca(X, n_components=6)
X = np.concatenate((X, Ypca), axis=1)
sf = exsearch(X, d, n_features=3, method="fisher", show=True)
X = X[:, sf]
print('Jfisher = ' + str(jfisher(X, d)))
plot_features3(X, d, 'Fishbones')

# Testing-Data
Xt, dt = extract_features_labels(fx, path + 'test', 'jpg')
Xt = clean_norm_transform(Xt, sclean, a, b)
Xt = Xt[:, ssfs]
Ytpca = np.matmul(Xt - Mx, A)
Xt = np.concatenate((Xt, Ytpca), axis=1)
Xt = Xt[:, sf]

# Classification and Evaluation
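# The snippet is truncated here; a plausible completion mirrors the KNN
# evaluation of Example #10 (the neighbor count is an assumption):
clf = KNN(n_neighbors=5)  # hypothetical value; the original setting is not shown
clf.fit(X, d)
ds = clf.predict(Xt)
print_confusion(dt, ds)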