Example 1
# Imports assumed by this snippet: numpy, scikit-learn, and the Python-ELM
# `elm` / `random_layer` modules (see also the imports in Example 9).
import numpy as np
from sklearn.linear_model import LogisticRegression
from elm import GenELMClassifier
from random_layer import MLPRandomLayer, RBFRandomLayer


def make_classifiers():

    names = [
        "ELM(10,tanh)", "ELM(10,tanh,LR)", "ELM(10,sinsq)", "ELM(10,tribas)",
        "ELM(hardlim)", "ELM(20,rbf(0.1))"
    ]

    nh = 10

    # pass user defined transfer func
    sinsq = (lambda x: np.power(np.sin(x), 2.0))
    srhl_sinsq = MLPRandomLayer(n_hidden=nh, activation_func=sinsq)

    # use internal transfer funcs
    srhl_tanh = MLPRandomLayer(n_hidden=nh, activation_func='tanh')

    srhl_tribas = MLPRandomLayer(n_hidden=nh, activation_func='tribas')

    srhl_hardlim = MLPRandomLayer(n_hidden=nh, activation_func='hardlim')

    # use gaussian RBF
    srhl_rbf = RBFRandomLayer(n_hidden=nh * 2, rbf_width=0.1, random_state=0)

    log_reg = LogisticRegression()

    classifiers = [
        GenELMClassifier(hidden_layer=srhl_tanh),
        GenELMClassifier(hidden_layer=srhl_tanh, regressor=log_reg),
        GenELMClassifier(hidden_layer=srhl_sinsq),
        GenELMClassifier(hidden_layer=srhl_tribas),
        GenELMClassifier(hidden_layer=srhl_hardlim),
        GenELMClassifier(hidden_layer=srhl_rbf)
    ]

    return names, classifiers
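A minimal usage sketch for the (name, classifier) pairs returned above, assuming the imports from Example 1; the toy dataset and split are illustrative, not part of the original example:

from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split

X, y = make_classification(n_samples=200, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

names, classifiers = make_classifiers()
for name, clf in zip(names, classifiers):
    clf.fit(X_train, y_train)  # solves the ELM output weights in closed form
    print("%s: %.3f" % (name, clf.score(X_test, y_test)))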
Example 2
# Imports as in Example 1, plus: from time import time
def trainELMClassifier(trainData, trainLabels, testData):
    print("\nTraining ELM Classifier...")

    trainData = np.asarray(trainData)
    trainLabels = np.asarray(trainLabels)
    print(trainData.shape)
    print(trainLabels.shape)

    # create and initialize the ELM hidden (activation) layer
    nh = 100
    activation = 'tanh'

    if activation == 'rbf':
        act_layer = RBFRandomLayer(n_hidden=nh,
                                   random_state=0,
                                   rbf_width=0.001)
    elif activation == 'tanh':
        act_layer = MLPRandomLayer(n_hidden=nh, activation_func='tanh')
    elif activation == 'tribas':
        act_layer = MLPRandomLayer(n_hidden=nh, activation_func='tribas')
    elif activation == 'hardlim':
        act_layer = MLPRandomLayer(n_hidden=nh, activation_func='hardlim')
    else:
        raise ValueError("unsupported activation: %s" % activation)

    # initialize ELM Classifier
    elm = GenELMClassifier(hidden_layer=act_layer)

    t0 = time()
    elm.fit(trainData, trainLabels)
    print("\nTraining finished in %0.3fs \n" % (time() - t0))

    t0 = time()
    predictedLabels = elm.predict(testData)
    print("\nTesting finished in %0.3fs" % (time() - t0))

    t0 = time()
    confidence_scores = elm.decision_function(testData)
    print("\nTesting finished in %0.3fs" % (time() - t0))

    print("\nPredicted Labels")
    print("----------------------------------")
    print(predictedLabels)

    print("\nConfidence Scores")
    print("----------------------------------")
    print(confidence_scores)

    params = {
        'nh': nh,
        'af': activation,
    }

    return confidence_scores, predictedLabels, params
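A hypothetical invocation with synthetic arrays; the shapes and class count are assumptions, chosen only to exercise the function:

import numpy as np

rng = np.random.RandomState(0)
trainData = rng.rand(100, 20)         # 100 samples, 20 features
trainLabels = rng.randint(0, 3, 100)  # 3 classes
testData = rng.rand(25, 20)

scores, labels, params = trainELMClassifier(trainData, trainLabels, testData)
print(params)  # {'nh': 100, 'af': 'tanh'}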
Example 3
    def __init__(self,
                 hidden_layer=MLPRandomLayer(random_state=0),
                 regressor=None):

        super(GenELMRegressor, self).__init__(hidden_layer, regressor)

        self.coefs_ = None
        self.fitted_ = False
        self.hidden_activations_ = None
Example 4
    def __init__(self,
                 hidden_layer=MLPRandomLayer(random_state=0),
                 binarizer=LabelBinarizer(-1, 1),
                 regressor=None):

        super(GenELMClassifier, self).__init__(hidden_layer, regressor)

        self.binarizer = binarizer

        self.classes_ = None
        self.genelm_regressor_ = GenELMRegressor(hidden_layer, regressor)
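The LabelBinarizer(-1, 1) default turns class labels into the ±1 regression targets that the internal GenELMRegressor is fit against; for example:

from sklearn.preprocessing import LabelBinarizer

lb = LabelBinarizer(neg_label=-1, pos_label=1)
print(lb.fit_transform([0, 1, 2, 1]))
# [[ 1 -1 -1]
#  [-1  1 -1]
#  [-1 -1  1]
#  [-1  1 -1]]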
Example 5
def make_classifiers():
    names = [   #"ELM(tanh)",
       # "ELM(tanh,LR)",
       # "ELM(sinsq)",
        #"ELM(sigmoid)",
       # "ELM(sine)",
       # "ELM(inv_tribas)",
       # "ELM(softlim)",
       # "ELM(gaussian)",
       # "ELM(multiquadric)",
       # "ELM(inv_multiquadric)",
        #"ELM(tribas)",
       # "ELM(hardlim)",
        #"Basic ELM(hardlim)",
        #"ELM(rbf(0.1))",
       # "LR",
        #"LDA",9
        #"KNN",
        #"DT",
        #"NB",
        #"RDF",
        "SVM(linear)",
        #"SVM(rbf)",
        #"SVM(sigmoid)",
        #"SVM(poly)"
    ]
    # hidden nodes
    nh = 2000
    # pass user defined transfer func
    sinsq = (lambda x: np.power(np.sin(x), 2.0))
    srhl_sinsq = MLPRandomLayer(n_hidden=nh, activation_func=sinsq)
    # use internal transfer funcs
    srhl_tanh = MLPRandomLayer(n_hidden=nh, activation_func='tanh')
    # use tribas
    srhl_tribas = MLPRandomLayer(n_hidden=nh, activation_func='tribas')
    # use hardlim
    srhl_hardlim = MLPRandomLayer(n_hidden=nh, activation_func='hardlim')
    # use gaussian RBF
    srhl_rbf = RBFRandomLayer(n_hidden=nh * 2, rbf_width=0.1, random_state=0)
    # use sigmoid
    srhl_sigmoid = MLPRandomLayer(n_hidden=nh, activation_func='sigmoid')
    # use sine
    srhl_sine = MLPRandomLayer(n_hidden=nh, activation_func='sine')
    # use inv_tribas
    srhl_inv_tribas = MLPRandomLayer(n_hidden=nh, activation_func='inv_tribas')
    # use softlim
    srhl_softlim = MLPRandomLayer(n_hidden=nh, activation_func='softlim')
    # use gaussian
    srhl_gaussian = MLPRandomLayer(n_hidden=nh, activation_func='gaussian')
    # use multiquadric
    srhl_multiquadric = MLPRandomLayer(n_hidden=nh, activation_func='multiquadric')
    # use inv_multiquadric
    srhl_inv_multiquadric = MLPRandomLayer(n_hidden=nh, activation_func='inv_multiquadric')
    log_reg = LogisticRegression()

    classifiers = [ #  GenELMClassifier(hidden_layer=srhl_tanh),
        #GenELMClassifier(hidden_layer=srhl_tanh, regressor=log_reg),
        #GenELMClassifier(hidden_layer=srhl_sinsq),
        #GenELMClassifier(hidden_layer=srhl_sigmoid),
        #GenELMClassifier(hidden_layer=srhl_sine),
        #GenELMClassifier(hidden_layer=srhl_inv_tribas),
        #GenELMClassifier(hidden_layer=srhl_softlim),
        #GenELMClassifier(hidden_layer=srhl_gaussian),
        #GenELMClassifier(hidden_layer=srhl_multiquadric),
        #GenELMClassifier(hidden_layer=srhl_inv_multiquadric),
        #GenELMClassifier(hidden_layer=srhl_tribas),
        #GenELMClassifier(hidden_layer=srhl_hardlim),
        #ELMClassifier(activation_func="hardlim",alpha=1,n_hidden=nh),
        #GenELMClassifier(hidden_layer=srhl_rbf),
        #LogisticRegression(),
        #LinearDiscriminantAnalysis(),
        #KNeighborsClassifier(),
        #DecisionTreeClassifier(),
        #GaussianNB(),
        #RandomForestClassifier(n_estimators=5),
        #SVC(kernel="rbf", gamma=0.01, C=10),
        SVC(kernel="linear", C=1),
        #SVC(kernel='rbf',C=10,gamma=0.01)
        #SVC(kernel="poly", gamma=2)
    ]

    return names, classifiers
Example 6
n_splits = 20
sKF = StratifiedKFold(n_splits=n_splits, shuffle=False)
i = 0
tprs, fprs = [], []  # per-fold ROC curves, filled inside the loop below

stop_train = False
num_epochs = 10
for train_index, test_index in sKF.split(std_X, y):
    i += 1
    x_train = std_X[train_index]
    y_train = y[train_index]
    x_test = std_X[test_index]
    y_test = y[test_index]
    #-------------------------------------------------------------------------------
    # candidate hidden layers (only `rbf` is used below)
    grbf = GRBFRandomLayer(n_hidden=500, grbf_lambda=0.0001)
    act = MLPRandomLayer(n_hidden=500, activation_func='sigmoid')
    rbf = RBFRandomLayer(n_hidden=290,
                         rbf_width=0.0001,
                         activation_func='sigmoid')

    clf = GenELMClassifier(hidden_layer=rbf)
    clf.fit(x_train, y_train.ravel())
    y_pre = clf.predict(x_test)
    y_score = clf.decision_function(x_test)
    fpr, tpr, thresholds = roc_curve(y_test, y_score)
    tprs.append(tpr)
    fprs.append(fpr)
    roc_auc = auc(fpr, tpr)
    tn, fp, fn, tp = confusion_matrix(y_test, y_pre).ravel()
    test_acc = (tn + tp) / (tn + fp + fn + tp)  # accuracy
    test_Sn = tp / (fn + tp)                    # sensitivity (true positive rate)
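The snippet ends before the folds are aggregated; a hedged continuation (an assumption, not the original code) that averages the collected per-fold ROC curves on a common grid:

import numpy as np

mean_fpr = np.linspace(0, 1, 100)
# interpolate each fold's TPR onto the common FPR grid, then average
interp_tprs = [np.interp(mean_fpr, f, t) for f, t in zip(fprs, tprs)]
mean_tpr = np.mean(interp_tprs, axis=0)
print("mean AUC over %d folds: %.3f" % (n_splits, auc(mean_fpr, mean_tpr)))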
Example 7
plot(xtoy, ytoy, xtoy, elmr.predict(xtoy))

# <codecell>

rbf_rhl = RBFRandomLayer(n_hidden=100, random_state=0, rbf_width=0.01)
elmc_rbf = GenELMClassifier(hidden_layer=rbf_rhl)
elmc_rbf.fit(dgx_train, dgy_train)
print(elmc_rbf.score(dgx_train, dgy_train), elmc_rbf.score(dgx_test, dgy_test))


def powtanh_xfer(activations, power=1.0):
    return pow(np.tanh(activations), power)


tanh_rhl = MLPRandomLayer(n_hidden=100,
                          activation_func=powtanh_xfer,
                          activation_args={'power': 3.0})
elmc_tanh = GenELMClassifier(hidden_layer=tanh_rhl)
elmc_tanh.fit(dgx_train, dgy_train)
print(elmc_tanh.score(dgx_train, dgy_train),
      elmc_tanh.score(dgx_test, dgy_test))

# <codecell>

rbf_rhl = RBFRandomLayer(n_hidden=100, rbf_width=0.01)
tr, ts = res_dist(dgx,
                  dgy,
                  GenELMClassifier(hidden_layer=rbf_rhl),
                  n_runs=100,
                  random_state=0)
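res_dist is a helper defined elsewhere in the original notebook. A minimal sketch of what such a score-distribution helper might look like (the name res_dist_sketch and the 80/20 split are assumptions; the original may differ):

from sklearn.base import clone
from sklearn.model_selection import train_test_split

def res_dist_sketch(X, y, model, n_runs=100, random_state=0):
    # collect train/test accuracy over repeated random splits
    train_scores, test_scores = [], []
    for i in range(n_runs):
        X_tr, X_te, y_tr, y_te = train_test_split(
            X, y, test_size=0.2, random_state=random_state + i)
        clf = clone(model)  # fresh, unfitted copy of the estimator
        clf.fit(X_tr, y_tr)
        train_scores.append(clf.score(X_tr, y_tr))
        test_scores.append(clf.score(X_te, y_te))
    return train_scores, test_scores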
Example 8
from sklearn import preprocessing

import warnings

dataset_name = ['caltech', 'feret', 'ORL', 'Indianface']
features = [
    'npymodel/caltech_data.npy', 'npymodel/feret_data.npy',
    'npymodel/ORL_data.npy', 'npymodel/Indianface_data.npy'
]

lables = [
    'npymodel/caltech_lable.npy', 'npymodel/feret_lable.npy',
    'npymodel/ORL_lable.npy', 'npymodel/Indianface_lable.npy'
]

srhl_sigmoid = MLPRandomLayer(n_hidden=2000, activation_func='sigmoid')
srhl_gaussian = MLPRandomLayer(n_hidden=2000, activation_func='gaussian')

names = [
    "ELM(sigmoid)",
    #"ELM(gaussian)",
    "SVM(linear)",
    #'SVM(rbf)',
    "LR"
]

classifiers = [
    GenELMClassifier(hidden_layer=srhl_sigmoid),
    # GenELMClassifier(hidden_layer=srhl_gaussian),
    SVC(kernel='linear', C=1),
    #SVC(kernel='rbf',C=10,gamma=0.01),
    LogisticRegression()  # matches the "LR" entry in `names`; closes the truncated list
]
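A hedged evaluation sketch over the datasets above (an assumption: each .npy pair holds an (n_samples, n_features) feature array and a matching label vector):

import numpy as np
from sklearn.model_selection import cross_val_score

for ds, feat_path, lab_path in zip(dataset_name, features, lables):
    X = preprocessing.scale(np.load(feat_path))  # standardize features
    y = np.load(lab_path).ravel()
    for name, clf in zip(names, classifiers):
        scores = cross_val_score(clf, X, y, cv=5)
        print("%s / %s: %.3f" % (ds, name, scores.mean()))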
Example 9
__author__ = 'ozgurcatak'

from mrjob.job import MRJob
import random, numpy as np, sys, os, uuid, errno, time
from elm import GenELMClassifier
from random_layer import RBFRandomLayer, MLPRandomLayer
from sklearn.ensemble import AdaBoostClassifier
from sklearn.metrics import classification_report, accuracy_score, precision_score, recall_score, f1_score
from sklearn.externals import joblib

M = 2
nh = 5
T = 5

srhl_tanh = MLPRandomLayer(n_hidden=nh, activation_func='tanh')
srhl_rbf = RBFRandomLayer(n_hidden=nh*2, rbf_width=0.1, random_state=0)
srhl_tribas = MLPRandomLayer(n_hidden=nh, activation_func='tribas')
srhl_hardlim = MLPRandomLayer(n_hidden=nh, activation_func='hardlim')

# clf = GenELMClassifier(hidden_layer=srhl_tanh)
clf = GenELMClassifier(hidden_layer=srhl_rbf)
# clf = GenELMClassifier(hidden_layer=srhl_tribas)
# clf = GenELMClassifier(hidden_layer=srhl_hardlim)



class ELMTraining(MRJob):

    def mapper(self, _, line):
        # assign each input line to one of M partitions at random
        k = random.randint(1, M)
        yield k, line
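The mapper shards input lines uniformly at random across M partitions; the snippet is truncated before the reducer. A hedged sketch of a matching reducer method for ELMTraining (the comma-separated line format, label-in-last-column layout, and model file naming are all assumptions):

    def reducer(self, key, lines):
        # assemble this shard into a feature matrix; label assumed in last column
        data = np.array([[float(v) for v in line.split(',')] for line in lines])
        X, y = data[:, :-1], data[:, -1]
        clf.fit(X, y)
        # persist one fitted ELM per shard and emit its path
        model_path = 'elm_model_%d_%s.pkl' % (key, uuid.uuid4().hex)
        joblib.dump(clf, model_path)
        yield key, model_path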
Example 10
print("Rights: " + str(rights))
print("Wrongs:" + str(wrongs))
print("Score: " + str(rights / (rights + wrongs) * 100) + "%")
print("\n")

#####ELM with MLP Random Layer#####
from random_layer import MLPRandomLayer
from elm import GenELMClassifier as classifier


def powtanh_xfer(activations, power=1.0):
    return pow(np.tanh(activations), power)


model = classifier(hidden_layer=MLPRandomLayer(n_hidden=100,
                                               activation_func=powtanh_xfer,
                                               activation_args={'power': 3.0}))
model.fit(trainX, trainy)
dump(model, 'ELMMLPModel.bin')  # dump() is presumably joblib.dump; the import is not shown in this snippet

print(
    "##########Testing person identification with ELM with MLP Random Layer model##########"
)
predictions = model.predict(testX)

rights, wrongs = 0, 0
for prediction, actual in zip(predictions, testy):
    if prediction == actual:
        rights += 1
        if showIndividualPredictions:
            print(prediction)
            print("Correct!")
    else:  # tail reconstructed to match the Rights/Wrongs tallies above
        wrongs += 1