Example #1
import numpy as np
import pandas as pd
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import mean_squared_error
from sklearn.svm import SVR
from sklearn_extensions.extreme_learning_machines.elm import GenELMClassifier, ELMRegressor
from sklearn_extensions.extreme_learning_machines.random_layer import RBFRandomLayer, MLPRandomLayer


def make_classifiers():
    """

    :return:
    """

    names = ["ELM(10,tanh)", "ELM(10,tanh,LR)", "ELM(10,sinsq)", "ELM(10,tribas)", "ELM(hardlim)", "ELM(20,rbf(0.1))"]

    nh = 10

    # pass user defined transfer func
    sinsq = (lambda x: np.power(np.sin(x), 2.0))
    srhl_sinsq = MLPRandomLayer(n_hidden=nh, activation_func=sinsq)

    # use internal transfer funcs
    srhl_tanh = MLPRandomLayer(n_hidden=nh, activation_func='tanh')
    srhl_tribas = MLPRandomLayer(n_hidden=nh, activation_func='tribas')
    srhl_hardlim = MLPRandomLayer(n_hidden=nh, activation_func='hardlim')

    # use gaussian RBF
    srhl_rbf = RBFRandomLayer(n_hidden=nh*2, rbf_width=0.1, random_state=0)
    log_reg = LogisticRegression()

    classifiers = [GenELMClassifier(hidden_layer=srhl_tanh),
                   GenELMClassifier(hidden_layer=srhl_tanh, regressor=log_reg),
                   GenELMClassifier(hidden_layer=srhl_sinsq),
                   GenELMClassifier(hidden_layer=srhl_tribas),
                   GenELMClassifier(hidden_layer=srhl_hardlim),
                   GenELMClassifier(hidden_layer=srhl_rbf)]

    return names, classifiers


def extension():
    global elm_error
    csv_path = "C:/Users/Krish/Desktop/maj_proj/data1.csv"
    user = input("Enter User ID: ")
    print("\n")
    train = pd.read_csv(csv_path)  # pandas handles opening and closing the file
    X = train.values[:, 0:4]
    y = train.values[:, 0]

    trainX = []
    trainY = []
    trainY1 = []
    for i in range(len(X)):
        usr = X[i][0]    # user id column
        x_loc = X[i][2]  # latitude
        y_loc = X[i][3]  # longitude
        if str(usr) == user:
            trainY.append(x_loc)
            trainY1.append(y_loc)
            trainX.append([x_loc, y_loc])

    trainX = np.asarray(trainX)
    trainY = np.asarray(trainY)
    trainY1 = np.asarray(trainY1)
    print(trainX)

    #svm_rbf = SVR(kernel='rbf', C=100, gamma=0.1, epsilon=.1)
    #svm_rbf.fit(trainX, trainY)
    #print(svm_rbf.predict(trainX))

    svm_rbf = SVR(kernel='rbf', C=100, gamma=0.1, epsilon=.1)
    #svm_rbf.fit(trainX, trainY1)
    #print(svm_rbf.predict(trainX))

    # ELMRegressor builds its own random hidden layer internally, so no
    # explicit MLPRandomLayer is required here
    cls = ELMRegressor(regressor=svm_rbf)
    cls.fit(trainX, trainY)
    y_pred = cls.predict(trainX)

    svm_rbf2 = SVR(kernel='rbf', C=100, gamma=0.1, epsilon=.1)  # separate regressor so the first model is left untouched
    cls = ELMRegressor(regressor=svm_rbf2)
    cls.fit(trainX, trainY1)
    y_pred1 = cls.predict(trainX)

    err = []
    for i in range(len(y_pred)):
        err.append([y_pred[i], y_pred1[i]])
    err = np.asarray(err)
    elm_error = mean_squared_error(trainX, err)  # MSE between actual and predicted (lat, lon) pairs

    print("\nELM Extension Next Predicted Sequence is:\n")
    print("Latitude : " + str(y_pred[-1]) + "\n")
    print("Longitude : " + str(y_pred1[-1]) + "\n")
    print("ELM MSE Error : " + str(elm_error))
Example #3
    def fit(self, X, y):
        n_features = X.shape[-1]  # only referenced by the commented-out alternative below
        self.regressor = GenELMRegressor(hidden_layer=MLPRandomLayer(
            n_hidden=128, activation_func='sigmoid'))
        #mdl = ELM(30, 1)
        #mdl.add_neurons(16, 'sigm')
        #mdl.add_neurons(64, 'sigm')
        #mdl.add_neurons(32, 'sigm')
        #self.regressor = ELM(n_features, 1)
        #self.regressor.add_neurons(16, 'sigm')

        self.regressor.fit(X, y)
        return self
def make_classifiers():
    names = [
        "ELM(10,tanh)", "ELM(10,tanh,LR)", "ELM(10,sinsq)", "ELM(10,tribas)",
        "ELM(hardlim)", "ELM(20,rbf(0.1))"
    ]

    nh = 10

    # pass user defined transfer func
    sinsq = (lambda x: np.power(np.sin(x), 2.0))
    srhl_sinsq = MLPRandomLayer(n_hidden=nh, activation_func=sinsq)

    # use internal transfer funcs
    srhl_tanh = MLPRandomLayer(n_hidden=nh, activation_func='tanh')
    srhl_tribas = MLPRandomLayer(n_hidden=nh, activation_func='tribas')
    srhl_hardlim = MLPRandomLayer(n_hidden=nh, activation_func='hardlim')

    # use gaussian RBF
    srhl_rbf = RBFRandomLayer(n_hidden=nh * 2, rbf_width=0.1, random_state=0)
    #log_reg = LogisticRegression()
    '''
    classifiers = [GenELMClassifier(hidden_layer=srhl_tanh),
                   #GenELMClassifier(hidden_layer=srhl_tanh, regressor=log_reg),
                   GenELMClassifier(hidden_layer=srhl_sinsq),
                   GenELMClassifier(hidden_layer=srhl_tribas),
                   GenELMClassifier(hidden_layer=srhl_hardlim),
                   GenELMClassifier(hidden_layer=srhl_rbf)]
    '''
    '''
    from sklearn_extensions.extreme_learning_machines.elm import ELMClassifier
    classifiers = [ELMClassifier(n_hidden=30, rbf_width=0.01, random_state=0, alpha=0.1)]
    '''
    # NOTE: a single RBF classifier is returned even though `names` above
    # still lists six variants; HIDDEN_NODE_COUNT is a module-level constant
    # defined outside this snippet
    classifiers = [
        GenELMClassifier(hidden_layer=RBFRandomLayer(
            n_hidden=HIDDEN_NODE_COUNT, rbf_width=0.05, random_state=0))
    ]

    return names, classifiers
Example #5
    def _pred_op_choice(self, args, ops):
        if len(self._solutions) < 10:  # args.population_size // 4:
            logger.debug('population size is too small')
            return random.choice(ops)
        else:

            def vectorize(args):
                return [
                    args.depth, args.lr, args.ent_coef, args.value_coef,
                    args.frame_repeat
                ], args.fitness

            n_hidden = 1024

            xs, ys = [], []
            for solution in self._solutions:
                x, y = vectorize(solution.args)
                xs.append(x)
                ys.append(y)
            xs = np.array(xs)
            ys = np.array(ys)

            x_scaler = StandardScaler()
            x_scaler.fit(xs)
            ys_ = (ys - min(ys)) / (max(ys) - min(ys))  # min-max scale fitness; degenerate if all values are equal

            hidden_layer = MLPRandomLayer(n_hidden=n_hidden,
                                          activation_func='tanh')
            net = GenELMRegressor(hidden_layer=hidden_layer)
            xs_ = x_scaler.transform(xs)
            net.fit(xs_, ys_)
            pred = net.predict(xs_)
            pred_error = ((ys_ - pred)**2).mean()
            print('pred error: {:5.4f}'.format(pred_error))
            logger.debug('pred error: {:5.4f}'.format(pred_error))
            with open('pred_error.txt', 'at') as f:
                f.write('{:5.4f}\n'.format(pred_error))

            xs = []
            for op in ops:
                args_, _ = op(copy.deepcopy(args), None)
                x, _ = vectorize(args_)
                xs.append(x)
            xs = np.array(xs)

            xs_ = x_scaler.transform(xs)
            ys = net.predict(xs_)

            return ops[np.argmax(ys)]
    def __init__(self,
                 hidden_layer=MLPRandomLayer(random_state=0),
                 regressor=None):
        """

        :param hidden_layer:
        :param regressor:
        :return:
        """

        super(GenELMRegressor, self).__init__(hidden_layer, regressor)

        self.coefs_ = None
        self.fitted_ = False
        self.hidden_activations_ = None
    def __init__(self,
                 hidden_layer=MLPRandomLayer(random_state=0),
                 binarizer=LabelBinarizer(-1, 1),
                 regressor=None):
        """

        :param hidden_layer:
        :param binarizer:
        :param regressor:
        :return:
        """

        super(GenELMClassifier, self).__init__(hidden_layer, regressor)

        self.binarizer = binarizer
        self.classes_ = None
        self.genelm_regressor_ = GenELMRegressor(hidden_layer, regressor)
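
For context, GenELMClassifier delegates classification to its internal GenELMRegressor after binarizing the labels. The fit method below is a paraphrased sketch of that flow, not the library source verbatim.

    def fit(self, X, y):
        # record the classes, map labels to {-1, +1} targets, then
        # solve the underlying ELM regression problem
        self.classes_ = np.unique(y)
        y_bin = self.binarizer.fit_transform(y)
        self.genelm_regressor_.fit(X, y_bin)
        return self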
Example #8
names = ['zero', 'one']
plot_confusion_matrix(preds, labels, names, title='ANN-MLP confusion matrix')

print('Classification report: ')
print(classification_report(labels, preds, target_names=names))

#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------

#Applying ELM

# Define a 10-fold cross-validation generator (to reduce variance):
cv = KFold(n_splits=10, shuffle=True, random_state=42)
# CSP-based pipeline (superseded by the xDawn pipeline below)
elm_model = MLPRandomLayer(n_hidden=100000, activation_func='tanh')
clf = make_pipeline(CSP(n_components=len(picks), reg=None, log=True, norm_trace=False),
                    GenELMClassifier(hidden_layer=elm_model))

# Apply XdawnCovariances and TangentSpace instead of CSP
n_components = 1
elm_model = MLPRandomLayer(n_hidden=100000, activation_func='tanh')
clf = make_pipeline(XdawnCovariances(n_components),
                    TangentSpace(metric='riemann'),
                    GenELMClassifier(hidden_layer=elm_model))


preds = np.zeros(len(labels))

for train_idx, test_idx in cv.split(labels):
    y_train, y_test = labels[train_idx], labels[test_idx]
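    # -- hedged completion: the original excerpt is truncated here --
    # `epochs_data` is a hypothetical name for the epoch array fed to the
    # pipeline; the real variable does not appear in this excerpt
    clf.fit(epochs_data[train_idx], y_train)
    preds[test_idx] = clf.predict(epochs_data[test_idx])

print(classification_report(labels, preds, target_names=names))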
Example #9
import pandas as pd
pima = pd.read_csv('pima-indians-diabetes.csv', encoding="shift-jis")
pima.columns = [
    'pregnant', 'plasmaGlucose', 'bloodP', 'skinThick', 'serumInsulin',
    'weight', 'pedigree', 'age', 'diabetes'
]
from sklearn.model_selection import train_test_split
y = pima['diabetes']
X = pima.drop(['diabetes'], axis=1)
nh = 4
X_train, X_test, y_train, y_test = train_test_split(X,
                                                    y,
                                                    test_size=0.2,
                                                    random_state=54,
                                                    shuffle=True)
from sklearn_extensions.extreme_learning_machines.elm import GenELMClassifier
from sklearn_extensions.extreme_learning_machines.random_layer import RBFRandomLayer, MLPRandomLayer
srhl_tanh = MLPRandomLayer(n_hidden=nh, activation_func='tanh')
srhl_rbf = RBFRandomLayer(n_hidden=nh * 2, rbf_width=0.1, random_state=0)
clf1 = GenELMClassifier(hidden_layer=srhl_tanh)
clf1.fit(X_train, y_train)
print(clf1.score(X_test, y_test))
'''
dic=dict(zip(X.columns,clf.feature_importances_))
for item in sorted(dic.items(), key=lambda x: x[1], reverse=True):
    print(item[0],round(item[1],4))
'''
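
srhl_rbf is constructed above but never used; a natural follow-up, added here as an assumption rather than taken from the original, scores the RBF variant the same way:

clf2 = GenELMClassifier(hidden_layer=srhl_rbf)
clf2.fit(X_train, y_train)
print(clf2.score(X_test, y_test))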
Example #10
# transpose for cuda ELM
train = features1.transpose()
test = features2.transpose()
Y_train_T = Y_train.transpose()
Y_test_T = Y_test.transpose()

### save the data in csv file for ELM-CUDA
np.savetxt('cuda_elm/features_cifar100/train_features.csv',
           train,
           delimiter=',')
np.savetxt('cuda_elm/features_cifar100/test_features.csv', test, delimiter=',')
np.savetxt('cuda_elm/features_cifar100/train_labels.csv',
           Y_train_T,
           delimiter=',')
np.savetxt('cuda_elm/features_cifar100/test_labels.csv',
           Y_test_T,
           delimiter=',')

############ try with ELM on CPU and compare with our ELM-CUDA #################
# convert back to original labels
y_train = np.argmax(Y_train, axis=-1)
y_test = np.argmax(Y_test, axis=-1)
from sklearn_extensions.extreme_learning_machines.elm import GenELMClassifier
from sklearn_extensions.extreme_learning_machines.random_layer import RBFRandomLayer, MLPRandomLayer
clf = GenELMClassifier(
    hidden_layer=MLPRandomLayer(n_hidden=200, activation_func='tanh'))
clf.fit(features1, y_train)
res = clf.score(features2, y_test)
print("ELM score:", res * 100)
Example #11
embark_location = pd.get_dummies(titanic_data['Embarked'], drop_first=True)
titanic_data.drop(['Sex', 'Embarked'], axis=1, inplace=True)
# `gender` comes from an earlier pd.get_dummies on the 'Sex' column (outside this excerpt)
titanic_dmy = pd.concat([titanic_data, gender, embark_location], axis=1)
titanic_dmy.drop(['Pclass'], axis=1, inplace=True)
titanic_dmy.drop(['Q'], axis=1, inplace=True)

X = titanic_dmy.iloc[:, [1, 2, 3, 4, 5, 6]].values
y = titanic_dmy.iloc[:, 0].values

X = StandardScaler().fit_transform(X)
X_train, X_test, y_train, y_test = train_test_split(X,
                                                    y,
                                                    test_size=.3,
                                                    random_state=25)

nh = 10

# First ELM: MLP-based random layer with a sigmoid activation function
srhl_sigmoid = MLPRandomLayer(n_hidden=nh, activation_func='sigmoid')
elm_model = GenELMClassifier(hidden_layer=srhl_sigmoid)
elm_model.fit(X_train, y_train)
score = elm_model.score(X_test, y_test)
print(score)

# Second ELM: RBF-based random network
srhl_rbf = RBFRandomLayer(n_hidden=nh * 2, rbf_width=0.1, random_state=0)
elm_model = GenELMClassifier(hidden_layer=srhl_rbf)
elm_model.fit(X_train, y_train)
score = elm_model.score(X_test, y_test)
print(score)
Example #12
def make_classifier():
    # use internal transfer funcs
    srhl_tanh = MLPRandomLayer(n_hidden=10, activation_func='tanh')

    return GenELMClassifier(hidden_layer=srhl_tanh)
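
Hypothetical usage of make_classifier(); the training and test arrays are assumptions:

clf = make_classifier()
clf.fit(X_train, y_train)  # X_train / y_train assumed to exist in the caller
print(clf.score(X_test, y_test))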