# --- Example 1 ---
def FA_ELM(X_train, Y_train, n_hidden=64):
    """Tune ELM input weight/bias with the Firefly Algorithm, then train
    a GenELMRegressor with the best parameters found.

    Parameters
    ----------
    X_train, Y_train : training data, passed to both the optimizer and
        the final ELM fit.
    n_hidden : int, number of hidden neurons for the final ELM
        (default 64).

    Returns
    -------
    The fitted GenELMRegressor.
    """
    time1 = time.time()
    # FA parameter vector: [n, N_iteration, alpha, betamin, gamma]
    # n = population size, N_iteration = number of iterations.
    para = [20, 50, 0.3, 0.2, 1]

    # Simple bounds/limits for the d-dimensional search space.
    # d = number of parameters being optimized: weight in (-1, 1), bias in (0, 1).
    d = 2
    Lb = np.array([-1, 0])  # lower bounds
    Ub = np.array([1, 1])   # upper bounds

    # Run the firefly search.
    [bestsolutio, bestojb] = ffa_mincon_elm(d, Lb, Ub, para, X_train, Y_train)
    bestw = bestsolutio[1]  # best input weight
    bestb = bestsolutio[0]  # best bias

    # Train the ELM with the best parameters.
    # BUG FIX: the original hard-coded n_hidden=64 here, silently ignoring
    # the n_hidden argument; the parameter is now honored.
    rhl = RandomLayer(n_hidden=n_hidden, alpha=1.0)  # alpha=1.0 -> _use_mlp_input=True
    elmr = GenELMRegressor(hidden_layer=rhl)
    elmr.fit(X_train, Y_train, bias=bestb, weights=bestw)
    time2 = time.time()
    useTime = time2 - time1
    print("总用时:%d秒" % (useTime))
    print(bestw)
    print(bestb)
    return elmr
# --- Example 2 ---
def train_ann(X_train, targets, f_name):
    """Train an ELM regression pipeline and persist it to *f_name*.

    Parameters
    ----------
    X_train : training inputs.
    targets : training targets.
    f_name : path the fitted model is written to via joblib.

    Returns
    -------
    (elm, tr_mse) : the fitted pipeline and its training MSE.
    """
    # (A previously commented-out MLPRegressor variant was removed here;
    # it was dead code.)
    # Random hidden layer followed by a linear read-out = ELM.
    elm = pipeline.Pipeline([('rhl',
                              RandomLayer(n_hidden=256,
                                          activation_func='multiquadric',
                                          alpha=0.5,
                                          rbf_width=0.3)),
                             ('lr', LinearRegression(fit_intercept=True))])
    elm.fit(X_train, targets)
    tr_mse = mean_squared_error(targets, predict(elm, X_train))
    print("training mse: ", tr_mse)
    # save model
    joblib.dump(elm, f_name)

    return (elm, tr_mse)
    def _create_random_layer(self):
        """Build a RandomLayer configured from this estimator's init params."""
        layer_params = dict(
            n_hidden=self.n_hidden,
            alpha=self.alpha,
            random_state=self.random_state,
            activation_func=self.activation_func,
            activation_args=self.activation_args,
            user_components=self.user_components,
            rbf_width=self.rbf_width,
        )
        return RandomLayer(**layer_params)
def train_ann(X_train, targets,
              f_name='models/6_12_17/elm/te_pc/elm_XX_te_pc.pkl'):
    """Train a tanh ELM regression pipeline and persist it.

    Parameters
    ----------
    X_train, targets : training data.
    f_name : destination for the joblib dump. The default keeps the
        path that was previously hard-coded, so existing callers are
        unaffected.

    Returns
    -------
    (elm, tr_elm) : the fitted pipeline and its training MSE.
    """
    elm = pipeline.Pipeline([('rhl', RandomLayer(n_hidden=85, activation_func='tanh', alpha=0.65)),
                            ('lr', LinearRegression(fit_intercept=False))])
    elm.fit(X_train, targets)
    tr_elm = mean_squared_error(targets, predict(elm, X_train))
    # save model
    joblib.dump(elm, f_name)

    return (elm, tr_elm)
# --- Example 5 ---
def train_ann(X_train, targets, n_h_neurons):
    """Fit an ELM (random multiquadric hidden layer + linear read-out)
    with *n_h_neurons* hidden units and return (model, training MSE)."""
    hidden = RandomLayer(n_hidden=n_h_neurons,
                         activation_func='multiquadric',
                         alpha=0.5)
    readout = LinearRegression(fit_intercept=False)
    elm = pipeline.Pipeline([('rhl', hidden), ('lr', readout)])
    elm.fit(X_train, targets)
    tr_mse = mean_squared_error(targets, predict(elm, X_train))

    return (elm, tr_mse)
def train_ann(X_train, targets,
              f_name='models/6_12_17/elm/sc5/elm_XX_sc5_pc.pkl'):
    """Train a 120-neuron multiquadric ELM and persist it.

    Parameters
    ----------
    X_train, targets : training data.
    f_name : destination for the joblib dump. The default keeps the
        path that was previously hard-coded, so existing callers are
        unaffected.

    Returns
    -------
    (elm, tr_mse) : the fitted pipeline and its training MSE.
    """
    # training ELM
    elm = pipeline.Pipeline([('rhl', RandomLayer(n_hidden=120, activation_func='multiquadric', alpha=0.7)),
                              ('lr', LinearRegression(fit_intercept=False))])
    elm.fit(X_train, targets)
    tr_mse = mean_squared_error(targets, predict(elm, X_train))
    # save model
    joblib.dump(elm, f_name)

    return (elm, tr_mse)
# --- Example 7 ---
def train_ann(X_train, targets, f_name):
    """Train a 240-neuron multiquadric ELM and report its training MSE.

    NOTE: *f_name* is currently unused — the joblib dump below is
    disabled, so nothing is written to disk.
    """
    hidden_layer = RandomLayer(n_hidden=240, activation_func='multiquadric')
    elm = pipeline.Pipeline([('rhl', hidden_layer),
                             ('lr', LinearRegression(fit_intercept=False))])
    elm.fit(X_train, targets)
    tr_mse = mean_squared_error(targets, predict(elm, X_train))
    print("training mse: ", tr_mse)
    # save model (disabled)
    #joblib.dump(elm, f_name)

    return (elm, tr_mse)
# --- Example 8 ---
def objfun_elm(bw, x_train, y_train, n_hidden):
    """Objective function used by the firefly optimizer.

    bw : length-2 vector holding the ELM bias (bw[0]) and weight (bw[1]).
    n_hidden : number of hidden neurons.

    NOTE(review): the original docstring called this a loss / error
    value, but the value returned is elmr.score (higher is better) —
    confirm the optimizer expects that orientation before changing it.
    """
    bias, weights = bw[0], bw[1]
    rhl = RandomLayer(n_hidden=n_hidden, alpha=1.0)  # alpha=1.0 -> _use_mlp_input=True
    elmr = GenELMRegressor(hidden_layer=rhl)
    elmr.fit(x_train, y_train, bias=bias, weights=weights)
    return elmr.score(x_train, y_train)
# --- Example 9 ---
def define_classification_model(h):
    """Instantiate the classifier selected by config['model_type'].

    Parameters
    ----------
    h : model hyper-parameter — C for 'linearSVM', number of hidden
        units for 'ELM', n_neighbors for 'KNN'; ignored by 'MLP' and
        'linear'.

    Raises
    ------
    ValueError if config['model_type'] is not a known model name.
    (The original silently returned None in that case, deferring the
    failure to some later attribute access.)
    """
    model_type = config['model_type']
    if model_type == 'linearSVM':
        return LinearSVC(C=h)
    elif model_type == 'ELM':
        rl = RandomLayer(n_hidden=h, activation_func='reclinear', alpha=1)
        return GenELMClassifier(hidden_layer=rl)
    elif model_type == 'MLP':
        return MLPClassifier(hidden_layer_sizes=(20, ),
                             max_iter=600,
                             verbose=10,
                             early_stopping=False)
    elif model_type == 'linear':
        return linear_model.SGDClassifier()
    elif model_type == 'KNN':
        return KNeighborsClassifier(n_neighbors=h)
    raise ValueError("unknown model_type: %r" % model_type)
# --- Example 10 ---
def train_ann(X_train, targets,
              f_name='models/2018/23_06_18/elm/diel_diamond/elm_XX_diel_diamond.pkl'):
    """Train a 200-neuron multiquadric ELM and persist it.

    Parameters
    ----------
    X_train, targets : training data.
    f_name : destination for the joblib dump. The default keeps the
        path that was previously hard-coded, so existing callers are
        unaffected.

    Returns
    -------
    (elm, tr_mse) : the fitted pipeline and its training MSE.
    """
    elm = pipeline.Pipeline([('rhl',
                              RandomLayer(n_hidden=200,
                                          activation_func='multiquadric',
                                          alpha=0.69)),
                             ('lr', LinearRegression(fit_intercept=False))])
    elm.fit(X_train, targets)
    tr_mse = mean_squared_error(targets, predict(elm, X_train))
    print("training mse: ", tr_mse)
    # save model
    joblib.dump(elm, f_name)

    return (elm, tr_mse)
# Build a synthetic regression set and a toy problem, then sweep every
# RandomLayer activation function through an ELMClassifier benchmark.
# FIX: converted Python-2 `print af,` / `print af` statements to the
# print() function so this block parses under Python 3 (matching the
# py3 copies of this snippet later in the file).
mrx, mry = make_regression(n_samples=2000, n_targets=4)
mrx_train, mrx_test, mry_train, mry_test = train_test_split(mrx,
                                                            mry,
                                                            test_size=0.2)

xtoy, ytoy = make_toy()
xtoy, ytoy = stdsc.fit_transform(xtoy), stdsc.fit_transform(ytoy)
xtoy_train, xtoy_test, ytoy_train, ytoy_test = train_test_split(xtoy,
                                                                ytoy,
                                                                test_size=0.2)
plot(xtoy, ytoy)

# <codecell>

# RBFRandomLayer tests
for af in RandomLayer.activation_func_names():
    print(af, end=' ')
    elmc = ELMClassifier(activation_func=af)
    tr, ts = res_dist(irx, iry, elmc, n_runs=200, random_state=0)

# <codecell>

elmc.classes_

# <codecell>

for af in RandomLayer.activation_func_names():
    print(af)
    elmc = ELMClassifier(activation_func=af, random_state=0)
    tr, ts = res_dist(dgx, dgy, elmc, n_runs=100, random_state=0)
# --- Example 12 ---
# Load the diabetes dataset plus synthetic/toy regression data, then
# sweep every RandomLayer activation function through an ELMClassifier
# benchmark.
# FIX: converted Python-2 `print af,` / `print af` statements to the
# print() function so this block parses under Python 3 (matching the
# py3 copies of this snippet later in the file).
diabetes = load_diabetes()
dbx, dby = stdsc.fit_transform(diabetes.data), diabetes.target
dbx_train, dbx_test, dby_train, dby_test = train_test_split(dbx, dby, test_size=0.2)

mrx, mry = make_regression(n_samples=2000, n_targets=4)
mrx_train, mrx_test, mry_train, mry_test = train_test_split(mrx, mry, test_size=0.2)

xtoy, ytoy = make_toy()
xtoy, ytoy = stdsc.fit_transform(xtoy), stdsc.fit_transform(ytoy)
xtoy_train, xtoy_test, ytoy_train, ytoy_test = train_test_split(xtoy, ytoy, test_size=0.2)
plot(xtoy, ytoy)

# <codecell>

# RBFRandomLayer tests
for af in RandomLayer.activation_func_names():
    print(af, end=' ')
    elmc = ELMClassifier(activation_func=af)
    tr, ts = res_dist(irx, iry, elmc, n_runs=200, random_state=0)

# <codecell>

elmc.classes_

# <codecell>

for af in RandomLayer.activation_func_names():
    print(af)
    elmc = ELMClassifier(activation_func=af, random_state=0)
    tr, ts = res_dist(dgx, dgy, elmc, n_runs=100, random_state=0)
# Synthetic/toy regression data followed by an activation-function sweep
# over ELMClassifier.
# FIX: converted Python-2 `print af,` / `print af` statements to the
# print() function so this block parses under Python 3 (matching the
# py3 copies of this snippet later in the file).
mrx, mry = make_regression(n_samples=2000, n_targets=4)
mrx_train, mrx_test, mry_train, mry_test = train_test_split(mrx,
                                                            mry,
                                                            test_size=0.2)

xtoy, ytoy = make_toy()
xtoy, ytoy = stdsc.fit_transform(xtoy), stdsc.fit_transform(ytoy)
xtoy_train, xtoy_test, ytoy_train, ytoy_test = train_test_split(xtoy,
                                                                ytoy,
                                                                test_size=0.2)
plot(xtoy, ytoy)

# <codecell>

# RBFRandomLayer tests
for af in RandomLayer.activation_func_names():
    print(af, end=' ')
    elmc = ELMClassifier(activation_func=af)
    tr, ts = res_dist(irx, iry, elmc, n_runs=200, random_state=0)

# <codecell>

elmc.classes_

# <codecell>

for af in RandomLayer.activation_func_names():
    print(af)
    elmc = ELMClassifier(activation_func=af, random_state=0)
    tr, ts = res_dist(dgx, dgy, elmc, n_runs=100, random_state=0)
# --- Example 14 ---
    dbx, dby, test_size=0.2)

# Regression benchmarks: synthetic multi-target data plus a toy problem,
# followed by an activation-function sweep over ELMClassifier.
mrx, mry = make_regression(n_samples=2000, n_targets=4)
mrx_train, mrx_test, mry_train, mry_test = train_test_split(
    mrx, mry, test_size=0.2)

xtoy, ytoy = make_toy()
xtoy = stdsc.fit_transform(xtoy)
ytoy = stdsc.fit_transform(ytoy)
xtoy_train, xtoy_test, ytoy_train, ytoy_test = train_test_split(
    xtoy, ytoy, test_size=0.2)
plot(xtoy, ytoy)

# <codecell>

# RBFRandomLayer tests
for act in RandomLayer.activation_func_names():
    print(act, end=' ')
    elmc = ELMClassifier(activation_func=act)
    tr, ts = res_dist(irx, iry, elmc, n_runs=200, random_state=0)

# <codecell>

elmc.classes_

# <codecell>

for act in RandomLayer.activation_func_names():
    print(act)
    elmc = ELMClassifier(activation_func=act, random_state=0)
    tr, ts = res_dist(dgx, dgy, elmc, n_runs=100, random_state=0)
# --- Example 15 ---
# Synthetic multi-target regression data and a standardized toy problem,
# then an activation-function sweep over ELMClassifier.
mrx, mry = make_regression(n_samples=2000, n_targets=4)
mrx_train, mrx_test, mry_train, mry_test = train_test_split(
    mrx, mry, test_size=0.2)

xtoy, ytoy = make_toy()
xtoy = stdsc.fit_transform(xtoy)
ytoy = stdsc.fit_transform(ytoy)
xtoy_train, xtoy_test, ytoy_train, ytoy_test = train_test_split(
    xtoy, ytoy, test_size=0.2)
plot(xtoy, ytoy)

# <codecell>

# RBFRandomLayer tests
for act in RandomLayer.activation_func_names():
    print(act, end=' ')
    elmc = ELMClassifier(activation_func=act)
    tr, ts = res_dist(irx, iry, elmc, n_runs=200, random_state=0)

# <codecell>

elmc.classes_

# <codecell>

for act in RandomLayer.activation_func_names():
    print(act)
    elmc = ELMClassifier(activation_func=act, random_state=0)
    tr, ts = res_dist(dgx, dgy, elmc, n_runs=100, random_state=0)
# --- Example 16 ---
# Toy-regression demos: a direct ELMRegressor, a Pipeline-based ELM,
# and a GenELMRegressor driven by an explicit RandomLayer.
plot(xtoy, ytoy)

# <codecell>

elmr = ELMRegressor(random_state=0, activation_func='gaussian', alpha=0.0)
elmr.fit(xtoy_train, ytoy_train)
print(elmr.score(xtoy_train, ytoy_train), elmr.score(xtoy_test, ytoy_test))
plot(xtoy, ytoy, xtoy, elmr.predict(xtoy))

# <codecell>

from sklearn import pipeline
from sklearn.linear_model import LinearRegression

# Same idea expressed as a Pipeline: random layer + linear read-out.
random_hidden = RandomLayer(random_state=0, activation_func='multiquadric')
elmr = pipeline.Pipeline([('rhl', random_hidden),
                          ('lr', LinearRegression(fit_intercept=False))])
elmr.fit(xtoy_train, ytoy_train)
print(elmr.score(xtoy_train, ytoy_train), elmr.score(xtoy_test, ytoy_test))
plot(xtoy, ytoy, xtoy, elmr.predict(xtoy))

# <codecell>

rhl = RandomLayer(n_hidden=200, alpha=1.0)
elmr = GenELMRegressor(hidden_layer=rhl)
tr, ts = res_dist(mrx, mry, elmr, n_runs=200, random_state=0)
scatter(tr, ts, alpha=0.1, marker='D', c='r')

# <codecell>

rhl = RBFRandomLayer(n_hidden=15, rbf_width=0.8)