Code Example #1
File: elm.py Project: cash2one/antoubao
    def fit(self, X, y):
        """
        Fit the model using X, y as training data.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape [n_samples, n_features]
            Training vectors, where n_samples is the number of samples
            and n_features is the number of features.

        y : array-like of shape [n_samples, n_outputs]
            Target values (class labels in classification, real numbers in
            regression)

        Returns
        -------
        self : object
            Returns an instance of self.
        """
        # Build the random hidden layer from this wrapper's hyper-parameters
        rhl = SimpleRandomHiddenLayer(n_hidden=self.n_hidden,
                                      activation_func=self.activation_func,
                                      activation_args=self.activation_args,
                                      random_state=self.random_state)

        # Delegate the actual fitting to an internal ELMClassifier
        self.elm_classifier_ = ELMClassifier(hidden_layer=rhl)
        self.elm_classifier_.fit(X, y)

        return self
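As a reference for how this wrapper is meant to be used, here is a minimal, hypothetical usage sketch; the class name SimpleELMClassifier and its constructor signature are assumptions inferred from the attributes used above (n_hidden, activation_func, activation_args, random_state):

# Hypothetical usage sketch -- the wrapper class name and import path are assumptions.
from sklearn.datasets import load_iris
from elm import SimpleELMClassifier  # assumed name of the class whose fit() is shown above

iris = load_iris()
clf = SimpleELMClassifier(n_hidden=50, activation_func='tanh', random_state=0)
clf.fit(iris.data, iris.target)               # builds the hidden layer, then fits
print(clf.elm_classifier_.predict(iris.data[:5]))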
Code Example #2
File: elm.py Project: cash2one/antoubao
    def __init__(self,
                 hidden_layer=SimpleRandomHiddenLayer(random_state=0),
                 regressor=None):

        super(ELMClassifier, self).__init__(hidden_layer, regressor)

        self.classes_ = None
        # Binarize class labels to -1/+1 targets for the internal regressor
        self.binarizer_ = LabelBinarizer(-1, 1)
        # Classification is performed as regression on the binarized labels
        self.elm_regressor_ = ELMRegressor(hidden_layer, regressor)
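The two positional arguments map onto scikit-learn's neg_label and pos_label, so class labels become -1/+1 targets for the underlying regressor. A quick illustration of what the binarizer produces:

from sklearn.preprocessing import LabelBinarizer

lb = LabelBinarizer(neg_label=-1, pos_label=1)
print(lb.fit_transform([0, 1, 2, 1]))
# [[ 1 -1 -1]
#  [-1  1 -1]
#  [-1 -1  1]
#  [-1  1 -1]]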
Code Example #3
def make_regressors():
    nh = 10
    names = ["ELM(10,tanh,LR)"]
    srhl_tanh = SimpleRandomHiddenLayer(n_hidden=nh,
                                        activation_func='tanh',
                                        random_state=0)
    log_reg = LogisticRegression()
    regressors = [ELMRegressor(srhl_tanh, regressor=log_reg)]
    return names, regressors
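An illustrative call; the dataset is arbitrary, and integer targets are used because the output layer here is a LogisticRegression:

from sklearn.datasets import load_iris

iris = load_iris()
names, regressors = make_regressors()
for name, reg in zip(names, regressors):
    reg.fit(iris.data, iris.target)   # integer targets suit the LogisticRegression output layer
    print(name)
    print(reg.predict(iris.data[:5]))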
Code Example #4
File: elm.py Project: cash2one/antoubao
    def __init__(self,
                 hidden_layer=SimpleRandomHiddenLayer(random_state=0),
                 regressor=None):

        super(ELMRegressor, self).__init__(hidden_layer, regressor)

        self.coefs_ = None               # output-layer weights, computed in fit()
        self.fitted_ = False             # set to True once fit() has run
        self.hidden_activations_ = None  # cached hidden-layer activations
Code Example #5
def make_classifiers():

    names = [
        "ELM(10,tanh)", "ELM(10,tanh,LR)", "ELM(10,sinsq)", "ELM(10,tribas)",
        "ELM(hardlim)", "ELM(20,rbf(0.1))"
    ]

    nh = 10

    # pass user defined transfer func
    sinsq = (lambda x: np.power(np.sin(x), 2.0))
    srhl_sinsq = SimpleRandomHiddenLayer(n_hidden=nh,
                                         activation_func=sinsq,
                                         random_state=0)

    # use internal transfer funcs
    srhl_tanh = SimpleRandomHiddenLayer(n_hidden=nh,
                                        activation_func='tanh',
                                        random_state=0)

    srhl_tribas = SimpleRandomHiddenLayer(n_hidden=nh,
                                          activation_func='tribas',
                                          random_state=0)

    srhl_hardlim = SimpleRandomHiddenLayer(n_hidden=nh,
                                           activation_func='hardlim',
                                           random_state=0)

    # use gaussian RBF
    srhl_rbf = RBFRandomHiddenLayer(n_hidden=nh * 2, gamma=0.1, random_state=0)

    log_reg = LogisticRegression()

    classifiers = [
        ELMClassifier(srhl_tanh),
        ELMClassifier(srhl_tanh, regressor=log_reg),
        ELMClassifier(srhl_sinsq),
        ELMClassifier(srhl_tribas),
        ELMClassifier(srhl_hardlim),
        ELMClassifier(srhl_rbf)
    ]

    return names, classifiers
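An illustrative comparison loop over the returned classifiers; the dataset and split are arbitrary (on older scikit-learn versions train_test_split lives in sklearn.cross_validation):

from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split

iris = load_iris()
X_train, X_test, y_train, y_test = train_test_split(iris.data, iris.target,
                                                    test_size=0.2, random_state=0)
names, classifiers = make_classifiers()
for name, clf in zip(names, classifiers):
    clf.fit(X_train, y_train)
    print("%s: %.3f" % (name, clf.score(X_test, y_test)))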
    print "test_data 1%:", test_data.count()
    # 将数据集划分为训练集 与 无类标的数据集  3比7
    train_data, unlabel_data = split_data.randomSplit(
        [0.01, 0.99], 7
    )  # train_data.first() [4.6, 3.1, 1.5, 0.2, 0]   unlabel_data.first() #[5.1, 3.5, 1.4, 0.2, 0]
    unlabel_data.persist()
    # Training-set feature values (class label excluded), used for training
    train_array = train_data.map(
        lambda x: x[0:feature_num]).collect()  # [4.6, 3.1, 1.5, 0.2]
    # Training-set class labels
    train_label = train_data.map(lambda x: x[class_index]).collect()

    start = time()

    # Create the hidden layer
    srh = SimpleRandomHiddenLayer(activation_func=activation,
                                  n_hidden=hiddenLayer_num)
    # Create the ELM classifier
    elmc = ELMClassifier(hidden_layer=srh)
    for i in range(iter_num):
        print "-" * 20 + " %d train" % (i + 1) + "-" * 20
        # ###############  ELM training  #############
        print "train_array_num:", len(train_array)
        # Train the classifier
        elmc.fit(train_array, train_label)
        pred_class = elmc.predict_class(test_array)
        # Classification accuracy
        soc = accuracy_score(pred_class, test_label)
        print "test_soc:", soc

        # Predict on the unlabeled pool: soft-max each sample's output vector, compute its entropy, sort by entropy in descending order, and keep the top select_num samples
        select_result = unlabel_data.map(lambda x: (enry(x), x)).sortByKey(
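The excerpt is cut off above, and the entropy scorer enry is not shown. A hypothetical reconstruction of what it might compute, based on the comment (soft-max of the classifier's output vector, then Shannon entropy); the function body and the use of elmc.decision_function are assumptions:

import numpy as np

def enry(sample):
    # Hypothetical: score the sample's features, soft-max the output vector,
    # and return its Shannon entropy as an uncertainty measure.
    scores = elmc.decision_function([sample[0:feature_num]])[0]
    exp_scores = np.exp(scores - np.max(scores))
    probs = exp_scores / exp_scores.sum()
    return float(-np.sum(probs * np.log(probs + 1e-12)))

In a real Spark job the fitted elmc would also need to be available on the workers, e.g. via a broadcast variable.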
Code Example #7
File: regress_platform.py Project: cash2one/antoubao
def make_regressor(func, nh):
    srhl = SimpleRandomHiddenLayer(n_hidden=nh,
                                   activation_func=func,
                                   random_state=0)
    return ELMRegressor(srhl, regressor=LogisticRegression())
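For example (hypothetical call):

reg = make_regressor('tanh', 10)   # 10 tanh hidden units, LogisticRegression output layer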
Code Example #8
    print "\nTime: %.3f secs" % (time() - start_time)

    print "Test Min: %.3f Mean: %.3f Max: %.3f SD: %.3f" % (
        min(test_res), np.mean(test_res), max(test_res), np.std(test_res))
    print "Train Min: %.3f Mean: %.3f Max: %.3f SD: %.3f" % (
        min(train_res), np.mean(train_res), max(train_res), np.std(train_res))
    print
    return (train_res, test_res)


stdsc = StandardScaler()

iris = load_iris()
irx, iry = stdsc.fit_transform(iris.data), iris.target
irx_train, irx_test, iry_train, iry_test = train_test_split(irx,
                                                            iry,
                                                            test_size=0.2)

srh = SimpleRandomHiddenLayer(activation_func='sigmoid', n_hidden=500)
elmc = ELMClassifier(hidden_layer=srh)
# elmc = ELMClassifier(SimpleRandomHiddenLayer(activation_func='sigmoid'))
# print "SimpleRandomHiddenLayer(activation_func='sigmoid')"
# tr,ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)
# plt.hist(tr), plt.hist(ts)
# plt.show()

elmc.fit(irx_train, iry_train)
r = elmc.predict(irx_test)
print r
res = elmc.score(irx_test, iry_test)
print res
Code Example #9
# <codecell>

# RBF tests
elmc = ELMClassifier(RBFRandomHiddenLayer(activation_func='gaussian'))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

elmc = ELMClassifier(
    RBFRandomHiddenLayer(activation_func='poly_spline', gamma=2))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

elmc = ELMClassifier(RBFRandomHiddenLayer(activation_func='multiquadric'))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

# Simple tests
elmc = ELMClassifier(SimpleRandomHiddenLayer(activation_func='sine'))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

elmc = ELMClassifier(SimpleRandomHiddenLayer(activation_func='tanh'))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

elmc = ELMClassifier(SimpleRandomHiddenLayer(activation_func='tribas'))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

elmc = ELMClassifier(SimpleRandomHiddenLayer(activation_func='sigmoid'))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

elmc = ELMClassifier(SimpleRandomHiddenLayer(activation_func='hardlim'))
tr, ts = res_dist(irx, iry, elmc, n_runs=100, random_state=0)

# <codecell>
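res_dist is used throughout these cells but is not shown in this excerpt. A hypothetical reconstruction of what it likely does, based on how it is called (n_runs, random_state) and on the min/mean/max/SD summary printed in Code Example #8; every detail below is an assumption, not the project's actual implementation:

import numpy as np
from sklearn.model_selection import train_test_split  # sklearn.cross_validation on older installs

def res_dist(X, y, clf, n_runs=100, random_state=None, test_size=0.2):
    # Hypothetical sketch: re-split, refit, and collect train/test accuracies
    # so their distributions can be summarised or histogrammed.
    rng = np.random.RandomState(random_state)
    train_res, test_res = [], []
    for _ in range(n_runs):
        X_tr, X_te, y_tr, y_te = train_test_split(
            X, y, test_size=test_size, random_state=rng.randint(0, 2 ** 31 - 1))
        clf.fit(X_tr, y_tr)
        train_res.append(clf.score(X_tr, y_tr))
        test_res.append(clf.score(X_te, y_te))
    print("Test  Min: %.3f Mean: %.3f Max: %.3f SD: %.3f" % (
        min(test_res), np.mean(test_res), max(test_res), np.std(test_res)))
    return train_res, test_res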