Example #1
    @staticmethod
    def sigmoid_kernel(in_gamma, r, d, x1, x2):
        # sigmoid kernel: tanh(gamma * <x1, x2> + r); d is unused but kept for a uniform signature
        return np.tanh(in_gamma * np.dot(x1, x2.T) + r)

    @staticmethod
    def polynomial_kernel(in_gamma, r, d, x1, x2):
        # polynomial kernel: (gamma * <x1, x2> + r) ** d
        return np.power(in_gamma * np.dot(x1, x2.T) + r, d)

    def predict(self, features):
        # decision function: f(x) = sum_i alpha_i * K(x, sv_i) + b
        pre = self.intercept
        for a, vec in zip(
                self.alpha.reshape(self.support_vectors.shape[0], -1),
                self.support_vectors):
            pre += a * self.kernel(self.gamma, self.r, self.d, features, vec)
        return np.asarray(pre)[0]


if __name__ == '__main__':
    features, labels = BreastCancer.features, BreastCancer.label
    train_feature, test_feature, train_label, test_label = ds.split(
        features, labels)
    regr = svm.SVR()
    regr.fit(train_feature, train_label)  # training step
    pred = regr.predict(test_feature) > 0.5  # prediction step
    print(metrics.accuracy(test_label, pred))
    # manually reproduce the SVM's forward-pass computation
    my_model = CustomSvr(regr)
    my_pred = my_model.predict(test_feature) > 0.5
    print(metrics.accuracy(test_label, my_pred))
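
CustomSvr's constructor is not shown in this excerpt. Below is a minimal sketch of how it could copy the fitted parameters out of the scikit-learn SVR; the class name CustomSvrSketch and the exact field mapping are assumptions, while support_vectors_, dual_coef_, intercept_, coef0 and degree are standard attributes of a fitted SVR:

class CustomSvrSketch:
    def __init__(self, model):
        # self.kernel would be bound to one of the static kernels above, depending on model.kernel
        self.support_vectors = model.support_vectors_  # (n_SV, n_features)
        self.alpha = model.dual_coef_                  # signed dual coefficients, shape (1, n_SV)
        self.intercept = model.intercept_              # bias term, shape (1,)
        self.gamma = model._gamma                      # numeric gamma resolved at fit time (private sklearn attribute)
        self.r = model.coef0                           # kernel offset r
        self.d = model.degree                          # polynomial degree d
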
Example #2

class CustomComplementNB:
    def __init__(self, model):
        # transpose the fitted log-weights so that X @ W yields one column per class
        self.feature_log_pro_ = model.feature_log_prob_.T

    def predict_proba(self, X):
        # joint log likelihood of every sample under every class
        jll = np.dot(X, self.feature_log_pro_)
        # normalise in log space over the classes, then map back to probabilities
        log_prob_x = np.log(np.sum(np.exp(jll), axis=1))
        result = jll - np.atleast_2d(log_prob_x).T
        return np.exp(result)


if __name__ == '__main__':
    feature, label = Iris.features, Iris.label
    train_feature, test_feature, train_label, test_label = data_split.split(
        feature, label)
    nativeBayes = bayes.GaussianNB()
    nativeBayes.fit(train_feature, train_label)
    pred = nativeBayes.predict_proba(test_feature)[0]
    print("pred", pred)
    myBayes = CustomGaussianBayes(nativeBayes)
    my_pred = myBayes.predict_proba(test_feature)[0]
    print("my_pred", my_pred)

    complementBayes = bayes.ComplementNB()
    complementBayes.fit(train_feature, train_label)
    pred = complementBayes.predict_proba(test_feature)[0]
    print("pred", pred)
    myComplementBayes = CustomComplementNB(complementBayes)
    my_pred = myComplementBayes.predict_proba(test_feature)[0]
    print("my_pred", my_pred)
Example #3
def train_step(images, labels):
    # standard GradientTape training step; the header and loss computation are an
    # assumed reconstruction, as only the update step appears in this excerpt
    with tf.GradientTape() as tape:
        predictions = model(images, training=True)
        loss = loss_object(labels, predictions)
    gradients = tape.gradient(loss, model.trainable_variables)
    optimizer.apply_gradients(zip(gradients, model.trainable_variables))
    # print(loss.numpy())


def test_step(images, labels):
    predictions = model(images, training=False)
    print(predictions)
    # take the arg-max over the class dimension to get predicted labels
    predictions = np.argmax(predictions, axis=1)
    print("accuracy = ", classify_metrics.accuracy(labels, predictions))


if __name__ == '__main__':
    data = TextClassify1(CharSegmenter.segment)
    features, labels = data.features, data.labels
    train_bin_feature, test_bin_feature, train_bin_label, test_bin_label = data_split.split(features, labels)
    model = MyRnnModel(data.word_dic.size() + 1)
    loss_object = tf.keras.losses.SparseCategoricalCrossentropy()
    optimizer = tf.keras.optimizers.Adam()
    EPOCHS = 50
    BATCH_SIZE = 32
    for i in range(EPOCHS):
        # step through the training data in mini-batches of BATCH_SIZE
        for batch in range(0, len(train_bin_feature) - BATCH_SIZE, BATCH_SIZE):
            print(i, batch)
            # sequences are truncated to their first 100 token ids
            train_step(train_bin_feature[batch:batch + BATCH_SIZE][:, :100], train_bin_label[batch:batch + BATCH_SIZE])
        print("-------------------------")
        test_step(test_bin_feature[0:BATCH_SIZE][:, :100], test_bin_label[0:BATCH_SIZE])
        # inspect a few individual predictions; use a separate index so the epoch counter i is not shadowed
        for k in range(10):
            result = model.custom_predict(test_bin_feature[0:BATCH_SIZE][:, :100][k])
            print(result)