Code example #1
# -*- coding: utf-8 -*-
# AdaBoost.MR
from adaboostmr import *
from numpy import mat, zeros
from sklearn import metrics

import time
from data_preprocess import loadData

classes_name = [
    'BRICKFACE', 'SKY', 'FOLIAGE', 'CEMENT', 'WINDOW', 'PATH', 'GRASS'
]

class_num = len(classes_name)
# Load the training and test splits of the segmentation data
train_x, train_y, test_x, test_y = loadData('segmentation.data',
                                            'segmentation.test')

# AdaBoost.MR
start = time.time()

# Build the +1/-1 one-vs-rest label matrix expected by AdaBoost.MR:
# entry (j, i) is +1 if sample j belongs to class i, otherwise -1.
train_y_mat = mat(zeros((len(train_y), class_num)))

for j in range(len(train_y)):
    for i in range(class_num):
        if train_y[j] == classes_name[i]:
            train_y_mat[j, i] = 1
        else:
            train_y_mat[j, i] = -1

model = AdaBoostMR(class_num=class_num, max_iter=40)  # decision-tree-based AdaBoost; max_iter is the maximum number of boosting rounds
model.fit(train_x, train_y_mat)
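# The nested loop above can also be written with NumPy broadcasting. A minimal
# sketch of an equivalent vectorized construction (not part of the original
# example; assumes train_y is a sequence of class-name strings):
#   import numpy as np
#   labels = np.asarray(classes_name)
#   train_y_mat = mat(np.where(np.asarray(train_y)[:, None] == labels, 1, -1))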
Code example #2
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn import svm
import numpy as np
from time import time
from tensorflow import keras
import data_preprocess
import deep_NN
import pdb


def evaluate_accuracy(predict, test):
    """Return the fraction of predictions that match the true labels."""
    correct_proportion = np.sum(predict == test) / len(test)
    return correct_proportion
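# A minimal usage sketch (hypothetical `clf`): with any fitted sklearn-style
# classifier, held-out accuracy could be computed as
#   acc = evaluate_accuracy(clf.predict(X_test), y_test)
# e.g. evaluate_accuracy(np.array([0, 1, 1]), np.array([0, 1, 2])) returns 2/3.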


# %%
X, y = data_preprocess.loadData()
n_classes = len(np.unique(y))

# Split the data into training and testing sets
rng = np.random.RandomState(0)
X_train, X_test, y_train, y_test = train_test_split(X,
                                                    y,
                                                    test_size=0.25,
                                                    random_state=rng)

y_predicted = {}

# %% KNN
for weights in ['uniform', 'distance']:
    t0 = time()
    KNN = KNeighborsClassifier(n_neighbors=n_classes, weights=weights)