# Example 1
class QuadraticDiscriminantAnalysisImpl():
    """Thin wrapper that records its constructor arguments and delegates
    all estimator work to an underlying ``Op`` model.

    NOTE(review): ``Op`` is defined elsewhere in this project; presumably a
    scikit-learn-compatible estimator -- confirm against the import site.
    """

    def __init__(self,
                 priors=None,
                 reg_param=0.0,
                 store_covariance=False,
                 tol=0.0001,
                 store_covariances=None):
        # Capture the arguments so they can be replayed on the wrapped model.
        self._hyperparams = dict(
            priors=priors,
            reg_param=reg_param,
            store_covariance=store_covariance,
            tol=tol,
            store_covariances=store_covariances,
        )
        self._wrapped_model = Op(**self._hyperparams)

    def fit(self, X, y=None):
        """Fit the wrapped model, forwarding ``y`` only when it was given."""
        fit_args = (X,) if y is None else (X, y)
        self._wrapped_model.fit(*fit_args)
        return self

    def predict(self, X):
        """Delegate to the wrapped model's ``predict``."""
        return self._wrapped_model.predict(X)

    def predict_proba(self, X):
        """Delegate to the wrapped model's ``predict_proba``."""
        return self._wrapped_model.predict_proba(X)

    def decision_function(self, X):
        """Delegate to the wrapped model's ``decision_function``."""
        return self._wrapped_model.decision_function(X)
# Example 2
class QDA(object):
    """Convenience wrapper around sklearn's QuadraticDiscriminantAnalysis."""

    def __init__(self,
                 priors=None,
                 reg_param=0.,
                 store_covariance=False,
                 tol=1.0e-4):
        """
        :param priors: class priors, array, optional, shape=[n_classes]
        :param reg_param: float, optional; regularizes the covariance estimates
        :param store_covariance: boolean; if True, compute and store the
            covariance matrices in self.covariance_
        :param tol: absolute threshold used in the rank estimation
        """
        self.model = QuadraticDiscriminantAnalysis(
            priors=priors,
            reg_param=reg_param,
            store_covariance=store_covariance,
            tol=tol)

    def fit(self, x, y):
        """Fit the wrapped QDA model on samples ``x`` and labels ``y``."""
        self.model.fit(X=x, y=y)

    def get_params(self, deep=True):
        """Return the wrapped model's hyperparameters."""
        return self.model.get_params(deep=deep)

    def predict(self, x):
        """Predict class labels for ``x``."""
        return self.model.predict(X=x)

    def predict_log_dict(self, x):
        # NOTE(review): misnamed -- this returns log-probabilities, not a dict.
        # Name kept for backward compatibility with existing callers.
        return self.model.predict_log_proba(X=x)

    def predict_proba(self, x):
        """Return class membership probabilities for ``x``."""
        return self.model.predict_proba(X=x)

    def score(self, x, y, sample_weight=None):
        """Return mean accuracy on ``x`` against labels ``y``."""
        return self.model.score(X=x, y=y, sample_weight=sample_weight)

    def set_params(self, **params):
        """Forward hyperparameter updates to the wrapped model."""
        self.model.set_params(**params)

    def decision_function(self, x):  # Apply the decision function to an array of samples.
        return self.model.decision_function(X=x)

    def get_attribute(self):
        """Return the fitted attributes of the wrapped model as a tuple."""
        covariance = self.model.covariance_  # per-class covariance matrices, list of array-like of shape (n_features, n_features)
        # BUGFIX: sklearn's fitted attribute is `means_` (trailing underscore),
        # matching covariance_/priors_/rotations_/... below; `means` raised AttributeError.
        means = self.model.means_  # class means, array-like of shape (n_classes, n_features)
        priors = self.model.priors_  # class proportions, sum to 1, array-like of shape (n_classes)
        rotations = self.model.rotations_  # n_k = min(n_features, number of elements in class k) list of arrays,
        # rotations of the Gaussians
        scalings = self.model.scalings_  # list of arrays; for each class k an array of shape [n_k] holding the
        # scaling of the Gaussians, i.e. the variance in the rotated coordinate system
        classes = self.model.classes_  # array-like, shape (n_classes,), the distinct class labels

        return covariance, means, priors, rotations, scalings, classes
def qda_reg(X_train, X_test, Y_train, Y_test, title, alpha):
	"""Fit a QDA classifier on iris data, print a classification report, and
	plot the pairwise decision boundaries together with train/test points.

	:param X_train, Y_train: training samples and labels
	:param X_test, Y_test: held-out samples and labels
	:param title: used for the plot title, report header and output filename
	:param alpha: unused here; kept for signature compatibility with callers

	NOTE(review): the scatter calls rely on module-level globals
	X_train1..X_test3 -- confirm they are defined by the importing script.
	"""
	qda = QuadraticDiscriminantAnalysis(store_covariance=True)
	qda.fit(X_train, Y_train)
	Y_predicted_qda = qda.predict(X_test)
	# BUGFIX: was a Python 2 print statement (SyntaxError under Python 3 and
	# inconsistent with the print() call just below).
	print(title + " Classification Report")
	target_names = ['Iris-setosa', 'Iris-versicolor','Iris-virginica']
	print(classification_report(Y_test, Y_predicted_qda, target_names=target_names))

	# Plot the decision boundary on a fixed 200x100 grid covering the data range.
	nx, ny = 200, 100
	x_min, x_max = (0.0,10.0)
	y_min, y_max = (0.0,3.0)
	xx, yy = np.meshgrid(np.linspace(x_min, x_max, nx),np.linspace(y_min, y_max, ny))
	Z = qda.decision_function(np.c_[xx.ravel(), yy.ravel()])
	Z2 = Z[:, 2].reshape(xx.shape)
	Z1 = Z[:, 1].reshape(xx.shape)
	Z0 = Z[:, 0].reshape(xx.shape)

	# Pairwise differences of per-class decision values: the zero level set of
	# each difference is the boundary between the two classes.
	bound1 = Z1 - Z0
	bound2 = Z2 - Z1

	# Plot where the difference of decision functions is 0.
	# BUGFIX: plt.contour takes `colors=`, not `color=`.
	CS = plt.contour(xx, yy, bound1, levels=[0], colors='k')
	plt.contour(xx, yy, bound2, levels=[0], colors='k')

	ax = plt.gca()
	# BUGFIX: ax.hold(True) removed -- Axes.hold was dropped in Matplotlib 2.x
	# and holding is the default behavior anyway.

	ax.scatter(X_train1[:,0], X_train1[:,1],color='r',label='setosa-train')
	ax.scatter(X_train2[:,0], X_train2[:,1],color='b',label='versicolor-train')
	ax.scatter(X_train3[:,0], X_train3[:,1],color='g',label='virginica-train')

	ax.scatter(X_test1[:,0], X_test1[:,1],color='y',label='setosa-test')
	ax.scatter(X_test2[:,0], X_test2[:,1],color='c',label='versicolor-test')
	ax.scatter(X_test3[:,0], X_test3[:,1],color='m',label='virginica-test')
	plt.title(title + ' Analysis')

	ax.set_xlabel('Petal Length')
	ax.set_ylabel('Petal Width')
	ax.set_ylim([0,3])
	plt.legend(numpoints=1, ncol=3, fontsize=8)
	plt.savefig(title +'.png',format='png')
	plt.show()
# Example 4
class QuadraticDiscriminantAnalysisImpl:
    """Generic delegating wrapper: stores arbitrary hyperparameters and
    forwards every estimator call to an underlying ``Op`` model.

    NOTE(review): ``Op`` comes from elsewhere in this project; presumably a
    scikit-learn-compatible estimator -- confirm at the import site.
    """

    def __init__(self, **hyperparams):
        # Keep the raw keyword arguments and build the wrapped estimator from them.
        self._hyperparams = hyperparams
        self._wrapped_model = Op(**hyperparams)

    def fit(self, X, y=None):
        """Fit the wrapped estimator; ``y`` is forwarded only when provided."""
        if y is None:
            self._wrapped_model.fit(X)
        else:
            self._wrapped_model.fit(X, y)
        return self

    def predict(self, X):
        """Delegate to the wrapped model's ``predict``."""
        return self._wrapped_model.predict(X)

    def predict_proba(self, X):
        """Delegate to the wrapped model's ``predict_proba``."""
        return self._wrapped_model.predict_proba(X)

    def decision_function(self, X):
        """Delegate to the wrapped model's ``decision_function``."""
        return self._wrapped_model.decision_function(X)
# Compare LDA and QDA decision surfaces over the current axes' extent.
nx, ny = 200, 100
x_min, x_max = plt.xlim()
y_min, y_max = plt.ylim()
xx, yy = np.meshgrid(np.linspace(x_min, x_max, nx),
                     np.linspace(y_min, y_max, ny))
# Hoisted: the same evaluation grid is used for every decision_function call below.
grid = np.c_[xx.ravel(), yy.ravel()]

Z_LDA = lda.decision_function(grid)
Z_LDA.shape = xx.shape
ax = plt.subplot(2, 2, 2)

ax.contourf(xx, yy, Z_LDA, cmap=cm_bright)
ax.scatter(X0[:, 0], X0[:, 1], marker='.', color='red')
ax.scatter(X1[:, 0], X1[:, 1], marker='.', color='blue')
plt.title('LDA')

Z_QDA = qda.decision_function(grid)
Z_QDA.shape = xx.shape
ax = plt.subplot(2, 2, 3)

ax.contourf(xx, yy, Z_QDA, cmap=cm_bright)
ax.scatter(X0[:, 0], X0[:, 1], marker='.', color='red')
ax.scatter(X1[:, 0], X1[:, 1], marker='.', color='blue')
plt.title('QDA')

# LDA on polynomially expanded features (degree 2 by default).
poly = PolynomialFeatures(include_bias=False)

X_poly = poly.fit_transform(X)
lda_poly = LinearDiscriminantAnalysis(solver="svd", store_covariance=True)
lda_poly.fit(X_poly, y)

# BUGFIX: previously called qda.decision_function on the raw grid (copy/paste
# slip) -- the freshly fit lda_poly model must be evaluated, and on the same
# polynomial expansion it was trained with.
Z_LDA_poly = lda_poly.decision_function(poly.transform(grid))