Example #1
	def predict_soft(self, X):
		"""
		Make 'soft' (non-class) prediction of nnet on test data X.
		See constructor docstring for argument description.
		"""
		L = len(self.wts)
		Z = cols((np.ones((mat(X).shape[0],1)),X))							# init input features + constant

		for l in range(L - 1):
			Z = mat(Z) * mat(self.wts[l]).T									# compute linear response of next layer
		Z = cols((np.ones((mat(Z).shape[0],1)), self.sig(Z)))			# apply activation function, then add constant feature

		Z = mat(Z) * mat(self.wts[L - 1]).T									# compute output layer linear response
		return self.sig_0(Z)												# output layer activation function
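
The hidden-layer step must apply the activation self.sig before appending the constant feature (the comment said so, but the original line skipped the call). As a minimal standalone sketch of the same forward pass, assuming a tiny two-layer network with made-up weights, tanh hidden units, and a sigmoid output (none of these values come from the original class):

import numpy as np

wts = [np.array([[ 0.2, -0.1,  0.4],          # hidden layer: 3 units x (bias + 2 inputs)
                 [ 0.3,  0.8, -0.5],
                 [-0.6,  0.1,  0.2]]),
       np.array([[ 0.1,  0.6, -0.2, 0.3]])]   # output layer: 1 unit x (bias + 3 hidden)
sig   = np.tanh                               # assumed hidden activation
sig_0 = lambda z: 1 / (1 + np.exp(-z))        # assumed output activation

X = np.array([[0.5, -1.2],
              [1.0,  0.3]])                   # two data points
Z = np.hstack((np.ones((X.shape[0], 1)), X))  # input features + constant
for W in wts[:-1]:
    Z = np.hstack((np.ones((Z.shape[0], 1)), sig(Z.dot(W.T))))  # activation + constant
print(sig_0(Z.dot(wts[-1].T)))                # soft predictions, shape (2, 1)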
Example #2
def plot_classify_2D(learner, X, Y, pre=lambda x: x):
    """
	Plot data and classifier outputs on two-dimensional data.
	This function plot data (X,Y) and learner.predict(X, Y) 
	together. The learner is is predicted on a dense grid
	covering data X, to show its decision boundary.

	Parameters
	----------
	learner : learner object
		A trained learner object that inherits from one of
		the 'Classify' or 'Regressor' base classes.
	X : numpy array
		N x M array of data; N = number of data, M = dimension
		(number of features) of data.
	Y : numpy array
		1 x N array containing labels corresponding to data points
		in X.
	pre : function object (optional)
		Function that is applied to X before prediction.
	"""
    if twod(X).shape[1] != 2:
        raise ValueError(
            'plot_classify_2d: function can only be called using two-dimensional data (features)'
        )

    plt.plot(X[:, 0], X[:, 1], 'k.')
    ax = plt.xlim() + plt.ylim()  # get current axis limits
    N = 256  # density of evaluation

    # evaluate each point of feature space and predict the class
    X1 = np.linspace(ax[0], ax[1], N)
    X1sp = twod(X1).T * np.ones(N)
    X2 = np.linspace(ax[2], ax[3], N)
    X2sp = np.ones((N, 1)) * X2

    Xfeat = cols((twod(X1sp.flatten()).T, twod(X2sp.flatten()).T))

    # preprocess/create feature vector if necessary
    Xfeat = pre(Xfeat)

    # predict using learner
    pred = learner.predict(Xfeat)

    # plot decision values for space in 'faded' color
    clim = np.unique(Y)
    clim = [clim[0], clim[0] + 1] if len(clim) == 1 else list(clim)
    plt.imshow(np.reshape(pred, (N, N)).T,
               extent=[X1[0], X1[-1], X2[0], X2[-1]],
               cmap=plt.cm.Pastel2)
    plt.clim(*clim)

    plt.show()
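
The two outer products above (X1sp, X2sp) build the N x N evaluation grid by hand. A minimal sketch of the equivalent construction with np.meshgrid (array sizes here are illustrative):

import numpy as np

N = 4                                              # small grid for illustration
X1 = np.linspace(0.0, 1.0, N)
X2 = np.linspace(-1.0, 1.0, N)
X1sp, X2sp = np.meshgrid(X1, X2, indexing='ij')    # X1sp[i,j] = X1[i], X2sp[i,j] = X2[j]
Xfeat = np.column_stack((X1sp.flatten(), X2sp.flatten()))  # (N*N) x 2 grid points
print(Xfeat.shape)                                 # (16, 2)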
Example #3
    def __logistic(self, X):
        """
		This is a helper method that evaluates the logistic function
		for weights self.wts (1 x d + 1) on data X (n x d). Used in:
			__gradient_descent
			predict
		"""
        n, d = twod(X).shape

        X_train = cols((np.ones((n, 1)), twod(X)))

        f = twod(X_train).dot(twod(self.wts).T)
        return 1 / (1 + np.exp(-f))
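
The same computation as a self-contained snippet, with made-up weights and data (the bias-prepending and sigmoid mirror the method above):

import numpy as np

X   = np.array([[ 0.5, 1.0],
                [-2.0, 0.3]])                    # n x d data (n=2, d=2)
wts = np.array([[0.1, -0.4, 0.7]])               # 1 x (d+1) weights, bias first (illustrative)
Xb  = np.hstack((np.ones((X.shape[0], 1)), X))   # prepend constant feature
f   = Xb.dot(wts.T)                              # n x 1 linear response
print(1 / (1 + np.exp(-f)))                      # logistic outputs in (0, 1)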
Example #4
    def predict_soft(self, X):
        """
		Compute the linear response of the ensemble by combining all learners.
		See constructor docstring for argument description.
		"""
        N, M = twod(X).shape

        pred = np.zeros((N, 1))
        for l in self:  # for each learner...
            # ...make a prediction (using -1/+1 convention)
            pred = cols((pred, 2 * l.predict(X) - 1))
        pred = pred[:, 1:]
        return pred.dot(self.alpha)  # weighted sum of votes: un-thresholded linear response
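
The linear response is the alpha-weighted sum of the learners' +/-1 votes (an elementwise product would leave the votes uncombined). A minimal sketch with made-up votes and weights (N=2 points, L=3 learners; values are illustrative):

import numpy as np

votes = np.array([[ 1, -1,  1],          # N x L matrix of +/-1 learner predictions
                  [-1, -1,  1]])
alpha = np.array([0.7, 0.2, 0.5])        # illustrative combination weights
print(votes.dot(alpha))                  # un-thresholded response: [ 1.  -0.4]
print(np.sign(votes.dot(alpha)))         # thresholded class decision: [ 1. -1.]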
Example #5
	def __responses(self, wts, X_in, sig, sig_0):
		"""
		Helper function that computes the layer-wise linear responses (A)
		and saturated activation responses (Z) for a data point. Used in:
			train
		"""
		L = len(wts)
		constant_feat = np.ones((mat(X_in).shape[0],1)).flatten()	# constant feature
		# compute linear combination of inputs
		A = [arr([1])]												# placeholder; the input layer has no linear response
		Z = [concat((constant_feat, X_in))]

		for l in range(1, L):
			A.append(Z[l - 1].dot(wts[l - 1].T))					# compute linear combination of previous layer
			# pass through activation function and add constant feature
			Z.append(cols((np.ones((mat(A[l]).shape[0],1)),sig(A[l]))))

		A.append(arr(mat(Z[L - 1]) * mat(wts[L - 1]).T))
		Z.append(arr(sig_0(A[L])))									# output layer (saturate for classifier, not regressor)

		return A,Z
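
A standalone sketch of the same bookkeeping for a single point, assuming a tiny 2-2-1 network with tanh hidden units and an identity output (weights are made up):

import numpy as np

wts = [np.array([[0.2, -0.1,  0.4],      # hidden layer: 2 units x (bias + 2 inputs)
                 [0.3,  0.8, -0.5]]),
       np.array([[0.1,  0.6, -0.2]])]    # output layer: 1 unit x (bias + 2 hidden)
x = np.array([1.5, -0.7])                # one data point

A = [np.array([1.0])]                            # placeholder; input layer has no linear response
Z = [np.concatenate(([1.0], x))]                 # input features + constant
for l in range(1, len(wts)):
    A.append(Z[l - 1].dot(wts[l - 1].T))         # linear response of layer l
    Z.append(np.concatenate(([1.0], np.tanh(A[l]))))  # activation + constant
A.append(Z[-1].dot(wts[-1].T))                   # output layer linear response
Z.append(A[-1])                                  # identity output (regressor-style)
print(A[-1], Z[-1])                              # final responses for this point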
Example #6
	plt.show()


################################################################################
################################################################################
################################################################################


################################################################################
## MAIN ########################################################################
################################################################################


if __name__ == '__main__':

	X,Y = load_data_from_csv('../data/binary.csv', -1, float)
	X,Y = bootstrap_data(X, Y, 25)
	X = X[:,2:]
	Xtr,Xte,Ytr,Yte = split_data(X, Y, .8)
	knn = KNNClassify(Xtr, Ytr)

	print(cols((X,knn.predict(X))))
	
	plot_classify_2D(knn, X, Y)


################################################################################
################################################################################
################################################################################
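
For comparison, the same split/train/predict flow can be sketched with scikit-learn's k-NN as a stand-in for KNNClassify (synthetic data; this is not the original mltools pipeline):

import numpy as np
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import train_test_split

rng = np.random.default_rng(0)
X = rng.normal(size=(100, 2))                    # synthetic two-dimensional features
Y = (X[:, 0] + X[:, 1] > 0).astype(int)          # labels from a simple linear rule
Xtr, Xte, Ytr, Yte = train_test_split(X, Y, train_size=0.8, random_state=0)

knn = KNeighborsClassifier(n_neighbors=5).fit(Xtr, Ytr)
print(np.column_stack((Xte, knn.predict(Xte))))  # features alongside predictions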
Example #7
def fpoly_pair(X, degree, use_constant=True):
    """
	Create polynomial features of each individual feature (too many cross 
	products).

	Parameters
	----------
	X : numpy array
		N x M array of data.
	degree : int
		The degree.
	use_constant : bool (optional)
		If True (default), include a constant feature.

	Returns
	-------
	Xext : numpy array

	TODO: test more
	"""
    m, n = twod(X).shape

    # geometric series 1 + n + ... + n**degree; ceil guards roundoff, int() is required for array shapes
    npoly = int(np.ceil((n**(degree + 1) - 1) / (n - 1)))
    if use_constant:
        Xext = np.zeros((m, npoly))
        Xext[:, 0] = 1
        Xcur = 1
        k = 1
    else:
        Xext = np.zeros((m, npoly - 1))
        Xcur = 1
        k = 0

    # hard coded to be a shorter length
    if degree == 2:
        Xext[:, k:k + n] = X
        k += n
        Z = np.reshape(X, (m, 1, n))
        X2 = np.zeros((m, 1))
        for i in range(twod(Z).shape[2]):
            X2 = cols((X2, X * Z[:, :, i]))
        X2 = X2[:, 1:]
        # keep only the unique (upper-triangular) pairwise products
        idx = np.where((twod(arr(range(1, n + 1))).T >= arr(range(1, n + 1))).T.ravel())[0]
        K = len(idx)
        Xext[:, k:k + K] = X2[:, idx]
        return Xext[:, 0:k + K]

    for p in range(degree):

        # workaround to make up for numpy's lack of bsxfun
        if isinstance(Xcur, int):  # first pass: Xcur is still the scalar 1
            Xcur = X * Xcur
        else:
            new_Xcur = np.zeros((m, 1))
            for i in range(Xcur.shape[2]):
                new_Xcur = cols((new_Xcur, X * Xcur[:, :, i]))
            Xcur = new_Xcur[:, 1:]

        Xcur = Xcur.reshape((m, np.size(Xcur) // m))  # integer division: reshape requires int dims
        K = Xcur.shape[1]
        Xext[:, k:k + K] = Xcur
        k = k + K
        Xcur = Xcur.reshape((m, 1, K))

    return Xext
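
As a sanity check on the allocation above: for n features and degree d, npoly = ceil((n**(d+1) - 1)/(n - 1)) is the geometric-series count 1 + n + ... + n**d of raw product terms, of which only the unique ones are kept. A hand-built degree-2 expansion for comparison (made-up data):

import numpy as np

X = np.array([[1.0, 2.0],
              [3.0, 4.0]])                # m x n data (m=2, n=2)
m, n = X.shape
degree = 2
npoly = int(np.ceil((n**(degree + 1) - 1) / (n - 1)))
print(npoly)                              # 7 columns allocated: 1 + 2 + 4

# constant, linear terms, and the unique (upper-triangular) pairwise products
pairs = [X[:, i] * X[:, j] for i in range(n) for j in range(i, n)]
Xext = np.column_stack([np.ones(m), X] + pairs)
print(Xext)                               # columns: 1, x1, x2, x1*x1, x1*x2, x2*x2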