Code example #1
File: perceptron.py Project: rcurtin/benchmarks

def BuildModel(self, data, responses):
    # Create and train the classifier. This is a method of the benchmark's
    # wrapper class; Perceptron, RealFeatures and MulticlassLabels come from
    # modshogun. Shogun stores one sample per column, hence data.T.
    model = Perceptron(RealFeatures(data.T), MulticlassLabels(responses))
    if self.iterations:
        model.set_max_iter(self.iterations)
    model.train()
    return model
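
For context, here is a self-contained sketch of how the method above could be exercised. The Benchmark class and its iterations attribute are hypothetical stand-ins for the real benchmark wrapper; only the modshogun calls mirror the snippet itself, and passing MulticlassLabels to Perceptron follows the snippet's own usage.

import numpy as np
from modshogun import Perceptron, RealFeatures, MulticlassLabels

class Benchmark(object):  # hypothetical stand-in for the benchmark wrapper
    def __init__(self, iterations=None):
        self.iterations = iterations

    def BuildModel(self, data, responses):
        # Same body as the snippet above.
        model = Perceptron(RealFeatures(data.T), MulticlassLabels(responses))
        if self.iterations:
            model.set_max_iter(self.iterations)
        model.train()
        return model

# Illustrative data: 200 samples with 2 features each, two classes.
data = np.vstack((np.random.randn(100, 2) + 3, np.random.randn(100, 2) - 3))
responses = np.hstack((np.zeros(100), np.ones(100)))
model = Benchmark(iterations=500).BuildModel(data, responses)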
Code example #2

def classifier_perceptron_modular(n=100, dim=2, distance=5, learn_rate=1., max_iter=1000, num_threads=1, seed=1):
    from numpy import array, hstack, ones, random
    from modshogun import RealFeatures, BinaryLabels
    from modshogun import Perceptron

    random.seed(seed)

    # Produce some (probably) linearly separable training data by hand:
    # two Gaussians at a far enough distance.
    X = array(random.randn(dim, n)) + distance
    Y = array(random.randn(dim, n)) - distance
    X_test = array(random.randn(dim, n)) + distance
    Y_test = array(random.randn(dim, n)) - distance
    label_train_twoclass = hstack((ones(n), -ones(n)))

    # plot(X[0,:], X[1,:], 'x', Y[0,:], Y[1,:], 'o')
    fm_train_real = hstack((X, Y))
    fm_test_real = hstack((X_test, Y_test))

    feats_train = RealFeatures(fm_train_real)
    feats_test = RealFeatures(fm_test_real)

    labels = BinaryLabels(label_train_twoclass)

    perceptron = Perceptron(feats_train, labels)
    perceptron.set_learn_rate(learn_rate)
    perceptron.set_max_iter(max_iter)
    # The perceptron is only guaranteed to converge on separable data.
    perceptron.train()

    # Classify the held-out test points with the trained model.
    perceptron.set_features(feats_test)
    out_labels = perceptron.apply().get_labels()
    return perceptron, out_labels
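
A minimal sketch of invoking the example above (assumes numpy and modshogun are installed; the argument values are just the defaults made explicit):

perceptron, out_labels = classifier_perceptron_modular(n=100, dim=2, distance=5,
                                                       learn_rate=1., max_iter=1000)
# out_labels holds the +/-1 predictions for the 2*n held-out test points.
print(out_labels[:10])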
Code example #3

def classifier_perceptron_graphical(n=100, distance=5, learn_rate=1., max_iter=1000, num_threads=1, seed=None, nperceptrons=5):
    import numpy as np
    import matplotlib.pyplot as plt
    from modshogun import RealFeatures, BinaryLabels
    from modshogun import Perceptron
    from modshogun import MSG_INFO

    # 2D data
    _DIM = 2

    # To get the nice message that the perceptron has converged
    dummy = BinaryLabels()
    dummy.io.set_loglevel(MSG_INFO)

    np.random.seed(seed)

    # Produce some (probably) linearly separable training data by hand:
    # two Gaussians at a far enough distance.
    X = np.array(np.random.randn(_DIM, n)) + distance
    Y = np.array(np.random.randn(_DIM, n))
    label_train_twoclass = np.hstack((np.ones(n), -np.ones(n)))

    fm_train_real = np.hstack((X, Y))
    feats_train = RealFeatures(fm_train_real)
    labels = BinaryLabels(label_train_twoclass)

    perceptron = Perceptron(feats_train, labels)
    perceptron.set_learn_rate(learn_rate)
    perceptron.set_max_iter(max_iter)
    # Keep the randomly set weights below instead of re-initializing on train().
    perceptron.set_initialize_hyperplane(False)

    # Find limits for visualization
    x_min = min(np.min(X[0, :]), np.min(Y[0, :]))
    x_max = max(np.max(X[0, :]), np.max(Y[0, :]))

    y_min = min(np.min(X[1, :]), np.min(Y[1, :]))
    y_max = max(np.max(X[1, :]), np.max(Y[1, :]))

    for i in range(nperceptrons):
        # Randomly initialize the weight vector and bias
        perceptron.set_w(np.random.random(2))
        perceptron.set_bias(np.random.random())

        # Run the perceptron algorithm
        perceptron.train()

        # Construct the hyperplane for visualization. The decision boundary
        # is w^T x + b = 0, so the second coordinate is y = -(w[0]*x + b)/w[1].
        b = perceptron.get_bias()
        w = perceptron.get_w()

        hx = np.linspace(x_min - 1, x_max + 1)
        hy = -(w[0] * hx + b) / w[1]

        plt.plot(hx, hy)

    # Plot the two-class data
    plt.scatter(X[0, :], X[1, :], s=40, marker='o', facecolors='none', edgecolors='b')
    plt.scatter(Y[0, :], Y[1, :], s=40, marker='s', facecolors='none', edgecolors='r')

    # Customize the plot
    plt.axis([x_min - 1, x_max + 1, y_min - 1, y_max + 1])
    plt.title("Rosenblatt's Perceptron Algorithm")
    plt.xlabel('x')
    plt.ylabel('y')
    plt.show()

    return perceptron
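
And a sketch of running the graphical example (assumes matplotlib with an interactive backend; fixing seed makes the random data and initial hyperplanes reproducible):

# Draws the two point clouds and one decision boundary per random restart.
perceptron = classifier_perceptron_graphical(n=100, distance=5, seed=42,
                                             nperceptrons=5)
print(perceptron.get_w(), perceptron.get_bias())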