Example No. 1
import numpy as np

from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
from multilayer_perceptron import MultilayerPerceptronClassifier

# Dataset
X = np.loadtxt('../feature/5grams_count_mc_features')
y = np.loadtxt('../data/tag_mc')
X -= X.min()   # shift features so the minimum is 0
X /= X.max()   # scale features into the [0, 1] range
X_train, X_test, y_train, y_test = train_test_split(X, y)

# Instantiation
mlp = MultilayerPerceptronClassifier(activation='relu', hidden_layer_sizes=(20,), max_iter=200)

# Train
mlp.fit(X_train, y_train)

# Report
preds = mlp.predict(X_test)
tags = y_test
print(classification_report(tags, preds))
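
scikit-learn now ships an equivalent estimator, sklearn.neural_network.MLPClassifier (available since version 0.18). A minimal sketch of the same train/report workflow with the built-in class, reusing the feature and label files from the example above:

# Sketch only: the same pipeline with scikit-learn's built-in MLPClassifier.
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
from sklearn.neural_network import MLPClassifier

X = np.loadtxt('../feature/5grams_count_mc_features')
y = np.loadtxt('../data/tag_mc')
X -= X.min()
X /= X.max()
X_train, X_test, y_train, y_test = train_test_split(X, y)

clf = MLPClassifier(activation='relu', hidden_layer_sizes=(20,), max_iter=200)
clf.fit(X_train, y_train)
print(classification_report(y_test, clf.predict(X_test)))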
"""
==============================================
Using multilayer perceptron for classification
==============================================

This example trains a multi-layer perceptron on the digits dataset and then
reports the training score together with the predicted and true labels.
"""

from sklearn.datasets import load_digits

from multilayer_perceptron import MultilayerPerceptronClassifier

# Load dataset
digits = load_digits()
X, y = digits.data, digits.target

# Create MLP Object
# Please see line 562 in "multilayer_perceptron.py" for more information
# about the parameters
mlp = MultilayerPerceptronClassifier(hidden_layer_sizes=(50, 20),
                                     max_iter=200, alpha=0.02)

# Train MLP
mlp.fit(X, y)

# Report scores
print "Training Score = ", mlp.score(X, y)
print "Predicted labels = ", mlp.predict(X)
print "True labels = ", y
Example No. 3
from sklearn.datasets import load_digits
from multilayer_perceptron import MultilayerPerceptronClassifier, MultilayerPerceptronRegressor
import numpy as np
from matplotlib import pyplot as plt

# contrive the "exclusive or" problem
X = np.array([[0,0], [1,0], [0,1], [1,1]])
y = np.array([0, 1, 1, 0])

# MLP training performance
mlp = MultilayerPerceptronClassifier(hidden_layer_sizes=(5,), max_iter=200, alpha=0.02)
mlp.fit(X, y)

print "Training Score = ", mlp.score(X,y)
print "Predicted labels = ", mlp.predict(X)
print "True labels = ", y 
# plot decision function

xx, yy = np.meshgrid(np.linspace(-1, 2, 500),
                     np.linspace(-1, 2, 500))
Z = mlp.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)

plt.imshow(Z,
           extent=(xx.min(), xx.max(), yy.min(), yy.max()), aspect='auto',
           origin='lower', cmap=plt.cm.PuOr_r)
contours = plt.contour(xx, yy, Z, levels=[0], linewidths=2,
                       linestyles='--')
plt.scatter(X[:, 0], X[:, 1], s=70, c=y, cmap=plt.cm.Paired)

plt.axis([-1, 2, -1, 2])
plt.show()
Example No. 4
import datetime

from PIL import Image

# The trained classifier `mlp` and the helper functions loadSubImage,
# getFeatures1 and getHaarFeatures are defined elsewhere in the original project.

im_test = Image.open("/home/jonathan/Baobab/fromGE/256.png")
box_width = 40
box_height = 40

imheight = 700   # TODO: read these dimensions from the image itself
imwidth = 1200
x, y = 0, 0
baobabs = []
test = []   # feature buffer for the classifier, reset after every prediction
# Slide a box_width x box_height window over the image in 10-pixel steps,
# classifying each crop with the trained MLP.
while y < (imheight - box_height):
    x = 0
    while x < (imwidth - box_width):
        region = im_test.crop((x, y, x + box_width, y + box_height))
        small_image = loadSubImage(region)
        haars = getFeatures1(small_image)
        test.append(haars)
        results = mlp.predict(test)
        test = []
        if results[0] == 1:
            baobabs.append((x, y))
        x += 10
    y += 10
print "narrowed down from ~8000 to",len(baobabs)
print "starting second round:",datetime.datetime.now()
baobabs2 = []
# TODO: a new network still needs to be trained on getHaarFeatures output
# before this second pass can produce predictions.
for b in baobabs:
    x, y = b
    region = im_test.crop((x, y, x + box_width, y + box_height))
    small_image = loadSubImage(region)
    haars = getHaarFeatures(small_image)
    test.append(haars)
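
A note on the scanning loop above: mlp.predict is called once per window, which is slow when there are thousands of windows. A minimal sketch of the same first-round scan that collects every window first and classifies them in a single batch (it assumes the trained mlp, the image im_test and the helpers loadSubImage and getFeatures1 from the original project):

# Sketch only: batch the sliding-window features and predict once.
windows = []
features = []
y = 0
while y < (imheight - box_height):
    x = 0
    while x < (imwidth - box_width):
        region = im_test.crop((x, y, x + box_width, y + box_height))
        features.append(getFeatures1(loadSubImage(region)))
        windows.append((x, y))
        x += 10
    y += 10

results = mlp.predict(features)
baobabs = [pos for pos, label in zip(windows, results) if label == 1]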
Example No. 5
import numpy as np
from sklearn.utils import shuffle
from sklearn.metrics import classification_report, accuracy_score
from multilayer_perceptron import MultilayerPerceptronClassifier

# The training data X, y is assumed to have been loaded earlier in the original script.

# Data normalization
X -= X.min()
X /= X.max()


# Instantiation
mlp = MultilayerPerceptronClassifier(activation='relu', hidden_layer_sizes=(100,), max_iter=8)


# Training
mlp.fit(X, y)


# Test dataset
X = np.loadtxt('../test_data')
y = np.loadtxt('../test_labels')
X, y = shuffle(X, y)


# Data normalization
# Note: ideally the scaling should reuse the min/max computed on the training
# data rather than the statistics of the test set itself.
X -= X.min()
X /= X.max()


# Predictions
preds = mlp.predict(X)


# Report
print(classification_report(y, preds))
print('Accuracy: ' + str(accuracy_score(y, preds)))
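
One caveat in the example above: the test set is rescaled with its own min and max, so its features may not be on the scale the network was trained on. A minimal sketch of the same normalization with scikit-learn's MinMaxScaler, fitted on the training split only; the names X_train, y_train, X_test, y_test are assumptions, since the example does not show how its training data is loaded, and note that MinMaxScaler rescales each feature column independently, unlike the global min/max used above:

# Sketch only: scale both splits with statistics learned from the training data.
# X_train, y_train, X_test, y_test are assumed to be loaded elsewhere.
from sklearn.preprocessing import MinMaxScaler

scaler = MinMaxScaler()
X_train_scaled = scaler.fit_transform(X_train)  # fit on the training data only
X_test_scaled = scaler.transform(X_test)        # reuse the training min/max

mlp = MultilayerPerceptronClassifier(activation='relu', hidden_layer_sizes=(100,), max_iter=8)
mlp.fit(X_train_scaled, y_train)
preds = mlp.predict(X_test_scaled)
print(classification_report(y_test, preds))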