Example #1
'''
Created on May 22, 2017

@author: a3438
'''
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from SimulateModelData import simulateModelData
from sklearn.preprocessing import StandardScaler
import sklearn.neural_network

if __name__ == '__main__':
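    # Fit a small MLP classifier to simulated two-class data and check its predictions on the training set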
    n_samples = 10
    nFeatures = 2
    numClasses = 2
    nld = simulateModelData(nFeatures, n_samples)
    nld.simulateClassification(numClasses)
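    # Standardize the simulated features (zero mean, unit variance) before training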
    ss = StandardScaler()
    ss.fit(nld.X)
    XNew = ss.transform(nld.X)
    print(nld.X)
    print(nld.y)

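    # Small MLP: two hidden layers of 5 and 2 units, L2 penalty alpha=1e-5; the lbfgs solver suits small datasets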
    clf = sklearn.neural_network.MLPClassifier(solver='lbfgs',
                                               alpha=1e-5,
                                               hidden_layer_sizes=(5, 2),
                                               random_state=1)
    clf.fit(XNew, nld.y)
    y1 = clf.predict(XNew)
    # Largest and mean label residual on the training data
    print(np.max(y1 - nld.y), np.average(y1 - nld.y))
Example #2
'''
Created on Mar 7, 2017

@author: a3438
'''
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from SimulateModelData import simulateModelData
from sklearn.svm import SVR
from sklearn.preprocessing import StandardScaler

if __name__ == '__main__':
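    # Fit epsilon-SVR to simulated non-linear data, then evaluate it on an independent simulation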
    n_samples = 200
    n_features = 2
    nld = simulateModelData(n_features, n_samples)
    nld.simulateNonLinear(sigma=0.2)
    ss = StandardScaler()
    ss.fit(nld.X)
    XNew = ss.transform(nld.X)

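    # Support vector regression with the default RBF kernel; C sets the regularization strength, epsilon the width of the penalty-free tube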
    clf = SVR(C=1.0, epsilon=0.1)
    clf.fit(XNew, nld.y)
    y1 = clf.predict(XNew)
    print(np.max(y1 - nld.y), np.average(y1 - nld.y))

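    # Score the fitted model on a freshly simulated dataset, reusing the scaler fitted on the training data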
    nld2 = simulateModelData(n_features, n_samples)
    nld2.simulateNonLinear(sigma=0.2)
    XNew2 = ss.transform(nld2.X)
    y2 = clf.predict(XNew2)
    print(np.max(y2 - nld2.y), np.average(y2 - nld2.y))
Example #3
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from SimulateModelData import simulateModelData

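# Fix the NumPy random seed so the simulated datasets are reproducible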
np.random.seed(0)

# Generate datasets. We choose a size big enough to show the scalability
# of the algorithms, but not so big that running times become excessive.
n_samples = 1500
n_features = 10
n_classes = 4
noisy_circles = datasets.make_circles(n_samples=n_samples,
                                      factor=.5,
                                      noise=.05)
noisy_moons = datasets.make_moons(n_samples=n_samples, noise=.05)
blobs = datasets.make_blobs(n_samples=n_samples, random_state=8)

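# One mean vector per class, drawn uniformly from [0, 10), with a common sigma of 4.0 for every feature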
meanVectors = np.random.uniform(0.0, 10.0, size=[n_classes, n_features])
sigmaVectors = 4.0 * np.ones([n_classes, n_features])
cd = simulateModelData(n_features, n_samples)
cd.simulateClass(n_classes, mu=meanVectors, sigma=sigmaVectors)
normalClusters = (cd.X, cd.y)

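# Uniform random points with no cluster structure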
no_structure = np.random.rand(n_samples, 2), None

colors = np.array([x for x in 'bgrcmykbgrcmykbgrcmykbgrcmyk'])
#colors = np.hstack([colors] * 20)

datasets = [noisy_circles, noisy_moons, blobs, normalClusters]

plt.figure(figsize=(len(datasets), 9.5))
plt.subplots_adjust(left=.02,
                    right=.98,
                    bottom=.001,
                    top=.96,