Example #1
def test_svm():
    clf = svm.SVC(C=c, kernel='linear', tol=1e-3)
    clf.fit(X, y)
    pd.plot_data(X, y)
    vb.visualize_boundary(clf, X, 0, 4.5, 1.5, 5)
    plt.show()
    input('Program paused. Press ENTER to continue')
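# Presumed context for the snippet above: every name here is an assumption
# inferred from the calls in test_svm (pd.plot_data, vb.visualize_boundary),
# not confirmed by the source.
import matplotlib.pyplot as plt
from sklearn import svm
import plotData as pd              # would provide plot_data(X, y)
import visualizeBoundary as vb     # would provide visualize_boundary(clf, X, ...)

c = 1  # SVC regularization strength; X and y would be loaded elsewhere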
Example #2
def plot_Decision_Boundary(X, Y, theta):
    plot_data(X[:, 1:3], Y)

    if X.shape[1] <= 3:
        plot_x = np.array([np.min(X[:, 1]) - 2, np.max(X[:, 1]) + 2])
        plot_y = (-1 / theta[2]) * (theta[1] * plot_x + theta[0])

        plt.plot(plot_x, plot_y)
        plt.legend(['Decision Boundary', 'Admitted', 'Not admitted'])
        plt.axis([30, 100, 30, 100])
    else:
        u = np.linspace(-1, 1.5, 50)
        v = np.linspace(-1, 1.5, 50)
        z = np.zeros((u.size, v.size))

        for i in range(0, u.size):
            for j in range(0, v.size):
                z[i, j] = np.dot(
                    map_feature(u[i], v[j], 6),
                    theta,
                )

        z = z.T  # transpose
        cs = plt.contour(u,
                         v,
                         z,
                         levels=[0],  # 'levels' (plural); 'level' is not a contour kwarg
                         colors='b')
        plt.legend(
            [cs.collections[0]],
            ['Decision Boundary'])  # cs.collections[0] is the boundary line, paired with the label
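# Example #2 relies on a map_feature helper that is not shown. A minimal
# sketch under the usual course convention (all polynomial terms
# x1**(i-j) * x2**j up to the given degree, 28 terms for degree 6);
# the repo's actual implementation may differ.
import numpy as np

def map_feature(x1, x2, degree=6):
    """Map a feature pair to all polynomial terms up to `degree`."""
    x1 = np.atleast_1d(x1).astype(float)
    x2 = np.atleast_1d(x2).astype(float)
    out = [np.ones_like(x1)]
    for i in range(1, degree + 1):
        for j in range(i + 1):
            out.append(x1 ** (i - j) * x2 ** j)
    return np.stack(out, axis=-1).squeeze()  # shape (28,) for scalar inputs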
Example #3
def plot_decision_boundary(theta, X, y):
    """
    to plot classifier's decision boundary
    """
    plot_data(X[:, 1:3], y)

    if X.shape[1] <= 3:
        # Only need two points to define a line, so choose two endpoints
        plot_x = np.array([np.min(X[:, 1]) - 2, np.max(X[:, 1]) + 2])

        # Calculate the decision boundary line
        plot_y = (-1 / theta[2]) * (theta[1] * plot_x + theta[0])
        plt.plot(plot_x, plot_y)
        plt.legend(['Decision Boundary', 'Admitted', 'Not admitted'], loc=1)
        plt.axis([30, 100, 30, 100])
    else:
        # Here is the grid range
        u = np.linspace(-1, 1.5, 50)
        v = np.linspace(-1, 1.5, 50)
        z = np.zeros((u.size, v.size))

        # Evaluate z = theta*x over the grid
        for i in range(0, u.size):
            for j in range(0, v.size):
                z[i, j] = np.dot(map_feature(u[i], v[j]), theta)
        z = z.T

        # Plot z = 0
        # Notice you need to specify the range [0, 0]
        cs = plt.contour(u, v, z, levels=[0], colors='r')  # contour has no 'label' kwarg; the legend below handles it
        plt.legend([cs.collections[0]], ['Decision Boundary'])
Example #4
def visualize_boundary_linear(X, y, model):
    """Plot the linear decision boundary learned by the SVM
    and overlay the training data on it.
    """

    w = model.coef_.flatten()
    b = model.intercept_.flatten()
    xp = np.linspace(min(X[:, 0]), max(X[:, 0]), 100)
    yp = -(w[0] * xp + b) / w[1]
    plot_data(X, y)
    plt.plot(xp, yp, '-b')
Example #5
def visualize_boundary(X, y, model):
    plot_data(X, y)

    x1plot = np.linspace(np.min(X[:, 0]), np.max(X[:, 0]), 100)
    x2plot = np.linspace(np.min(X[:, 1]), np.max(X[:, 1]), 100)

    X1, X2 = np.meshgrid(x1plot, x2plot)
    vals = np.zeros(X1.shape)

    for i in range(X1.shape[1]):
        this_X = np.c_[X1[:, i], X2[:, i]]
        vals[:, i] = model.predict(this_X)

    plt.contour(X1, X2, vals, colors='b', levels=[0])
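# A hedged usage sketch for visualize_boundary; the RBF parameters below are
# illustrative assumptions, not values from the source.
import matplotlib.pyplot as plt
from sklearn import svm

clf = svm.SVC(C=1, kernel='rbf', gamma=30)  # larger gamma -> tighter boundary
clf.fit(X, y)                               # assumes X, y already loaded
visualize_boundary(X, y, clf)
plt.show()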
Example #6
import numpy as np
import matplotlib.pyplot as plt

from computeCost import compute_cost
from gradientDescent import gradient_descent
from plotData import plot_data


# ===================== Part 1: Plotting =====================
print('Plotting Data...')
data = np.loadtxt('ex1data1.txt', delimiter=',', usecols=(0, 1))  # columns to read
X = data[:, 0]
y = data[:, 1]
m = y.size

plt.ion() # Turn interactive mode on
plt.figure(0) # Creates a new figure
plot_data(X, y)

input('Program paused. Press ENTER to continue')


# ===================== Part 2: Gradient descent =====================
print('Running Gradient Descent...')

X = np.c_[np.ones(m), X]  # Add a column of ones to X
theta = np.zeros(2)  # initialize fitting parameters

# Some gradient descent settings
iterations = 1500
alpha = 0.01

# Compute and display initial cost
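# A hedged continuation sketch: the snippet stops at the initial-cost step.
# compute_cost's argument order is assumed; the repo's may differ.
J = compute_cost(X, y, theta)
print('Initial cost: {:0.2f}'.format(J))
# For ex1data1 with theta = [0, 0], the course's expected value is about 32.07.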
Example #7
import scipy.io as scio
from sklearn import svm
import plotData as pd  # module alias implied by the pd.plot_data call below

# ===================== Part 1: Loading and Visualizing Data =====================
# We start the exercise by first loading and visualizing the dataset.
# The following code will load the dataset into your environment and
# plot the data.

print('Loading and Visualizing data ... ')

# Load from ex6data1:
data = scio.loadmat('ex6data1.mat')
X = data['X']
y = data['y'].flatten()
m = y.size

# Plot training data
pd.plot_data(X, y)

input('Program paused. Press ENTER to continue')

# ===================== Part 2: Training Linear SVM =====================
# The following code will train a linear SVM on the dataset and plot the
# decision boundary learned
#

print('Training Linear SVM')

# You should try to change the C value below and see how the decision
# boundary varies (e.g., try C = 1000)

c = 1000
clf = svm.SVC(C=c, kernel='linear', tol=1e-3)
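# The snippet ends mid-step; presumably it continues by fitting the classifier
# and plotting the learned boundary, along these lines (the vb alias for a
# visualizeBoundary module is an assumption):
clf.fit(X, y)
print('Training finished')
vb.visualize_boundary_linear(X, y, clf)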
Example #8
import numpy as np
from warmUpExercise import warm_up_exercise  # module layout assumed from the course exercises
from plotData import plot_data

print(warm_up_exercise())

input('Program paused. Press <ENTER> to continue.\n')

# ======================= Part 2: Plotting =======================

print('Plotting Data ...\n')

data = np.loadtxt('ex1data1.txt', delimiter=',')
x = data[:, [0]]
y = data[:, [1]]
m = len(y)  # Number of training examples

# Plot Data

plot_data(x, y)

input('Program paused. Press <ENTER> to continue.\n')

# =================== Part 3: Cost and Gradient descent ===================

x = np.concatenate((np.ones([m, 1]), x),
                   axis=1)  # Add a column of ones to x as first column
theta = np.zeros([2, 1])  # Initialize fitting parameters

# Some gradient descent settings
num_iters = 1500
alpha = 0.01

print('\nTesting the cost function ...\n')
# Compute and display initial cost
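# A minimal sketch of the cost function this part is about to call, assuming
# the standard J(theta) = (1/(2m)) * sum((X*theta - y)^2); the actual
# computeCost module is not shown in this snippet.
def compute_cost(x, y, theta):
    """Squared-error cost for linear regression (column-vector convention)."""
    m = len(y)
    errors = x @ theta - y                       # shape (m, 1)
    return (errors.T @ errors).item() / (2 * m)

print('Initial cost: {:.2f} (expected approx 32.07)'.format(compute_cost(x, y, theta)))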
Example #9
import numpy as np
import plotData
import costFunction

x_col_num = 0
with open("../machine-learning-ex2/ex2/ex2data1.txt") as file:
    line_list = file.readlines()
    m = len(line_list)
    feature_num = len(line_list[0].split(",")) - 1  # columns per line minus 1 for the label y (x0 is added later)
    # initialize x_data, y_data
    x_data = np.zeros((m, feature_num))
    y_data = np.zeros((m, 1))
    # assign values to x_data, y_data
    for i in range(m):
        line_temp = line_list[i].split(",")
        x_data[i, :] = line_temp[:feature_num]
        y_data[i, 0] = line_temp[-1]
# ========================= Load end =============================
# ========================= plotData =============================
plotData.plot_data(x_data, y_data)
# ========================= compute cost and gradient ===========
# prepare x_data and initialize theta
n = x_data.shape[1]
x_data = np.column_stack((np.ones((m, 1)), x_data))
initial_theta = np.zeros(n + 1)  # must be a 1-D vector for the optimizer
# cost = costFunction.cost_function(initial_theta, x_data, y_data)
# print("initial_theta cost is {} (approx)".format(cost))
# print("expected cost is 0.693")

grad = costFunction.gradient(initial_theta, x_data, y_data)
print("initial_theta grad is {}".format(grad))  # grad comes from costFunction.gradient
print('Expected gradients (approx):\n -0.1000\n -12.0092\n -11.2628\n')
# ============= Part 3: Optimizing using fminunc(matlab)/scipy(python)  =============
# theta must be a vector
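# A hedged sketch of the SciPy analogue of fminunc, reusing the cost and
# gradient callables above; the TNC method choice is an assumption.
import scipy.optimize as opt

result = opt.minimize(costFunction.cost_function,
                      initial_theta,
                      args=(x_data, y_data),
                      method='TNC',
                      jac=costFunction.gradient)
print("optimized theta is {}".format(result.x))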
Example #10
import numpy as np
import matplotlib.pyplot as plt

from plotData import plot_data
import costFunction as cf
import plotDecisionBoundary as pdb
from sigmoid import sigmoid
from predict import predict
"""
Classification with Logistic Regression
"""

# ====== 1. Load the data and draw a scatter plot ======
dataset = np.loadtxt('ex2data1.txt', delimiter=',')
X = dataset[:, 0:2]
Y = dataset[:, 2]

plot_data(X, Y)
plt.legend(['Admitted', 'Not admitted'])
plt.xlabel('Exam 1 score')
plt.ylabel('Exam 2 score')
# plt.show()

# ====== 2. Compute the cost and gradient ======
(m, n) = X.shape
X = np.c_[np.ones(m), X]  # add the bias (intercept) column
# initial theta and lambda values
init_theta = np.zeros(X.shape[1])
lmd = 1
# compute the cost and gradient
cost, grad = cf.cost_Function_Reg(X, Y, init_theta, lmd)
print('Cost at initial theta (zeros): {:0.3f}'.format(cost))
print('Expected cost (approx): 0.693')
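# A minimal sketch of what cf.cost_Function_Reg plausibly computes, assuming
# the standard regularized logistic cost; the repo's actual implementation is
# not shown (note its argument order above is X, Y, theta, lmd).
def cost_function_reg_sketch(X, Y, theta, lmd):
    m = Y.size
    h = sigmoid(X @ theta)                         # hypothesis, shape (m,)
    J = (-Y @ np.log(h) - (1 - Y) @ np.log(1 - h)) / m \
        + lmd / (2 * m) * np.sum(theta[1:] ** 2)   # theta_0 is not regularized
    grad = X.T @ (h - Y) / m
    grad[1:] += (lmd / m) * theta[1:]
    return J, grad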
Example #11
import json
import numpy
import warmUpExercise
import plotData

## ==================== Part 1: Basic Function ====================
# Complete warmUpExercise.py
print("Running warmUpExercise.py...")
print("5x5 Identity Matrix:")
print(warmUpExercise.identity_matrix())

input("Press Enter to continue")

## ======================= Part 2: Plotting =======================
print("Plotting Data...")
with open("ex1data1.json", "r") as f:
    data = numpy.array(json.load(f))
# Plot Data
# Note: You have to complete the code in plotData.py
plotData.plot_data(data)

input("Press Enter to continue")

## =================== Part 3: Gradient descent ===================
print("Running Gradient Descent ...")
m = data.shape[0]  # Length of the data array
X = numpy.column_stack((numpy.ones(m), data[:, 0]))  # Add a column of ones to X
y = data[:, 1]
theta = numpy.zeros(2)  # initialize fitting parameters

# Gradient descent settings
iterations = 1500
alpha = 0.01

# Compute and display initial cost
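# The gradient-descent step is announced but cut off here; a hedged sketch of
# the standard vectorized batch update it leads into (gradientDescent is not
# shown in this snippet):
for _ in range(iterations):
    theta = theta - (alpha / m) * X.T.dot(X.dot(theta) - y)
print("theta found by gradient descent: {}".format(theta))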
Example #12
# Sequential vs. parallel linter timing comparison
import glob
from sequentialLinter import run as srun
from ParallelLinter import run as prun
from plotData import plot_data
from timeit import default_timer as timer


directory = input("Enter your directory: ")
lstOfFiles = glob.glob(directory + "*.js")  # assumes the entered directory ends with a path separator
print("Sequential start")
startS = timer()
pTimes = []
sTimes = []
for i in lstOfFiles:
    result = srun(i)
    print(result)
    Sdone = timer() - startS
    sTimes.append(Sdone)

print("Parallel start")
for i in lstOfFiles:
    startP = timer()
    result = prun(i)
    print(result)
    pTimes.append(timer() - startP)

plot_data(len(lstOfFiles), sTimes, pTimes)
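# One plausible shape for this plotData.plot_data, offered purely as an
# assumption (the repo's module is not shown): per-file timings for the
# sequential and parallel runs on a shared axis.
import matplotlib.pyplot as plt

def plot_data(n_files, s_times, p_times):
    xs = range(1, n_files + 1)
    plt.plot(xs, s_times, marker='o', label='sequential')
    plt.plot(xs, p_times, marker='o', label='parallel')
    plt.xlabel('file index')
    plt.ylabel('lint time (s)')
    plt.legend()
    plt.show()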
Example #13
    y = np.array(data[:, 2]).reshape(len(data), 1)
    m = len(data)

    # Plot the data
    # plot_data(x, y)

    x1 = np.array(x[:, 0])
    x2 = np.array(x[:, 1])
    mappedX = mapFeature(x1, x2)

    # Add x-sub-0 (vector of 1's)
    # X = np.concatenate((np.ones((m, 1)), mappedX), axis=1)

    t = np.zeros((mappedX.shape[1], 1))

    lamb = 1
    J = compute_cost(t, mappedX, y, lamb)

    gradient = compute_gradient(t, mappedX, y, lamb)

    result = minimize(compute_cost,
                      t.flatten(),  # SciPy's minimize expects a 1-D x0
                      args=(mappedX, y, lamb),
                      method=None,  # defaults to BFGS here (unconstrained, jac supplied)
                      jac=compute_gradient,
                      options={'maxiter': 400})

    print(result.x)

    plot_data(x, y, result.x)  # result.x is already 1-D; .T would be a no-op
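    # A hedged follow-up: one way the optimized result.x could be sanity-checked
    # on the training set. The sigmoid helper is an assumption; it does not
    # appear in this snippet.
    def sigmoid(z):
        return 1.0 / (1.0 + np.exp(-z))

    predictions = sigmoid(mappedX @ result.x) >= 0.5
    print('Train accuracy: {:.1f}%'.format(100.0 * np.mean(predictions == y.flatten())))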