            error += 1
        elif predict_c != Y[i]:
            error += 1
    # Misclassified count divided by the number of evaluated points.
    return float(error) / X.shape[0]

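# Experiment 1: train the multi-class SVM (one score function per class) with the
# linear kernel on toy_1, then plot the resulting decision boundaries.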
#Parameters here
c = 1
kernel = linear_kernel
#Parameters here
alpha, theta, theta_0 = multi_svm_train(X_train, Y_train, c, kernel)
scoreFns = []
for cls in alpha.keys():
    score = ScoreFns(X_train, Y_train, kernel, alpha[cls], theta_0[cls])
    scoreFns.append(score.fn)

plotMultiDecisionBoundary(X_train, Y_train, scoreFns, [0, 0, 0], title="Linear Kernel, toy_1")
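# Hedged addition (not in the original script): report training and validation
# error for the linear-kernel model as well, calling error_rate exactly as it is
# called for the polynomial kernel below. The er_*_lin names are new.
er_train_lin = error_rate(X_train, Y_train, X_train, Y_train, alpha, theta_0, kernel)
er_val_lin = error_rate(X_val, Y_val, X_train, Y_train, alpha, theta_0, kernel)
print("linear kernel: er_train = {}, er_val = {}".format(er_train_lin, er_val_lin))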

#c = 1

#alpha, theta, theta_0 = multi_svm_train(X_train, Y_train, c, poly_kernel)
#er = error_rate(X_val, Y_val, X_train, Y_train, alpha, theta_0, poly_kernel)
#print(er)
"""
classes = np.sort(np.unique(Y_train), axis=None)
for c in classes:
    Y_plot = np.array(Y_train == c, dtype=float)
    # define your matrices
    Y_lot = Y_plot * 2 - 1.0


#for c in [1e3]:
    return float(error) / X.shape[0]
"""
#Parameters here
c = 100
kernel = poly_kernel
#Parameters here
alpha, theta, theta_0 = multi_svm_train(X_train, Y_train, c, kernel)

er_train = error_rate(X_train, Y_train, X_train, Y_train, alpha, theta_0, kernel)
print("er_train is {}".format(er_train))
er_val = error_rate(X_val, Y_val, X_train, Y_train, alpha, theta_0, kernel)
print("er_val is {}".format(er_val))
er_test = error_rate(X_test, Y_test, X_train, Y_train, alpha, theta_0, kernel)
print("er_test is {}".format(er_test))
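# Plot the per-class decision boundaries of the polynomial-kernel model.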
"""
"""
scoreFns = []
for cls in alpha.keys():
    score = ScoreFns(X_train, Y_train, kernel, alpha[cls], theta_0[cls])
    scoreFns.append(score.fn)

plotMultiDecisionBoundary(X_train, Y_train, scoreFns, [0, 0, 0], title="Poly Kernel, toy_1")
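# Hedged sketch (illustrative, not part of the original experiments): a small
# sweep over the regularization parameter, reusing multi_svm_train and error_rate
# as they are called above; the candidate C values below are assumptions.
for c_try in [0.01, 1, 100, 1e3]:
    a_s, th_s, th0_s = multi_svm_train(X_train, Y_train, c_try, poly_kernel)
    er_s = error_rate(X_val, Y_val, X_train, Y_train, a_s, th0_s, poly_kernel)
    print("C = {}: validation error = {}".format(c_try, er_s))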
"""
#c = 1

#alpha, theta, theta_0 = multi_svm_train(X_train, Y_train, c, poly_kernel)
#er = error_rate(X_val, Y_val, X_train, Y_train, alpha, theta_0, poly_kernel)
#print er

"""
#for c in [1e3]: