def solutionTest(func, ns, dataNum, lnLambdas=None, SavePath=""):
    """Train and evaluate polynomial fits over a grid of orders and sample sizes.

    For every polynomial order in ``ns`` and every sample count in ``dataNum``,
    generate a training set and a half-sized test set, solve for the weights,
    print ``order;samples;lnLambda;testRSS`` lines, and finally plot each
    fitted polynomial via ``visualPoly``.

    Parameters
    ----------
    func : callable
        Solver ``func(order, X, T)`` used when no lambdas are given; its
        ``__name__`` also labels the plot titles.
    ns : sequence of int
        Polynomial orders to test.
    dataNum : sequence of int
        Training-set sizes to test.
    lnLambdas : sequence of float, optional
        ln(lambda) regularization values; when non-empty, ``analyticalSolve``
        is used once per value instead of ``func``. Defaults to no sweep.
    SavePath : str
        Forwarded to ``visualPoly`` as ``savePath``.
    """
    # Avoid the mutable-default-argument pitfall of the original signature
    # (behaviorally identical: the list was never mutated).
    if lnLambdas is None:
        lnLambdas = []
    # allWeights[i][j] holds the weight vectors fitted for ns[i], dataNum[j].
    allWeights = []
    for order in ns:
        perOrder = []
        allWeights.append(perOrder)
        for numSamples in dataNum:
            perSize = []
            perOrder.append(perSize)
            X, T = generateData(numSamples)
            # Held-out test set is half the size of the training set.
            testX, testT = generateData(numSamples // 2)
            if not lnLambdas:
                W = func(order, X, T)
                print("%d;%d;None;%r" % (order, numSamples, RSS(testT, testX, W) / len(testX)))
                perSize.append(W)
            else:
                # NOTE(review): this branch always calls analyticalSolve and
                # ignores ``func`` (which then only labels the plots) —
                # confirm that is intended.
                for lnLambda in lnLambdas:
                    W = analyticalSolve(order, X, T, lnLambda)
                    perSize.append(W)
                    print("%d;%d;%.0f;%r" % (order, numSamples, lnLambda, RSS(testT, testX, W) / len(testX)))
    # One figure per (order, sample-size) combination. With an empty
    # lnLambdas this plots no curves, matching the original behavior.
    for i, order in enumerate(ns):
        for j, numSamples in enumerate(dataNum):
            rW = [allWeights[i][j][k] for k in range(len(lnLambdas))]
            labels = ["λ e^%.0f" % lam for lam in lnLambdas]
            visualPoly(*rW, *labels,
                       title="%s poly%d datanum%d" % (func.__name__, order, numSamples),
                       savePath=SavePath)
def __init__(self):
    """Interactively read sampling parameters and build the data set.

    Prompts on stdin for an x interval [A, B] with a step size and a
    noise interval [C, D], then stores the sampled x grid in
    ``self.xValues`` and the generated noisy samples in ``self.yValues``.
    """
    print("Enter x range [A, B]\n")
    lower = float(input("A = "))
    print('\n')
    upper = float(input("B = "))
    print('\n')
    spacing = float(input("Step = "))
    print('\n')
    print("Enter noise range [C, D]\n")
    noiseLow = float(input("C = "))
    print('\n')
    noiseHigh = float(input("D = "))
    print('\n\n')
    # x grid over [A, B) and the corresponding noisy y samples.
    self.xValues = arange(lower, upper, spacing)
    self.yValues = generateData(self.xValues, [noiseLow, noiseHigh])
# NOTE(review): this chunk is truncated at BOTH ends. The enclosing
# function's ``def`` line (some error/loss helper used by gradientDescent,
# judging from the names below) lies above this view, and the final
# ``visualPoly`` call is cut off mid-argument-list. Only comments were
# added here; code tokens are unchanged.
break
sum /= 2          # NOTE(review): ``sum`` shadows the builtin — presumably half of an accumulated squared error; confirm upstream
W.reverse()       # NOTE(review): reverses weight order in place — coefficient-order convention, verify against caller
if isaverage:
    sum /= count  # average the accumulated value when requested
return sum


if __name__ == '__main__':
    # Demo configuration for a gradient-descent polynomial fit.
    dataNum = 10               # number of generated training samples
    n = 9                      # polynomial order
    lr = 0.5                   # learning rate
    maxItrTimes = sys.maxsize  # effectively unbounded iteration budget
    batch = dataNum            # batch size == data size, i.e. full-batch descent
    lnLambda = 0               # ln of the regularization coefficient
    X, T = generateData(dataNum)
    e, resultW = gradientDescent(n, X, T, lr, lnLambada=lnLambda, batch=batch, maxItrTimes=maxItrTimes)
    visualPoly(resultW, "λ e^%.0f" % (lnLambda), X=X, T=T,
               title="%s poly%d datanum%d lr%.3f batch%d" % ("gradientDescent", n, dataNum, lr, batch),
               isShow=True,
               # NOTE(review): call truncated here — the remaining arguments
               # continue past this chunk.
# NOTE(review): this chunk starts mid-function — the ``def`` line (apparently
# ``conjugateGradient(lenW, X, T, lnLambada=..., ...)`` given the demo call
# below) is outside this view, so ``lenW``, ``X``, ``T``, ``lnLambada``,
# ``limit`` and ``MaxIterationNum`` are parameters/locals defined above.
lambada = math.e ** lnLambada  # regularization coefficient lambda = e**ln(lambda)
# Vandermonde-style design matrix: row per sample x, columns x**0 .. x**(lenW-1).
XX = mat([[x ** i for i in range(lenW)] for x in X])
vectorT = mat(T).T
A = XX.T * XX + lambada * numpy.eye(lenW)  # normal-equations matrix with penalty (regularization) term
B = XX.T * vectorT
W = mat(zeros((lenW, 1)))  # initial guess: all-zero weights
# Conjugate-gradient iteration solving the linear system A * W = B.
r = B - A * W   # residual
p = r.copy()    # search direction
num = 0         # iteration counter (also returned to the caller)
while num < MaxIterationNum:
    num += 1
    alpha = (r.T * r / (p.T * A * p))[0, 0]  # optimal step size along p
    W += alpha * p
    lastr = r.copy()
    r -= alpha * A * p
    if (r.T * r)[0, 0] < limit:  # converged: squared residual norm below threshold
        break
    # Fletcher-Reeves coefficient: beta = (r_new . r_new) / (r_old . r_old).
    beta = (r.T * r / (lastr.T * lastr))[0, 0]
    p = r + beta * p
# Return the weights as a flat list plus the iteration count used.
return W.T.tolist()[0], num


if __name__ == '__main__':
    # Demo: fit a degree-9 polynomial to 10 generated samples and plot it.
    X, T = generateData(10)
    W, num = conjugateGradient(9, X, T, lnLambada=None)
    visualResultAndSampleAndTarget(W, X, T)
    # plt.plot(X, T, 'r*', linewidth=2)
    # plt.show()
from matplotlib import pyplot as plt
import numpy as np

from DataGenerator import generateData
from Settings import (DEFAULT_NUMBER_OF_CLASSES, DEFAULT_NUMBER_OF_FEATURES,
                      DEFAULT_NUMBER_OF_RECORDS_PER_CLASS,
                      DEFAULT_FEATURE_MEAN_RANGE, DEFAULT_RANDOM_NUMBER_SEED,
                      DEFAULT_TRUE_NUMBER_OF_CLASSES)

# Generate the synthetic classification data set from the project defaults.
data, labels = generateData(DEFAULT_NUMBER_OF_CLASSES, DEFAULT_NUMBER_OF_FEATURES,
                            DEFAULT_NUMBER_OF_RECORDS_PER_CLASS,
                            DEFAULT_FEATURE_MEAN_RANGE, DEFAULT_RANDOM_NUMBER_SEED,
                            DEFAULT_TRUE_NUMBER_OF_CLASSES)
distinctTrainLabels = np.unique(labels)
OPACITY = 0.7

# Scatter the first two features, one randomly-colored cloud per class.
plt.figure()
plt.title("Data Set")
plt.xlabel("Feature 1")
plt.ylabel("Feature 2")
for i, label in enumerate(distinctTrainLabels):
    mask = labels == label  # hoisted: the same mask was computed twice before
    # Fix: pass the random RGB color as a 2-D single-row array. A 1-D
    # 3-element array for ``c`` is ambiguous to matplotlib (single RGB vs.
    # three values to colormap) and triggers a warning.
    plt.scatter(data[mask, 0], data[mask, 1],
                c=np.random.rand(1, 3), alpha=OPACITY,
                label="Class {}".format(i))
plt.legend()
plt.show()
from scipy.optimize import curve_fit
import numpy as np
from pylab import *

from DataGenerator import generateData


def mySin(x, a=1, b=1):
    """Sinusoid model ``a*sin(b*x)`` used as the curve_fit target."""
    return a * sin(b * x)


def fit(xValues, yValues):
    """Fit ``mySin`` to the samples and return the fitted y values.

    Runs a least-squares fit of amplitude and frequency, then evaluates
    the fitted model at every x in ``xValues``.
    """
    params, _cov = curve_fit(mySin, xValues, yValues)
    amplitude, frequency = params[0], params[1]
    return [mySin(x, amplitude, frequency) for x in xValues]


if __name__ == '__main__':
    # Sample a noisy sinusoid, fit it, and plot samples / fit / ground truth.
    xValues = arange(0, 7, 0.1)
    yValues = generateData(xValues, [-2, 2])
    yValuesFit = fit(xValues, yValues)

    figure(1)
    plot(xValues, yValues, 'go')     # noisy samples
    plot(xValues, yValuesFit, 'b')   # fitted curve
    sinValues = [sin(x) for x in xValues]
    plot(xValues, sinValues, 'r')    # ground-truth sin(x)
    grid(True)
    show()