# Demo script: print hypothesis/cost diagnostics, run linear-regression
# gradient descent, then plot the data points against the fitted line.
# NOTE(review): X and Y are appended to but never initialized in this chunk —
# presumably X, Y = [], [] appears earlier in the file; confirm.
for item in data.getData():
    x = item.getX()
    y = item.getY()
    # x appears to be a vector whose component [1] is the plotted feature
    # (component [0] is presumably the bias term) — TODO confirm.
    X.append(x[1])
    Y.append(y)
    print(x, y)

# Fresh 2-parameter theta (intercept + slope).
theta = th.theta(dimension=2)
theta.printData()

ex1 = data.getExample(0)
index = 0

print("Linear hypothesis value:\n{}".format(hL.hypothesisLinearTrain(theta, ex1)))
print("Simple cost for hypothesis:\n{}".format(hL.cost(theta, ex1)))
print("Number of examples:\n{}".format(data.getNumberOfExamples()))
print("Total cost for hypothesis:\n{}".format(hL.costFunction(theta, data)))
print("Cost function derivative for index {0}:\n{1}".format(
    index, hL.costFunctionDerivative(theta, data, index)))

# Fit theta with learning rate 0.01, then report the learned parameters.
theta = grad.gradientDescentLinear(theta, data, 0.01)
print(theta.getData())

# Plot the training points and the fitted line t[0] + t[1] * x.
t = theta.getData()
a = np.arange(min(X), max(X), 0.1)
b = [t[0] + t[1] * xv for xv in a]

pylab.plot(X, Y, 'o')
pylab.plot(a, b)
pylab.show()
def gradientDescentLinearIteration(theta, data, index, learnRate):
    """Return the updated value of theta[index] after one gradient step.

    Computes ``theta_i - learnRate * dJ/dtheta_i`` using the cost-function
    derivative from ``hL``. Does not mutate ``theta``; the caller writes
    the returned scalar back into the parameter vector.
    """
    step = learnRate * hL.costFunctionDerivative(theta, data, index)
    return theta.getData()[index] - step