import math
from decimal import Decimal

# The objective f, its derivative df, the line searches fibonacci and goldenRatio,
# and the plot helper are expected to be defined elsewhere in this module.


def gradFibonacci(xx: float, eps: float, x_min: float, x_max: float, dataPlot):
    """Steepest descent; the step lmd is found by Fibonacci search on [x_min, x_max]."""
    stop = False
    p = plot(dataPlot.x_left, dataPlot.x_right, dataPlot.pr)
    p.on(xx)
    i = 0
    while not stop:
        # p.showPoint(xx)
        def a(lmb):
            # objective as a function of the step length along -df(xx)
            return f(xx - lmb * df(xx))

        x_l = xx
        lmd = fibonacci(a, x_min, x_max, eps)
        xx = xx - lmd * df(xx)
        i += 1
        # print("iter:", i, "| x:", xx)
        # stop when both the argument and the value of f change by less than eps
        if abs(x_l - xx) < Decimal(eps) and math.fabs(f(x_l) - f(xx)) < Decimal(eps):
            stop = True
    p.off(xx)
    return xx, i
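

# For reference, a minimal sketch of a Fibonacci line search with the signature used
# above, fibonacci(func, a, b, eps) -> approximate minimiser of func on [a, b]; the
# module's actual fibonacci routine may differ in detail.
def _fibonacci_search_sketch(func, a, b, eps):
    # grow the Fibonacci numbers until the last one covers the required reduction
    fib = [1, 1]
    while fib[-1] < (b - a) / eps:
        fib.append(fib[-1] + fib[-2])
    n = len(fib) - 1
    for k in range(1, n - 1):
        # probe points split the current [a, b] in Fibonacci proportions
        x1 = a + (b - a) * fib[n - k - 1] / fib[n - k + 1]
        x2 = a + (b - a) * fib[n - k] / fib[n - k + 1]
        if func(x1) > func(x2):
            a = x1  # the minimum lies in [x1, b]
        else:
            b = x2  # the minimum lies in [a, x2]
    return (a + b) / 2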


def conjucateGrad(xx: float, N, x_min, x_max, eps: float, dataPlot, lmd=0.1):
    """Conjugate gradient (Fletcher-Reeves) with a golden-section line search,
    restarted to the steepest-descent direction every N iterations."""
    stop = False
    i, k = 0, 0
    pp = -df(xx)
    p = plot(dataPlot.x_left, dataPlot.x_right, dataPlot.pr)
    p.on(xx)
    while not stop:
        print("iter:", i, "| x:", xx)
        i += 1
        p.showPoint(xx)
        x_l = xx

        def ff(lmb):
            # objective as a function of the step length along the direction pp
            return f(xx + lmb * pp)

        lmd = goldenRatio(ff, x_min, x_max, eps)
        xx = x_l + lmd * pp
        if abs(df(xx)) < eps:
            stop = True
        if k + 1 == N:
            # periodic restart: fall back to the steepest-descent direction
            k = 0
            pp = -df(xx)
            continue
        else:
            # Fletcher-Reeves coefficient
            bk = abs(df(xx)) ** 2 / abs(df(x_l)) ** 2
            pp = -df(xx) + bk * pp
        k += 1
    p.off(xx)
    return xx, i
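

# For reference, a minimal sketch of a golden-section search with the signature used
# above, goldenRatio(func, a, b, eps); the module's actual goldenRatio may differ.
def _golden_ratio_sketch(func, a, b, eps):
    phi = (5 ** 0.5 - 1) / 2  # ~0.618, so phi**2 == 1 - phi
    x1, x2 = b - phi * (b - a), a + phi * (b - a)
    f1, f2 = func(x1), func(x2)
    while b - a > eps:
        if f1 > f2:
            # the minimum lies in [x1, b]; the old x2 becomes the new x1
            a, x1, f1 = x1, x2, f2
            x2 = a + phi * (b - a)
            f2 = func(x2)
        else:
            # the minimum lies in [a, x2]; the old x1 becomes the new x2
            b, x2, f2 = x2, x1, f1
            x1 = b - phi * (b - a)
            f1 = func(x1)
    return (a + b) / 2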


def gradConst(lmd: float, xx: float, eps: float, dataPlot):
    """Gradient descent with a constant step lmd."""
    stop = False
    i = 0
    p = plot(dataPlot.x_left, dataPlot.x_right, dataPlot.pr)
    p.on(xx)
    while not stop:
        # p.showPoint(xx)
        x_l = xx
        xx = x_l - lmd * df(x_l)
        i += 1
        # print("iter:", i, "| x:", xx)
        # stop when both the argument and the value of f change by less than eps
        if abs(x_l - xx) < Decimal(eps) and math.fabs(f(x_l) - f(xx)) < Decimal(eps):
            stop = True
    p.off(xx)
    return xx, i
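

# For intuition, with a toy objective f(x) = x**2 (hypothetical, not this module's f)
# the constant-step update is x_{k+1} = x_k - lmd * 2 * x_k = (1 - 2 * lmd) * x_k,
# so gradConst converges only for 0 < lmd < 1 and shrinks the error by a factor of
# |1 - 2 * lmd| per iteration (e.g. 0.8 for lmd = 0.1).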


def newton(xx: float, x_min, x_max, eps: float, dataPlot, lmd=0.1):
    """Newton's iteration x_{k+1} = x_k - f(x_k) / df(x_k); stops when the change in f
    is below eps. x_min, x_max and lmd keep the signature uniform and are unused."""
    stop = False
    p = plot(dataPlot.x_left, dataPlot.x_right, dataPlot.pr)
    p.on(xx)
    i = 0
    # arg_xx = numpy.array([x_min, x_max])
    while not stop:
        i += 1
        p.showPoint(xx)
        x_l = xx
        xx = x_l - f(x_l) / df(x_l)
        print("iter:", i, "| x:", xx)
        if math.fabs(f(x_l) - f(xx)) < Decimal(eps):
            stop = True
    p.off(xx)
    return xx, i
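

# A minimal sketch of how these routines could be driven, assuming this module also
# defines f, df, fibonacci, goldenRatio and plot. The dataPlot object only needs the
# x_left, x_right and pr attributes read above; all numeric values here are illustrative.
if __name__ == "__main__":
    from types import SimpleNamespace

    dataPlot = SimpleNamespace(x_left=-5.0, x_right=5.0, pr=0.01)
    x0, eps = 4.0, 1e-4

    print(gradConst(0.1, x0, eps, dataPlot))              # constant step
    print(gradFibonacci(x0, eps, 0.0, 1.0, dataPlot))     # Fibonacci line search
    print(conjucateGrad(x0, 10, 0.0, 1.0, eps, dataPlot)) # restart every N = 10 steps
    print(newton(x0, 0.0, 1.0, eps, dataPlot))            # Newton iteration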