def regression_tst(method, func, y0, t, y_ref, tol_boundary=(0, 6), h0=0.5,
                   mxstep=10e6, adaptive="order", p=4, solout=(lambda t: t),
                   nworkers=2):
    """Run the extrapolation solver over a sweep of tolerances and return the
    relative error against a reference solution at each tolerance.

    Parameters
    ----------
    method : solver method, forwarded to ex_parallel.extrapolation_parallel.
    func : right-hand-side function of the ODE system.
    y0 : initial value.
    t : output times passed to the solver.
    y_ref : reference solution used to measure the error.
    tol_boundary : (start, end) slice into the internal tolerance table
        [1e-3, 1e-5, 1e-7, 1e-9, 1e-11, 1e-13].
    h0, adaptive, p : accepted for interface compatibility but currently NOT
        forwarded to the solver -- TODO(review): confirm whether these should
        be passed through (compare convergenceTest, which forwards p and h0,
        and note the solver keyword there is spelled 'adaptative').
    mxstep : maximum number of solver steps, forwarded as-is.
    solout : post-processing applied to the solver output before comparison.
    nworkers : number of parallel workers.

    Returns
    -------
    err : numpy array with one relative error per tolerance tested.
    """
    tolerances = [1.e-3, 1.e-5, 1.e-7, 1.e-9, 1.e-11, 1.e-13]
    a, b = tol_boundary
    tolerances = tolerances[a:b]
    err = np.zeros(len(tolerances))
    # Parenthesized print: consistent with convergenceTest and py2/py3 safe.
    print('')
    for i in range(len(tolerances)):
        print(tolerances[i])
        # One solver run per tolerance level, with atol == rtol == tolerances[i].
        ys, infodict = ex_parallel.extrapolation_parallel(
            method, func, None, y0, t, atol=tolerances[i], rtol=tolerances[i],
            mxstep=mxstep, full_output=True, nworkers=nworkers)
        # Drop the initial-value row before post-processing.
        y = solout(ys[1:])
        err[i] = relative_error(y, y_ref)
    return err
def convergenceTest(method, i, test, allSteps, order, dense=False):
    """Fixed-step convergence test for the given test problem.

    Runs the solver once per step size in allSteps, computes the relative
    error against the stored reference solution, and fits a line to
    log10(error) vs log10(step); the slope estimates the observed order.

    Parameters
    ----------
    method : solver method, forwarded to ex_parallel.extrapolation_parallel.
    i : index into the module-level methodsmoothing table (order correction
        used only in the printed summary).
    test : test-problem object providing problemName, denseOutput,
        RHSFunction and initialValue.
    allSteps : iterable of fixed step sizes to try.
    order : extrapolation order p used in fixed-step mode.
    dense : if True, compare the dense output at the midpoint sample instead
        of the final-time solution.

    Returns
    -------
    The fitted slope, i.e. the observed convergence order.
    """
    y_ref = np.loadtxt(tst.getReferenceFile(test.problemName))
    denseOutput = test.denseOutput
    nhalf = 0  # only meaningful when dense=True
    if not dense:
        y_ref = y_ref[-1]
        denseOutput = [denseOutput[0], denseOutput[-1]]
    else:
        # int(...) is required: np.ceil returns a float, which is not a valid
        # list index (bug in the original, which did y_ref[np.ceil(...)]).
        # NOTE(review): under Python 2 integer division the original computed
        # floor(len/2); /2.0 here honors the written np.ceil intent -- confirm.
        nhalf = int(np.ceil(len(y_ref) / 2.0))
        y_ref = y_ref[nhalf]
        print("dense output time " + str(denseOutput[nhalf]))
    errorPerStep = []
    for step in allSteps:
        # rtol and atol are not important as we are fixing the step size
        ys, infodict = ex_parallel.extrapolation_parallel(
            method, test.RHSFunction, None, test.initialValue, denseOutput,
            atol=1e-1, rtol=1e-1, mxstep=10000000, full_output=True,
            nworkers=4, adaptative='fixed', p=order, h0=step)
        # print("number steps: " + str(infodict['nst']) + " (should be " + str(denseOutput[-1]/step) + ")")
        ys = ys[1:]  # drop the initial-value row
        if dense:
            ys = ys[nhalf]
        errorPerStep.append(relative_error(ys, y_ref))
    # Slope of log-log error vs step size ~ observed order of the method.
    coefficients = np.polyfit(np.log10(allSteps), np.log10(errorPerStep), 1)
    print("coefficients: " + str(coefficients)
          + " order is: " + str(order - methodsmoothing[i]))
    if plotConv:
        plt.plot(np.log10(allSteps), np.log10(errorPerStep), marker="x")
        plt.show()
    return coefficients[0]