def test_compare_arma():
    # Preliminary test comparing arma_kf, arma_cond_ls and arma_cond_mle.
    # The results returned by the fit methods are incomplete; for now this
    # runs without a fixed random seed.
    #np.random.seed(9876565)
    x = fa.ArmaFft([1, -0.5], [1., 0.4], 40).generate_sample(size=200,
                                                             burnin=1000)
    # Kalman-filter based estimate is used as the reference result
    mod_kalman = ARMA(x)
    res_kalman = mod_kalman.fit((1, 1), trend='nc', disp=-1)
    # conditional least squares and conditional MLE on the same sample
    mod_cond = Arma(x)
    res_ls = mod_cond.fit(order=(1, 1))
    res_cmle = mod_cond.fit_mle(order=(1, 1), start_params=[0.4, 0.4, 1.],
                                disp=0)
    # decimal=1 corresponds to a threshold of roughly 5% relative difference
    assert_almost_equal(res_ls[0] / res_kalman.params,
                        np.ones(res_kalman.params.shape), decimal=1)
    # res_cmle.params also contains the variance estimate as its last element
    assert_almost_equal(res_cmle.params[:-1] / res_kalman.params,
                        np.ones(res_kalman.params.shape), decimal=1)
def mcarma22(niter=10, nsample=1000, ar=None, ma=None, sig=0.5):
    '''run Monte Carlo for an ARMA(2,2) DGP

    Parameters
    ----------
    niter : int
        number of Monte Carlo replications
    nsample : int
        sample size of each simulated series (after burn-in)
    ar : list of float or None
        AR lag-polynomial coefficients (leading 1); defaults to
        [1.0, -0.55, -0.1]
    ma : list of float or None
        MA lag-polynomial coefficients (leading 1); defaults to
        [1.0, 0.3, 0.2]
    sig : float
        scale of the innovations

    Returns
    -------
    true_params : ndarray
        DGP parameters, ar[1:] and ma[1:] concatenated
    results : ndarray
        estimated parameters, one row per replication
    results_bse : ndarray
        standard error estimates per replication; a row of NaN when the
        fit did not return a covariance of the estimates
    '''
    if ar is None:
        ar = [1.0, -0.55, -0.1]
    if ma is None:
        ma = [1.0, 0.3, 0.2]
    results = []
    results_bse = []
    for _ in range(niter):
        # simulate with 1000 burn-in observations, keep the last nsample
        y2 = arma_generate_sample(ar, ma, nsample + 1000, sig)[-nsample:]
        y2 -= y2.mean()  # demean: no constant in the estimation
        arest2 = Arma(y2)
        rhohat2a, cov_x2a, infodict, mesg, ier = arest2.fit((2, 2))
        results.append(rhohat2a)
        err2a = arest2.geterrors(rhohat2a)
        # residual standard deviation estimate
        sige2a = np.sqrt(np.dot(err2a, err2a) / nsample)
        # PEP 8 idiom: `is not None` instead of `not ... is None`
        if cov_x2a is not None:
            results_bse.append(sige2a * np.sqrt(np.diag(cov_x2a)))
        else:
            # optimizer did not return a covariance estimate
            results_bse.append(np.nan + np.zeros_like(rhohat2a))
    return np.r_[ar[1:], ma[1:]], np.array(results), np.array(results_bse)
def mcarma22(niter=10, nsample=1000, ar=None, ma=None, sig=0.5):
    '''run Monte Carlo for an ARMA(2,2) DGP

    Parameters
    ----------
    niter : int
        number of Monte Carlo replications
    nsample : int
        sample size of each simulated series (after burn-in)
    ar : list of float or None
        AR lag-polynomial coefficients (leading 1); defaults to
        [1.0, -0.55, -0.1]
    ma : list of float or None
        MA lag-polynomial coefficients (leading 1); defaults to
        [1.0, 0.3, 0.2]
    sig : float
        scale of the innovations

    Returns
    -------
    true_params : ndarray
        DGP parameters, ar[1:] and ma[1:] concatenated
    results : ndarray
        estimated parameters, one row per replication
    results_bse : ndarray
        standard error estimates per replication; a row of NaN when the
        fit did not return a covariance of the estimates
    '''
    if ar is None:
        ar = [1.0, -0.55, -0.1]
    if ma is None:
        ma = [1.0, 0.3, 0.2]
    results = []
    results_bse = []
    for _ in range(niter):
        # simulate with 1000 burn-in observations, keep the last nsample
        y2 = arma_generate_sample(ar, ma, nsample + 1000, sig)[-nsample:]
        y2 -= y2.mean()  # demean: no constant in the estimation
        arest2 = Arma(y2)
        rhohat2a, cov_x2a, infodict, mesg, ier = arest2.fit((2, 2))
        results.append(rhohat2a)
        err2a = arest2.geterrors(rhohat2a)
        # residual standard deviation estimate
        sige2a = np.sqrt(np.dot(err2a, err2a) / nsample)
        # PEP 8 idiom: `is not None` instead of `not ... is None`
        if cov_x2a is not None:
            results_bse.append(sige2a * np.sqrt(np.diag(cov_x2a)))
        else:
            # optimizer did not return a covariance estimate
            results_bse.append(np.nan + np.zeros_like(rhohat2a))
    return np.r_[ar[1:], ma[1:]], np.array(results), np.array(results_bse)
# Script section (Python 2): times and compares several Arma fit methods on
# `arma1`; relies on `arma1`, `results`, `t1`, `t2` and `time` defined
# earlier in the file (outside this view).
print 'time used:', t2-t1
print "Arma.fit_mle results"
# have to set nar and nma manually before calling fit_mle
arma1.nar = 2
arma1.nma = 2
t2=time()
ret = arma1.fit_mle()
t3=time()
# params layout: first 4 are ARMA coefficients, last is sigma
print "params, first 4, sigma, last 1 ", ret.params
results += ["Arma.fit_mle ", ret.params[:4], ret.params[-1], ret.llf]
print 'time used:', t3-t2
print "Arma.fit method = \"ls\""
t3=time()
ret2 = arma1.fit(order=(2,0,2), method="ls")
t4=time()
print ret2[0]
results += ["Arma.fit ls", ret2[0]]
print 'time used:', t4-t3
print "Arma.fit method = \"CLS\""
t4=time()
# NOTE(review): method="None" is the literal string, not the None object,
# and the banner above says "CLS" — presumably any unrecognized method
# falls through to a default; confirm against Arma.fit
ret3 = arma1.fit(order=(2,0,2), method="None")
t5=time()
print ret3
results += ["Arma.fit other", ret3[0]]
print 'time used:', t5-t4
# dump the accumulated comparison results
for i in results:
    print i
# Script section (Python 2): simulate an ARMA(1,1) process and compare
# parameter estimates from fit_mle (fmin), fit (bfgs) and the older
# least-squares ARIMA fit. Uses `arma_generate_sample`, `Arma`, `ndt`
# (numdifftools) and `tsa` imported elsewhere in the file.
ar = [1.0, -0.8]
ma = [1.0, 0.5]
y1 = arma_generate_sample(ar, ma, 1000, 0.1)
y1 -= y1.mean()  #no mean correction/constant in estimation so far
arma1 = Arma(y1)
arma1.nar = 1
arma1.nma = 1
arma1res = arma1.fit_mle(order=(1, 1), method='fmin')
print arma1res.params
#Warning: need a new instance, otherwise results carry over
arma2 = Arma(y1)
arma2.nar = 1
arma2.nma = 1
res2 = arma2.fit(method='bfgs')
print res2.params
print res2.model.hessian(res2.params)
# numerical Hessian check against the analytic/model one above
print ndt.Hessian(arma1.loglike, stepMax=1e-2)(res2.params)
arest = tsa.arima.ARIMA(y1)
resls = arest.fit((1, 0, 1))
print resls[0]
print resls[1]
print '\nparameter estimate - comparing methods'
print '---------------------------------------'
print 'parameter of DGP ar(1), ma(1), sigma_error'
# true DGP values for reference
print[-0.8, 0.5, 0.1]
print 'mle with fmin'
print arma1res.params
print 'mle with bfgs'
# Script section (Python 2): near-duplicate of the other ARMA(1,1)
# comparison section in this file. Simulates an ARMA(1,1) process and
# compares fit_mle (fmin), fit (bfgs) and the least-squares ARIMA fit.
ar = [1.0, -0.8]
ma = [1.0, 0.5]
y1 = arma_generate_sample(ar,ma,1000,0.1)
y1 -= y1.mean()  #no mean correction/constant in estimation so far
arma1 = Arma(y1)
arma1.nar = 1
arma1.nma = 1
arma1res = arma1.fit_mle(order=(1,1), method='fmin')
print arma1res.params
#Warning: need a new instance, otherwise results carry over
arma2 = Arma(y1)
arma2.nar = 1
arma2.nma = 1
res2 = arma2.fit(method='bfgs')
print res2.params
print res2.model.hessian(res2.params)
# numerical Hessian check against the analytic/model one above
print ndt.Hessian(arma1.loglike, stepMax=1e-2)(res2.params)
arest = tsa.arima.ARIMA(y1)
resls = arest.fit((1,0,1))
print resls[0]
print resls[1]
print '\nparameter estimate - comparing methods'
print '---------------------------------------'
print 'parameter of DGP ar(1), ma(1), sigma_error'
# true DGP values for reference
print [-0.8, 0.5, 0.1]
print 'mle with fmin'
# Script section (Python 2): compares the descriptive-statistics ARMA fit
# with the conditional LS and conditional MLE fits of Arma on a simulated
# ARMA(1,1) sample, then plots the fitted values and residuals.
from numpy.testing import assert_almost_equal

import matplotlib.pyplot as plt

import scikits.statsmodels.sandbox.tsa.fftarma as fa
from scikits.statsmodels.tsa.descriptivestats import TsaDescriptive
from scikits.statsmodels.tsa.arma_mle import Arma

# simulate ARMA(1,1): ar=[1, -0.5], ma=[1, 0.4], 200 obs after burn-in
x = fa.ArmaFft([1, -0.5], [1., 0.4], 40).generate_sample(size=200,
                                                         burnin=1000)
d = TsaDescriptive(x)
d.plot4()
#d.fit(order=(1,1))
d.fit((1,1), trend='nc')
print d.res.params
modc = Arma(x)
resls = modc.fit(order=(1,1))
print resls[0]
rescm = modc.fit_mle(order=(1,1), start_params=[-0.4,0.4, 1.])
print rescm.params
#decimal 1 corresponds to threshold of 5% difference
assert_almost_equal(resls[0] / d.res.params, 1, decimal=1)
# rescm.params includes the variance estimate as the last element
assert_almost_equal(rescm.params[:-1] / d.res.params, 1, decimal=1)
#copied to tsa.tests
plt.figure()
plt.plot(x, 'b-o')
plt.plot(modc.predicted(), 'r-')
plt.figure()
plt.plot(modc.error_estimate)
#plt.show()
#need to iterate, ar1 too large ma terms too small #fix large parameters, if hannan_rissannen are too large start_params_mle[:-1] = (np.sign(start_params_mle[:-1]) * np.minimum(np.abs(start_params_mle[:-1]),0.75)) print 'conditional least-squares' #print rhohat2 print 'with mle' arest2.nar = 2 arest2.nma = 2 # res = arest2.fit_mle(start_params=start_params_mle, method='nm') #no order in fit print res.params rhohat2, cov_x2a, infodict, mesg, ier = arest2.fit((2,2)) print '\nARIMA_old' arest = ARIMA_old(y22) rhohat1, cov_x1, infodict, mesg, ier = arest.fit((2,0,2)) print rhohat1 print np.sqrt(np.diag(cov_x1)) err1 = arest.errfn(x=y22) print np.var(err1) print 'bse ls, formula not checked' print np.sqrt(np.diag(cov_x1))*err1.std() print 'bsejac for mle' #print arest2.bsejac #TODO:check bsejac raises singular matrix linalg error #in model.py line620: return np.linalg.inv(np.dot(jacv.T, jacv)) print '\nyule-walker'