def VarSimul2(data, H):
    """Bootstrap confidence bands for one MA-representation element.

    Fits a VAR(H) to ``data``, then repeatedly perturbs the estimated
    coefficients with Gaussian noise scaled by their standard errors,
    recomputes the MA representation, and plots the mean and the
    2.5/97.5 percentile band of element ``[9, 0]`` across MA lags.

    :param data: DataFrame of observations (columns = variables)
    :param H: integer, VAR lag order passed to ``model.fit``
    :return: None (shows a matplotlib figure)
    """
    model = sm.VAR(data)
    results = model.fit(H)
    nAssets = len(data.columns)
    # Drop the intercept row; the rest are the stacked lag-coefficient rows.
    coeffs = np.array(results.params[1:])
    stderrs = np.array(results.stderr[1:])
    # '//' keeps the reshape dimensions integral (true division would
    # produce a float and make reshape fail).
    nLags = len(coeffs) // nAssets
    coeffs = coeffs.reshape(nLags, nAssets, nAssets)
    stderrs = stderrs.reshape(nLags, nAssets, nAssets)

    draws = []
    for _ in range(10000):
        # Elementwise Gaussian draw; replaces the original triple loop.
        sim = np.random.normal(coeffs, stderrs)
        marep = ma_rep(sim, 15)
        # NOTE(review): [9, 0] assumes at least 10 variables - confirm.
        draws.append([marep[j][9, 0] for j in range(marep.shape[0])])

    draws = np.array(draws)
    mean = draws.mean(axis=0)
    up = np.percentile(draws, 97.5, axis=0)
    down = np.percentile(draws, 2.5, axis=0)

    plt.plot(mean, color=seaborn.xkcd_rgb['cornflower blue'], alpha=1, linestyle='-')
    plt.plot(up, color=seaborn.xkcd_rgb['indian red'], alpha=0.5, linestyle='--')
    plt.plot(down, color=seaborn.xkcd_rgb['indian red'], alpha=0.5, linestyle='--')
    plt.fill_between(range(len(mean)), up, down, alpha=0.5)
    # NOTE(review): single-argument xlim sets only the left limit - confirm intent.
    plt.xlim(1)
    plt.show()
def EstimateVAR(data, H, sparse_method=False, GVD_output=True):
    """
    Estimate a VAR and compute the generalized variance decomposition (GVD).

    :param data: A numpy array of log returns
    :param H: integer, size of step ahead forecast
    :param sparse_method: if True, zero out coefficients with |t-stat| <= 1.96
        before building the MA representation (branch currently disabled by
        the author's guard below)
    :param GVD_output: if False, the GVD computation is skipped and the
        returned GVD frame contains uninitialized np.empty_like memory
    :return: (GVD DataFrame, residual covariance SIGMA, MA representation,
        residuals)
    """
    model = sm.VAR(data)
    results = model.fit(maxlags=H, ic='aic')
    SIGMA = np.cov(results.resid.T)

    if sparse_method:
        # Author's guard: this branch should be rebuilt on results.coefs
        # instead of results.params (Danish message preserved verbatim).
        exit("METODEN BRUGER RESULTS.COEFS FREM FOR PARAMS")
        _nAssets = results.params.shape[1]
        # params = 1 intercept row + nLags*nAssets coefficient rows;
        # subtract the intercept and use '//' so reshape gets integers.
        _nLags = (results.params.shape[0] - 1) // _nAssets
        custom_params = np.where(abs(results.params / results.stderr) > 1.96,
                                 results.params, 0)[1:].reshape(
                                     (_nLags, _nAssets, _nAssets))
        _ma_rep = ma_rep(custom_params, maxn=H)
    else:
        _ma_rep = results.ma_rep(maxn=H)

    GVD = np.empty_like(SIGMA)
    if GVD_output:
        r, c = GVD.shape
        for i in range(r):
            for j in range(c):
                # Diebold-Yilmaz generalized FEVD:
                #   theta_ij = sigma_jj^-1 * sum_h (e_i' A_h SIGMA e_j)^2
                #            / sum_h (e_i' A_h SIGMA A_h' e_i)
                # BUGFIX: the original scaled by 1/sqrt(sigma_jj); the
                # formula requires 1/sigma_jj.
                GVD[i, j] = 1 / SIGMA[j, j] * sum(
                    [_ma_rep[h, i].dot(SIGMA[j]) ** 2 for h in range(H)]) / sum(
                    [_ma_rep[h, i, :].dot(SIGMA).dot(_ma_rep[h, i, :])
                     for h in range(H)])
            # Row-normalize so each row of the decomposition sums to 1.
            GVD[i] /= GVD[i].sum()
    return pd.DataFrame(GVD), SIGMA, _ma_rep, results.resid
def VarSimul(data, H):
    """Bootstrap the sampling distribution of one MA coefficient.

    Fits a VAR(H), perturbs the coefficients with Gaussian noise scaled
    by their standard errors 1000 times, recomputes the MA representation
    each time, and plots the distribution of element ``marep[1][0, 0]``.

    :param data: DataFrame of observations (columns = variables)
    :param H: integer, VAR lag order passed to ``model.fit``
    :return: None (prints the std-dev and shows a matplotlib figure)
    """
    model = sm.VAR(data)
    results = model.fit(H)
    nAssets = len(data.columns)
    # Drop the intercept row; the rest are the stacked lag-coefficient rows.
    coeffs = np.array(results.params[1:])
    stderrs = np.array(results.stderr[1:])
    # '//' keeps the reshape dimensions integral (true division would
    # produce a float and make reshape fail).
    nLags = len(coeffs) // nAssets
    coeffs = coeffs.reshape(nLags, nAssets, nAssets)
    stderrs = stderrs.reshape(nLags, nAssets, nAssets)

    sample = []
    for _ in range(1000):
        # Elementwise Gaussian draw; replaces the original triple loop.
        sim = np.random.normal(coeffs, stderrs)
        marep = ma_rep(sim, 10)
        sample.append(marep[1][0, 0])

    print(np.std(sample))
    seaborn.distplot(sample, norm_hist=True)
    plt.show()
def VarSimul2(data, H):
    """Bootstrap confidence bands for one MA-representation element.

    Fits a VAR(H) to ``data``, then repeatedly perturbs the estimated
    coefficients with Gaussian noise scaled by their standard errors,
    recomputes the MA representation, and plots the mean and the
    2.5/97.5 percentile band of element ``[9, 0]`` across MA lags.

    :param data: DataFrame of observations (columns = variables)
    :param H: integer, VAR lag order passed to ``model.fit``
    :return: None (shows a matplotlib figure)
    """
    model = sm.VAR(data)
    results = model.fit(H)
    nAssets = len(data.columns)
    # Drop the intercept row; the rest are the stacked lag-coefficient rows.
    coeffs = np.array(results.params[1:])
    stderrs = np.array(results.stderr[1:])
    # '//' keeps the reshape dimensions integral (true division would
    # produce a float and make reshape fail).
    nLags = len(coeffs) // nAssets
    coeffs = coeffs.reshape(nLags, nAssets, nAssets)
    stderrs = stderrs.reshape(nLags, nAssets, nAssets)

    draws = []
    for _ in range(10000):
        # Elementwise Gaussian draw; replaces the original triple loop.
        sim = np.random.normal(coeffs, stderrs)
        marep = ma_rep(sim, 15)
        # NOTE(review): [9, 0] assumes at least 10 variables - confirm.
        draws.append([marep[j][9, 0] for j in range(marep.shape[0])])

    draws = np.array(draws)
    mean = draws.mean(axis=0)
    up = np.percentile(draws, 97.5, axis=0)
    down = np.percentile(draws, 2.5, axis=0)

    plt.plot(mean, color=seaborn.xkcd_rgb['cornflower blue'], alpha=1, linestyle='-')
    plt.plot(up, color=seaborn.xkcd_rgb['indian red'], alpha=0.5, linestyle='--')
    plt.plot(down, color=seaborn.xkcd_rgb['indian red'], alpha=0.5, linestyle='--')
    plt.fill_between(range(len(mean)), up, down, alpha=0.5)
    # NOTE(review): single-argument xlim sets only the left limit - confirm intent.
    plt.xlim(1)
    plt.show()
def VarSimul(data, H):
    """Bootstrap the sampling distribution of one MA coefficient.

    Fits a VAR(H), perturbs the coefficients with Gaussian noise scaled
    by their standard errors 1000 times, recomputes the MA representation
    each time, and plots the distribution of element ``marep[1][0, 0]``.

    :param data: DataFrame of observations (columns = variables)
    :param H: integer, VAR lag order passed to ``model.fit``
    :return: None (prints the std-dev and shows a matplotlib figure)
    """
    model = sm.VAR(data)
    results = model.fit(H)
    nAssets = len(data.columns)
    # Drop the intercept row; the rest are the stacked lag-coefficient rows.
    coeffs = np.array(results.params[1:])
    stderrs = np.array(results.stderr[1:])
    # '//' keeps the reshape dimensions integral (true division would
    # produce a float and make reshape fail).
    nLags = len(coeffs) // nAssets
    coeffs = coeffs.reshape(nLags, nAssets, nAssets)
    stderrs = stderrs.reshape(nLags, nAssets, nAssets)

    sample = []
    for _ in range(1000):
        # Elementwise Gaussian draw; replaces the original triple loop.
        sim = np.random.normal(coeffs, stderrs)
        marep = ma_rep(sim, 10)
        sample.append(marep[1][0, 0])

    print(np.std(sample))
    seaborn.distplot(sample, norm_hist=True)
    plt.show()
def EstimateVARTest(data, H, sparse_method=False):
    """
    Experimental variant of EstimateVAR: computes the GVD ratio without
    the 1/sigma scaling and without row-normalization, and prints
    diagnostic comparisons against SIGMA.

    :param data: A numpy array of log returns
    :param H: integer, size of step ahead forecast
    :return: (GVD DataFrame, residual covariance SIGMA, MA representation,
        residuals)
    """
    model = sm.VAR(data)
    results = model.fit(maxlags=H, ic='aic')
    SIGMA = np.cov(results.resid.T)

    if sparse_method:
        _nAssets = results.params.shape[1]
        # params = 1 intercept row + nLags*nAssets coefficient rows;
        # subtract the intercept and use '//' so reshape gets integers.
        _nLags = (results.params.shape[0] - 1) // _nAssets
        custom_params = np.where(
            abs(results.params / results.stderr) > 1.96,
            results.params, 0)[1:].reshape((_nLags, _nAssets, _nAssets))
        _ma_rep = ma_rep(custom_params, maxn=H)
    else:
        _ma_rep = results.ma_rep(maxn=H)

    GVD = np.zeros_like(SIGMA)
    r, c = GVD.shape
    for i in range(r):
        for j in range(c):
            # Scaled variant kept for reference by the author:
            #GVD[i, j] = 1 / np.sqrt(SIGMA[i, i]) * sum([_ma_rep[h, i].dot(SIGMA[j]) ** 2 for h in range(H)]) / sum([_ma_rep[h, i, :].dot(SIGMA).dot(_ma_rep[h, i, :]) for h in range(H)])
            GVD[i, j] = sum([_ma_rep[h, i].dot(SIGMA[j]) ** 2
                             for h in range(H)]) / sum(
                [_ma_rep[h, i, :].dot(SIGMA).dot(_ma_rep[h, i, :])
                 for h in range(H)])
        #GVD[i] /= GVD[i].sum()
    # Diagnostics: scaled copies of SIGMA/GVD and their difference.
    print(pd.DataFrame(SIGMA) * 10000000)
    print(pd.DataFrame(GVD) * 10000000)
    print(pd.DataFrame(SIGMA) - pd.DataFrame(GVD))
    return pd.DataFrame(GVD), SIGMA, _ma_rep, results.resid
def EstimateVAR(data, H, sparse_method=False, GVD_output=True):
    """
    Estimate a VAR and compute the generalized variance decomposition (GVD).

    :param data: A numpy array of log returns
    :param H: integer, size of step ahead forecast
    :param sparse_method: if True, zero out coefficients with |t-stat| <= 1.96
        before building the MA representation (branch currently disabled by
        the author's guard below)
    :param GVD_output: if False, the GVD computation is skipped and the
        returned GVD frame contains uninitialized np.empty_like memory
    :return: (GVD DataFrame, residual covariance SIGMA, MA representation,
        residuals)
    """
    model = sm.VAR(data)
    results = model.fit(maxlags=H, ic="aic")
    SIGMA = np.cov(results.resid.T)

    if sparse_method:
        # Author's guard: this branch should be rebuilt on results.coefs
        # instead of results.params (Danish message preserved verbatim).
        exit("METODEN BRUGER RESULTS.COEFS FREM FOR PARAMS")
        _nAssets = results.params.shape[1]
        # params = 1 intercept row + nLags*nAssets coefficient rows;
        # subtract the intercept and use '//' so reshape gets integers.
        _nLags = (results.params.shape[0] - 1) // _nAssets
        custom_params = np.where(abs(results.params / results.stderr) > 1.96,
                                 results.params, 0)[1:].reshape(
                                     (_nLags, _nAssets, _nAssets))
        _ma_rep = ma_rep(custom_params, maxn=H)
    else:
        _ma_rep = results.ma_rep(maxn=H)

    GVD = np.empty_like(SIGMA)
    if GVD_output:
        r, c = GVD.shape
        for i in range(r):
            for j in range(c):
                # Diebold-Yilmaz generalized FEVD:
                #   theta_ij = sigma_jj^-1 * sum_h (e_i' A_h SIGMA e_j)^2
                #            / sum_h (e_i' A_h SIGMA A_h' e_i)
                # BUGFIX: the original scaled by 1/sqrt(sigma_jj); the
                # formula requires 1/sigma_jj.
                GVD[i, j] = (
                    1 / SIGMA[j, j]
                    * sum([_ma_rep[h, i].dot(SIGMA[j]) ** 2 for h in range(H)])
                    / sum([_ma_rep[h, i, :].dot(SIGMA).dot(_ma_rep[h, i, :])
                           for h in range(H)])
                )
            # Row-normalize so each row of the decomposition sums to 1.
            GVD[i] /= GVD[i].sum()
    return pd.DataFrame(GVD), SIGMA, _ma_rep, results.resid
def EstimateVARTest(data, H, sparse_method=False):
    """
    Experimental variant of EstimateVAR: computes the GVD ratio without
    the 1/sigma scaling and without row-normalization, and prints
    diagnostic comparisons against SIGMA.

    :param data: A numpy array of log returns
    :param H: integer, size of step ahead forecast
    :return: (GVD DataFrame, residual covariance SIGMA, MA representation,
        residuals)
    """
    model = sm.VAR(data)
    results = model.fit(maxlags=H, ic='aic')
    SIGMA = np.cov(results.resid.T)

    if sparse_method:
        _nAssets = results.params.shape[1]
        # params = 1 intercept row + nLags*nAssets coefficient rows;
        # subtract the intercept and use '//' so reshape gets integers.
        _nLags = (results.params.shape[0] - 1) // _nAssets
        custom_params = np.where(abs(results.params / results.stderr) > 1.96,
                                 results.params, 0)[1:].reshape(
                                     (_nLags, _nAssets, _nAssets))
        _ma_rep = ma_rep(custom_params, maxn=H)
    else:
        _ma_rep = results.ma_rep(maxn=H)

    GVD = np.zeros_like(SIGMA)
    r, c = GVD.shape
    for i in range(r):
        for j in range(c):
            # Scaled variant kept for reference by the author:
            #GVD[i, j] = 1 / np.sqrt(SIGMA[i, i]) * sum([_ma_rep[h, i].dot(SIGMA[j]) ** 2 for h in range(H)]) / sum([_ma_rep[h, i, :].dot(SIGMA).dot(_ma_rep[h, i, :]) for h in range(H)])
            GVD[i, j] = sum([_ma_rep[h, i].dot(SIGMA[j]) ** 2
                             for h in range(H)]) / sum(
                [_ma_rep[h, i, :].dot(SIGMA).dot(_ma_rep[h, i, :])
                 for h in range(H)])
        #GVD[i] /= GVD[i].sum()
    # Diagnostics: scaled copies of SIGMA/GVD and their difference.
    print(pd.DataFrame(SIGMA) * 10000000)
    print(pd.DataFrame(GVD) * 10000000)
    print(pd.DataFrame(SIGMA) - pd.DataFrame(GVD))
    return pd.DataFrame(GVD), SIGMA, _ma_rep, results.resid