# Calculate VaR and Kupiec statistics
import pandas as pd
import numpy as np
from GARCHnumpy import GARCH as garch
import json
from pprint import pprint
import scipy.stats as sps

# --- Load estimated GARCH parameters and build the model ---------------------
# Fix: json.load(open(...)) leaks the file handle; a with-block closes it
# deterministically.
with open('GARCH_est_Sun_01-11-2015_21.06.57.json') as f:
    est_dct = json.load(f)
#pprint(est_dct)
model = garch(est_dct['theta_opt'], mu=est_dct['mean'])
print(model)

# Daily log returns of the S&P 500; the first row is NaN after diff() and is dropped.
df = pd.read_csv('./data/sp500.csv', index_col='Date', parse_dates=True).sort_index()
df['return'] = np.log(df['Close']).diff()
df = df.dropna()

# Out-of-sample window: everything after the end of the estimation period.
var_index = df.index[df.index > est_dct['period'][1]]
var_data = pd.DataFrame()
var_data['return'] = df.loc[var_index, 'return']

# VaR forecasts from the fitted model; indexed below by str(alpha), so it is
# presumably a dict keyed by the alpha level as a string -- confirm against
# GARCHnumpy.VaR.
VaR = model.VaR(var_data['return'])
uncon_cov = pd.DataFrame()      # Kupiec unconditional-coverage results
N = len(var_data['return'])     # number of out-of-sample observations
i = 0                           # row counter for uncon_cov
# Kupiec unconditional-coverage test at each VaR confidence level.
# NOTE(review): this loop appears truncated in this chunk -- `lam` is computed
# but never stored, and `i` is never incremented, so every iteration would
# overwrite row 0 of uncon_cov; confirm against the full script.
for alpha in [0.01,0.025,0.05]:
	# Attach the alpha-level VaR series and count exceedances
	# (realized return falling below the VaR forecast).
	var_data['VaR_'+str(alpha)] = VaR[str(alpha)]
	N_ex = sum(var_data['return'] < VaR[str(alpha)])
	uncon_cov.loc[i,'alpha'] = alpha
	uncon_cov.loc[i,'coverage'] = N_ex/N 
	# Kupiec LR statistic: 2*(log-lik at observed exceedance frequency minus
	# log-lik at the nominal alpha). Undefined (log of 0) when N_ex is 0 or N.
	lam = 2*(N_ex * np.log(N_ex/N) + (N-N_ex) * np.log(1-N_ex/N) - (N_ex * np.log(alpha) + (N-N_ex) * np.log(1-alpha)))
# Esempio n. 2
# 0
# --- Estimation sample and GARCH starting values -----------------------------
# In-sample window: the first 4495 observations.
est_index = df.index[0:4495]
print('From ', min(est_index))
print('To   ', max(est_index))

# Calculating demeaned returns and variance
# -----------------------------------------
est_data = pd.DataFrame()
est_data['return'] = df.loc[est_index, 'return']
mu = np.mean(est_data['return'])
est_data['return_dm'] = est_data['return'] - mu
variance = np.var(est_data['return'])

# Starting GARCH(1,1) parameters. Fix: 0.088 / 0.86 were previously duplicated
# in two places; name them once. omega targets the sample variance:
# omega = variance * (1 - alpha - beta).
alpha_start = 0.088
beta_start = 0.86
omega_start = variance * (1 - alpha_start - beta_start)
print('average  = ', mu)
print('variance = ', variance)
print('omega    = ', omega_start)

model = garch()
theta0 = np.array([omega_start, alpha_start, beta_start])
# Optimize in log-space so positivity of the parameters is automatic.
gamma0 = np.log(theta0)
log_like, est_data['sigma2_non-opt'] = model.log_likelihood(
    gamma=gamma0, y=est_data['return_dm'], fmin=False)
print('Initial log likelihood =', log_like)


# Minimize loss function
# ----------------------
# Function for printing output during BFGS minimization
def callbackF(Xi):
    """scipy.optimize callback: record the current parameter vector in w_list.

    NOTE(review): the visible body only appends Xi; Nfeval and log_like are
    declared global but never used here -- the function may be truncated in
    this chunk. Confirm against the full script.
    """
    global Nfeval
    global log_like
    global w_list
    w_list.append(Xi)
# Esempio n. 3
# 0
# Calculate VaR and Kupiec statistics
import pandas as pd
import numpy as np
from GARCHnumpy import GARCHnumpy as garch
import json
from pprint import pprint
import scipy.stats as sps

# --- Load estimated GARCH parameters and build the model ---------------------
# Fix: json.load(open(...)) leaks the file handle; a with-block closes it
# deterministically.
with open('GARCH_est_Sun_01-11-2015_21.06.57.json') as f:
    est_dct = json.load(f)
#pprint(est_dct)
model = garch(est_dct['theta_opt'], mu=est_dct['mean'])
print(model)

# Daily log returns of the S&P 500; the first row is NaN after diff() and is dropped.
df = pd.read_csv('./data/sp500.csv', index_col='Date', parse_dates=True).sort_index()
df['return'] = np.log(df['Close']).diff()
df = df.dropna()

# Out-of-sample window: everything after the end of the estimation period.
var_index = df.index[df.index > est_dct['period'][1]]
var_data = pd.DataFrame()
var_data['return'] = df.loc[var_index, 'return']

# VaR forecasts from the fitted model; indexed below by str(alpha), so it is
# presumably a dict keyed by the alpha level as a string -- confirm against
# GARCHnumpy.VaR.
VaR = model.VaR(var_data['return'])
uncon_cov = pd.DataFrame()      # Kupiec unconditional-coverage results
N = len(var_data['return'])     # number of out-of-sample observations
i = 0                           # row counter for uncon_cov
# Kupiec unconditional-coverage test at each VaR confidence level.
# NOTE(review): this loop runs to the end of the visible chunk and is likely
# truncated -- `lam` is computed but never stored, and `i` is never
# incremented, so every iteration would overwrite row 0 of uncon_cov; confirm
# against the full script.
for alpha in [0.01,0.025,0.05]:
	# Attach the alpha-level VaR series and count exceedances
	# (realized return falling below the VaR forecast).
	var_data['VaR_'+str(alpha)] = VaR[str(alpha)]
	N_ex = sum(var_data['return'] < VaR[str(alpha)])
	uncon_cov.loc[i,'alpha'] = alpha
	uncon_cov.loc[i,'coverage'] = N_ex/N 
	# Kupiec LR statistic: 2*(log-lik at observed exceedance frequency minus
	# log-lik at the nominal alpha). Undefined (log of 0) when N_ex is 0 or N.
	lam = 2*(N_ex * np.log(N_ex/N) + (N-N_ex) * np.log(1-N_ex/N) - (N_ex * np.log(alpha) + (N-N_ex) * np.log(1-alpha)))