# prices
b_GE = GE.Dirty_Prices / 100

# NS parameters' daily increments time series

t_ = len(GE.Date)
thetaGE = zeros((4, t_))
thetaGE[0], thetaGE[1], thetaGE[2], thetaGE[3], *_ = BootstrapNelSieg(
    GE.Date, b_GE, b_sched_GE, tau, par_start)
DateGE = GE.Date
# -
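# A minimal sketch of the fitted curve, assuming the textbook Nelson-Siegel
# parameterization with theta = (level, slope, curvature, decay);
# BootstrapNelSieg's exact sign conventions may differ.
def ns_yield(tau_grid, theta):
    # yield curve y(tau) implied by the four Nelson-Siegel parameters
    h = (1 - exp(-tau_grid / theta[3])) / (tau_grid / theta[3])
    return theta[0] + theta[1] * h + theta[2] * (h - exp(-tau_grid / theta[3]))

# e.g. the fitted curve on the last date:
# y_curve = ns_yield(arange(0.5, 10.5, 0.5), thetaGE[:, -1])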

# ## Match the observations in the three datasets

# +
date, idx_sp, idx_GE = intersect(DateSP, DateGE)
ret_SP500 = ret_SP500[idx_sp]
thetaGE = thetaGE[:, idx_GE]
dates, I_sp_ge, I_vix = intersect(date, DateVIX)
ret_SP500 = ret_SP500[I_sp_ge]
thetaGE = thetaGE[:, I_sp_ge]
vix = vix[I_vix]

epsi = r_[ret_SP500[np.newaxis, ...], thetaGE]
i_, t_ = epsi.shape
# -

# ## Compute the Flexible Probabilities conditioned on VIX

# +
try:
    db = loadmat(os.path.join(GLOBAL_DB, 'db_SwapCurve'), squeeze_me=True)
except FileNotFoundError:
    db = loadmat(
        os.path.join(TEMPORARY_DB, 'db_SwapCurve'),
        squeeze_me=True)  # rolling values used to compute the short rate

DF_Rolling = struct_to_dict(db['DF_Rolling'], False)

try:
    db = loadmat(os.path.join(GLOBAL_DB, 'db_VIX'), squeeze_me=True)
except FileNotFoundError:
    db = loadmat(os.path.join(TEMPORARY_DB, 'db_VIX'),
                 squeeze_me=True)  # Vix index values

VIX = struct_to_dict(db['VIX'], False)

# select the common observations between db_ImpliedVol_SPX and DF_Rolling
# (reducing both databases to the shared dates)
[_, i_impvol, i_rates] = intersect(db_ImpliedVol_SPX['Dates'],
                                   DF_Rolling['Dates'])
db_ImpliedVol_SPX['Dates'] = db_ImpliedVol_SPX['Dates'][i_impvol]
db_ImpliedVol_SPX['Underlying'] = db_ImpliedVol_SPX['Underlying'][i_impvol]
db_ImpliedVol_SPX['Sigma'] = db_ImpliedVol_SPX['Sigma'][:, :, i_impvol]
DF_Rolling['Dates'] = DF_Rolling['Dates'][i_rates]
DF_Rolling['Prices'] = DF_Rolling['Prices'][:, i_rates]

# select the common observations between the reduced db_ImpliedVol_SPX
# database and db_VIX (reducing both databases to the shared dates)
[dates, i_impvol, i_vix] = intersect(db_ImpliedVol_SPX['Dates'], VIX['Date'])
VIX['Date'] = VIX['Date'][i_vix]
VIX['value'] = VIX['value'][i_vix]
db_ImpliedVol_SPX['Dates'] = db_ImpliedVol_SPX['Dates'][i_impvol]
db_ImpliedVol_SPX['Underlying'] = db_ImpliedVol_SPX['Underlying'][i_impvol]
db_ImpliedVol_SPX['Sigma'] = db_ImpliedVol_SPX['Sigma'][:, :, i_impvol]
# Example #3
# +
dates_x = Data.Dates
x = Data.Prices

# compute the log-returns
epsi = log(x[:i_, 1:] / x[:i_, :-1])
# conditioning variable (VIX)
z = VIX['value']
dates_z = VIX['Date']
# -

# ## Merge the datasets and select the first t_end observations

# +
[dates, i_x, i_z] = intersect(dates_x, dates_z)

epsi = epsi[:, i_x[:t_]]
z = z[i_z[:t_]].reshape(1,-1)
# -

# ## Estimate the distribution of the invariants

# +
# correlation matrix and its rank-1 factor approximation
d = zeros((1, i_))
rank = 1  # rank of the factor model

c2 = np.corrcoef(epsi)
c2, *_ = FactorAnalysis(c2, d, rank)
c2 = real(c2)
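# A quick sanity check on the factor-analysis output (a sketch, not the ARPM
# routine): rebuild a rank-1 correlation from the top eigenpair of c2 plus a
# diagonal that restores unit variances.
eigval, eigvec = np.linalg.eigh(c2)
beta = eigvec[:, [-1]] * sqrt(eigval[-1])  # loadings from the top eigenpair
c2_check = beta @ beta.T + np.diag(1 - (beta ** 2).flatten())
assert np.allclose(np.diag(c2_check), np.ones(i_))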
# Example #4
from ARPM_utils import save_plot, struct_to_dict, date_mtop, datenum
from intersect_matlab import intersect
# -

# ## Upload rolling values from 03-Oct-2002 to 03-Oct-2007 with 1 year to maturity, contained in db_SwapCurve

# +
try:
    db = loadmat(os.path.join(GLOBAL_DB, 'db_SwapCurve'), squeeze_me=True)
except FileNotFoundError:
    db = loadmat(os.path.join(TEMPORARY_DB, 'db_SwapCurve'), squeeze_me=True)

DF_Rolling = struct_to_dict(db['DF_Rolling'])

# extraction of rolling values from 03-Oct-2002 to 03-Oct-2007 with tau = 1 year
_, _, dateIndices = intersect(
    [datenum('03-Oct-2002'), datenum('03-Oct-2007')], DF_Rolling.Dates)
_, _, tauIndex = intersect(1, DF_Rolling.TimeToMat)
zroll = DF_Rolling.Prices[tauIndex, dateIndices[0]:dateIndices[1] + 1]
dates = DF_Rolling.Dates[dateIndices[0]:dateIndices[1] + 1]
time = arange(dates[0], dates[-1] + 1)

t_end = array([
    '03-Oct-2003', '03-Oct-2004', '03-Oct-2005', '03-Oct-2006', '03-Oct-2007'
])

_, timeindex, _ = intersect(time, list(map(datenum, t_end)))
# -

# ## Interpolate the rolling values on a yearly spaced grid

zroll = interp(time, dates, zroll[0])
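# The interp helper above appears to match NumPy's np.interp argument order
# (points to evaluate, known x, known y). A self-contained sketch with
# hypothetical date serials:
dates_known = array([731856., 732221., 732586.])  # hypothetical date serials
z_known = array([0.97, 0.96, 0.95])  # rolling values observed at those dates
z_daily = np.interp(arange(dates_known[0], dates_known[-1] + 1),
                    dates_known, z_known)  # linear interpolation, daily grid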
# Example #5
try:
    db = loadmat(os.path.join(GLOBAL_DB, 'db_OptionStrategy'), squeeze_me=True)
except FileNotFoundError:
    db = loadmat(os.path.join(TEMPORARY_DB, 'db_OptionStrategy'),
                 squeeze_me=True)

OptionStrategy = struct_to_dict(db['OptionStrategy'], as_namedtuple=False)

try:
    db = loadmat(os.path.join(GLOBAL_DB, 'db_VIX'), squeeze_me=True)
except FileNotFoundError:
    db = loadmat(os.path.join(TEMPORARY_DB, 'db_VIX'), squeeze_me=True)

VIX = struct_to_dict(db['VIX'], as_namedtuple=False)

DateOptStrat = array([datenum(i) for i in OptionStrategy['Dates']]).T
common, i_spvix, i_rates = intersect(SPX['Date'], DF_Rolling['Dates'])
SPX['Date'] = SPX['Date'][i_spvix]
SPX['Price_close'] = SPX['Price_close'][i_spvix]
VIX['value'] = VIX['value'][i_spvix]
VIX['Date'] = VIX['Date'][i_spvix]
DF_Rolling['Dates'] = DF_Rolling['Dates'][i_rates]
DF_Rolling['Prices'] = DF_Rolling['Prices'][:, i_rates]
common, i_others, i_options = intersect(common, DateOptStrat)
SPX['Date'] = SPX['Date'][i_others]
SPX['Price_close'] = SPX['Price_close'][i_others]
VIX['value'] = VIX['value'][i_others]
VIX['Date'] = VIX['Date'][i_others]
DF_Rolling['Dates'] = DF_Rolling['Dates'][i_others]
DF_Rolling['Prices'] = DF_Rolling['Prices'][:, i_others]
DateOptStrat = DateOptStrat[i_options]
OptionStrategy['cumPL'] = OptionStrategy['cumPL'][i_options]
# Example #6
db_ImpliedVol_FX = struct_to_dict(db['db_ImpliedVol_FX'])

# implied volatility surface for GBPUSD rate (in percentage format)

tau = db_ImpliedVol_FX.TimesToMaturity
delta = db_ImpliedVol_FX.Delta
sigma_delta = db_ImpliedVol_FX.Sigma
t_ = sigma_delta.shape[2]
n_ = len(delta)
# -

# ## Plot the implied volatility surface and the evolution of implied volatility for the desired values of delta-moneyness and times to maturity

# +
_, tauIndex, _ = intersect(tau, 1)  # select the 1-year maturity
meanIndex_delta = int(ceil(n_ / 2)) - 1

x, y = np.meshgrid(delta, tau)

f, ax = subplots(1, 1, subplot_kw={'projection': '3d'})
ax.view_init(30, -120)
ax.plot_surface(x, y, sigma_delta[:, :, t_ - 1])
ax.scatter(x.flatten(), y.flatten(), sigma_delta[:, :, t_ - 1].flatten(),
           edgecolor='k')
plot(delta[[0]], tau[tauIndex], sigma_delta[tauIndex, 0, t_ - 1],
     marker='.', color='r', markersize=20)
plot(delta[[meanIndex_delta]], tau[tauIndex],
     sigma_delta[tauIndex, meanIndex_delta, t_ - 1],
     marker='.', color='b', markersize=20)
plot(delta[[n_ - 1]], tau[tauIndex], sigma_delta[tauIndex, n_ - 1, t_ - 1],
     marker='.', color='g', markersize=20)
xlabel(r'$\delta$-moneyness', labelpad=10)
ylabel('Time to maturity (years)', labelpad=10)
ax.set_zlabel('Volatility (%)')
# Example #7
# ## Recover the invariants and the time series of the conditioning variables

# +
# invariants (S&P500 returns)
epsi = diff(log(SPX.Price_close))

# CONDITIONING VARIABLES
# 1) VIX (VIX.value)

# 2) 5-year swap zero rate
ZeroRates, _ = RollPrices2YieldToMat(DF_Rolling.TimeToMat, DF_Rolling.Prices)
zr5 = ZeroRates[DF_Rolling.TimeToMat == 5, :]

# merging datasets
date, _, _ = intersect(intersect(SPX.Date[1:], VIX.Date), DF_Rolling.Dates)
_, i_spx, _ = intersect(SPX.Date[1:], date)
_, i_vix, _ = intersect(VIX.Date, date)
_, i_zr, _ = intersect(DF_Rolling.Dates, date)

epsi = epsi[i_spx].reshape(1, -1)
z1 = VIX.value[i_vix].reshape(1, -1)
z2 = zr5[0, i_zr].reshape(1, -1)
t_ = len(date)
# -

# ## Compute the Flexible Probabilities conditioning on each of the two factors

# +
alpha = 0.3
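# ConditionalFP computes state-conditioned flexible probabilities via entropy
# pooling with leeway alpha. A simplified crisp-conditioning sketch (not the
# ARPM routine): put equal weight on the alpha-fraction of observations whose
# conditioner is nearest to the target.
def crisp_fp(z_cond, z_target, leeway):
    z_flat = np.ravel(z_cond)
    k = max(int(ceil(leeway * len(z_flat))), 1)
    nearest = np.argsort(abs(z_flat - z_target))[:k]
    p_crisp = zeros(len(z_flat))
    p_crisp[nearest] = 1 / k  # equal weight on the selected scenarios
    return p_crisp.reshape(1, -1)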
# Example #8
# scoring
mu_hat = zeros((1, t_vix))
mu2_hat = zeros((1, t_vix))
sd_hat = zeros((1, t_vix))
for t in range(t_vix):
    p_scor_t = exp(-log(2) / tauHL_scor * (tile(t + 1,
                                                (1, t + 1)) - times[:t + 1]))
    gamma_scor_t = npsum(p_scor_t)
    mu_hat[0, t] = npsum(p_scor_t * z_vix[0, :t + 1]) / gamma_scor_t
    mu2_hat[0, t] = npsum(p_scor_t * (z_vix[0, :t + 1])**2) / gamma_scor_t
    sd_hat[0, t] = sqrt(mu2_hat[0, t] - (mu_hat[0, t])**2)

z_vix = (z_vix - mu_hat) / sd_hat
dates_zvix = VIX.Date
# -
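# The scoring loop above is the exponentially weighted z-score of the VIX with
# half-life tauHL_scor. A compact equivalent for a 1-d series (a sketch; note
# sd is zero at the first observation, exactly as in the loop):
def ew_zscore(z_series, half_life):
    t_n = len(z_series)
    out = zeros(t_n)
    for t in range(t_n):
        w = exp(-log(2) / half_life * arange(t, -1, -1))  # decaying weights
        w = w / npsum(w)
        mu_t = w @ z_series[:t + 1]
        out[t] = (z_series[t] - mu_t) / sqrt(w @ z_series[:t + 1] ** 2
                                             - mu_t ** 2)
    return out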

# ## Match the time series of invariants with the time series of the conditioning variable

dates_SPX, tau_vix, tau_SPX = intersect(VIX.Date, dates)
z_vix_cond = z_vix[[0], tau_vix].reshape(1, -1)
epsi_SPX = epsi_SPX[tau_SPX].reshape(1, -1)
i_, t_ = epsi_SPX.shape

# ## Compute the state and time conditioning probabilities

z_vix_star = z_vix_cond[[0], -1]  # target value
prior = exp(-log(2) / tauHL_prior * arange(t_, 0, -1))
prior = prior / npsum(prior)
# conditioner
conditioner = namedtuple('conditioner', 'Series TargetValue Leeway')
conditioner.Series = z_vix_cond
conditioner.TargetValue = np.atleast_2d(z_vix_star)
conditioner.Leeway = alpha
p = ConditionalFP(conditioner, prior)
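# A quick diagnostic on the conditioned probabilities (a sketch; the ARPM code
# typically delegates this to an EffectiveScenarios routine): the exponential
# of the entropy gives the effective number of scenarios.
ens = exp(-np.nansum(p * log(p)))  # zero-probability scenarios contribute 0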
# Example #9
SPX = struct_to_dict(db['SPX'])
StocksSPX = struct_to_dict(db['StocksSPX'])
# -

# ## Compute the log-returns

# +
SPX_ = SPX.Price_close  # S&P500
x1 = SPX_
dx1 = diff(log(x1))

x2, dx2 = Price2AdjustedPrice(StocksSPX.Date.reshape(1, -1),
                              StocksSPX.Prices[[0]],
                              StocksSPX.Dividends[0])  # Apple Inc

[date, i2, i3] = intersect(StocksSPX.Date[1:], SPX.Date[1:])
dx2 = dx2[[0], i2].reshape(1, -1)
dx1 = dx1[i3].reshape(1, -1)
# -

# ## Settings

j_ = 10000  # numbers of MC scenarios
n_ = 2  # numbers of securities
tau = 21  # projection horizon

# ## Estimate the daily compounded returns distribution

dx = r_[dx1, dx2]  # extract risk drivers increments (compounded returns)
demean = 1
eps = .01
# Example #10
# scoring
mu_hat = zeros((1, t_vix))
mu2_hat = zeros((1, t_vix))
sd_hat = zeros((1, t_vix))
for t in range(t_vix):
    p_scor_t = exp(-log(2) / tauHL_scor * (tile(t + 1,
                                                (1, t + 1)) - times[:t + 1]))
    gamma_scor_t = npsum(p_scor_t)
    mu_hat[0, t] = npsum(p_scor_t * z_vix[0, :t + 1]) / gamma_scor_t
    mu2_hat[0, t] = npsum(p_scor_t * (z_vix[0, :t + 1])**2) / gamma_scor_t
    sd_hat[0, t] = sqrt(mu2_hat[0, t] - (mu_hat[0, t])**2)

z_vix = (z_vix - mu_hat) / sd_hat

# time series of invariants and VIX time series matching
dates_stocks, tau_vix, tau_stock = intersect(VIX.Date[1:], dates)
epsi_stocks = epsi_stocks[:, tau_stock]
z_vix = z_vix[[0], tau_vix]
z_vix_star = z_vix[-1]  # target value
i_, t_ = epsi_stocks.shape

# state and time conditioned probabilities
prior = exp(-log(2) / tauHL_prior * arange(t_, 0, -1)).reshape(1, -1)
prior = prior / npsum(prior)

# conditioner
conditioner = namedtuple('conditioner', ['Series', 'TargetValue', 'Leeway'])
conditioner.Series = z_vix.reshape(1, -1)
conditioner.TargetValue = np.atleast_2d(z_vix_star)
conditioner.Leeway = alpha
# Example #11
try:
    db = loadmat(os.path.join(GLOBAL_DB, 'db_SwapCurve'), squeeze_me=True)
except FileNotFoundError:
    db = loadmat(os.path.join(TEMPORARY_DB, 'db_SwapCurve'), squeeze_me=True)

DF_Rolling = struct_to_dict(db['DF_Rolling'], as_namedtuple=False)

try:
    db = loadmat(os.path.join(GLOBAL_DB, 'db_ImpliedVol_SPX'), squeeze_me=True)
except FileNotFoundError:
    db = loadmat(os.path.join(TEMPORARY_DB, 'db_ImpliedVol_SPX'),
                 squeeze_me=True)

db_ImpliedVol_SPX = struct_to_dict(db['db_ImpliedVol_SPX'],
                                   as_namedtuple=False)

# merge
[common, i_stocks, i_rates] = intersect(StocksSPX['Date'], DF_Rolling['Dates'])
[common, i_others, i_options] = intersect(common, db_ImpliedVol_SPX['Dates'])
StocksSPX['Date'] = StocksSPX['Date'][i_stocks[i_others]]
StocksSPX['Prices'] = StocksSPX['Prices'][:, i_stocks[i_others]]
DF_Rolling['Dates'] = DF_Rolling['Dates'][i_rates[i_others]]
DF_Rolling['Prices'] = DF_Rolling['Prices'][:, i_rates[i_others]]
db_ImpliedVol_SPX['Dates'] = db_ImpliedVol_SPX['Dates'][i_options]
db_ImpliedVol_SPX['Underlying'] = db_ImpliedVol_SPX['Underlying'][i_options]
db_ImpliedVol_SPX['Sigma'] = db_ImpliedVol_SPX['Sigma'][:, :, i_options]

# len of the time series
t_riskdrivers = len(common)
# -

# ## 1a-1b Quest for invariance
# ## Stocks: compute the log-adjusted values and obtain the invariants, i.e. the compounded returns, as their increments
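# A minimal sketch of this step, assuming StocksSPX['Prices'] is already
# dividend-adjusted (the full scripts use Price2AdjustedPrice for that):
x_stocks = log(StocksSPX['Prices'])  # risk drivers: log-adjusted values
epsi_stocks = diff(x_stocks, 1, 1)  # invariants: compounded returns
dates_stocks = StocksSPX['Date'][1:]  # increments drop the first date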
# Example #12
def RollPrices2Prices(t_end_str, tau, dates, z_roll):
    # This function uses rolling values to compute the zero-coupon bond value
    # (i.e., discount factors) with maturities in t_end_str.
    # INPUTS
    #  t_end_str [vector]: (k_ x 1) selected maturities (as 'dd-mmm-yy' strings)
    #  tau [vector]: (n_ x 1) times to maturity corresponding to rows of z_roll
    #  dates [vector]: (1 x t_end) dates corresponding to columns of z_roll
    #  z_roll [matrix]: (n_ x t_end) rolling values
    # OUTPUTS
    #  date [dict]: (k_ entries) date[j] contains the numerical values of the
    #                            dates corresponding to the columns of z[j]
    #  z [dict]: (k_ entries) z[j] contains the evolution of the zero-coupon
    #                         bond value with maturity t_end[j]
    #  t_end [vector]: (k_ x 1) numerical values corresponding to the date
    #                           strings in t_end_str

    # tau_int: vector of maturities for interpolation
    tauRange = ceil((dates[-1] - dates[0]) / 365)
    _, _, tauIndex = intersect(tauRange, tau)
    if tauIndex.size == 0:
        tauIndex = tau.shape[0] - 1  # fall back to the longest maturity
    tau_int = arange(tau[0], tau[tauIndex] + tau[0], tau[0])
    # declaration and preallocation of variables
    t_ = z_roll.shape[1]
    n_ = npmax(tau_int.shape)
    z_roll_int = zeros((n_, t_))
    expiry = zeros((n_, t_))
    expiry_f = zeros((n_, t_))
    k_ = t_end_str.shape[0]
    t_end = zeros((k_, 1), dtype=int)
    z = {}
    date = {}

    for t in range(t_):
        # remove zeros
        indexPolished = np.where(abs(z_roll[:, t]) > 0)[0]
        # matrix of rolling values: z_roll(i,t)=z_{t}(tau[i]+t)
        z_roll_int[:, t] = interp(tau_int, tau[indexPolished],
                                  z_roll[indexPolished, t])
        # expiries
        for i in range(n_):
            expiry[i, t] = tau_int[i] * 365 + dates[t]
            expiry_f[i, t] = floor(expiry[i, t])  # to remove HH:mm:ss

    # zero-coupon bond values (i.e., discount factors) with fixed expiry
    for j in range(k_):
        z[j] = zeros((1, t_))
        date[j] = zeros((1, t_))
        t_end[j] = datenum(t_end_str[j])
        # z[j] = np.where(expiry_f==t_end[j],z_roll_int,z[j])
        # date[j] = np.where(expiry_f==t_end[j],dates,date[j])
        for t in range(t_):
            for i in range(n_):
                if expiry_f[i, t] == t_end[j]:
                    z[j][0, t] = z_roll_int[i, t]
                    date[j][0, t] = dates[t]
        # remove zeros
        indexzeros = np.where(date[j] == 0)
        date[j][indexzeros] = np.nan
        z[j][indexzeros] = np.nan
    return date, z, t_end
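# Hypothetical usage, reusing the DF_Rolling dataset loaded earlier:
date_zcb, z_zcb, t_end_num = RollPrices2Prices(
    array(['03-Oct-2006', '03-Oct-2007']), DF_Rolling.TimeToMat,
    DF_Rolling.Dates, DF_Rolling.Prices)
# z_zcb[0] holds the value path of the zero-coupon bond expiring on t_end_num[0]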
# Example #13
KOSPI = struct_to_dict(db['KOSPI'])
# -

# ## Compute the log-prices and log-returns of the two indexes

# +
# S&P 500 (US)
NSprice = SPX.Price_close
NSdate = SPX.Date

# KOSPI (Korea)
KSprice = KOSPI.Price_close
KSdate = KOSPI.Date

# merge dataset
[dates, i1, i2] = intersect(NSdate, KSdate)
ret1 = diff(log(NSprice[i1])).reshape(1, -1)
ret2 = diff(log(KSprice[i2])).reshape(1, -1)

t_ = 500
ret1 = ret1[[0], -t_:]
ret2 = ret2[[0], -t_:]
dates = dates[-t_ + 1:]

epsi = r_[ret1, ret2]
# -

# ## Flexible Probabilities

# +
# flexible prob.
# Example #14
try:
    db = loadmat(os.path.join(GLOBAL_DB, 'db_FX'), squeeze_me=True)
except FileNotFoundError:
    db = loadmat(os.path.join(TEMPORARY_DB, 'db_FX'), squeeze_me=True)

USD_GBP = struct_to_dict(db['USD_GBP'])
# -

# ## Select the daily price of the Priceline.com Inc equity (S&P 500 dataset, n = 279)
# ## and the USD/GBP daily exchange rate (USD_GBP.FX from db_FX), and compute the risk
# ## drivers: the log-value for the equity and the log-rate for the spot exchange rate.

# +
t_end = 240
dt = 0.5
horiz_u = arange(0, t_end + dt, dt)
dates_stock = Data.Dates
dates_fx = USD_GBP.Date
[dates, i_stock, i_fx] = intersect(dates_stock, dates_fx)  # match the db

# risk drivers and invariants for the stock price
index_stock = 278  # choose the stock
x = log(Data.Prices[index_stock, i_stock])
t_ = len(x)
epsi_stock = diff(x)

# risk drivers and invariants for the foreign exchange rate
fx_USD_GBP = log(USD_GBP.FX[i_fx])
epsi_fx = diff(fx_USD_GBP)
# -

# ## Estimate the input parameters with Flexible Probabilities specified as a rolling
# ## exponential-decay prior with half-life of 1 year, using function FPmeancov
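# A sketch of this step, assuming FPmeancov(x, p) returns the FP mean and
# covariance and a 252-day trading year:
lam_hl = log(2) / 252  # exponential decay with half-life of 1 year
p_fp = exp(-lam_hl * arange(len(epsi_stock), 0, -1)).reshape(1, -1)
p_fp = p_fp / npsum(p_fp)  # flexible probabilities
epsi_joint = r_[epsi_stock.reshape(1, -1), epsi_fx.reshape(1, -1)]
mu_fp, sigma2_fp = FPmeancov(epsi_joint, p_fp)  # FP mean and covariance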
# Example #15
# ## Compute the log-prices and log-returns of the two indexes

# +
# S&P 500 (US)
NSprice = SPX.Price_close
x1 = log(NSprice)
NSdate = SPX.Date

# KOSPI (Korea)
KSprice = KOSPI.Price_close
x2 = log(KSprice)
KSdate = KOSPI.Date

# merge dataset
Date, i1, i2 = intersect(NSdate, KSdate)
logprice1 = x1[i1]
logprice2 = x2[i2]
ret1 = diff(logprice1)
ret2 = diff(logprice2)
# -

# ## Estimate the correlation by concatenating the log-returns over 5 days (l = 4)

# +
# concatenate the daily log-returns
l = 4
tret_ = len(ret1)

y1 = zeros((1, tret_))
y2 = zeros((1, tret_))
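# A sketch of the concatenation step the preallocations above are set up for:
# overlapping 5-day log-returns as the sum of the current and l previous daily
# returns (assuming this is the intended concatenation).
for t in range(l, tret_):
    y1[0, t] = npsum(ret1[t - l:t + 1])
    y2[0, t] = npsum(ret2[t - l:t + 1])
rho_5d = np.corrcoef(y1[0, l:], y2[0, l:])[0, 1]  # 5-day return correlation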
# Example #16
p_tau = db['p_tau']
# -

# ## Select the key rates and recover the historical series of the shadow rates

# +
t_end = 3
fPaym = .5
coup_pay_t = arange(.5, t_end + fPaym, fPaym).reshape(1, -1)
t_ = coup_pay_t.shape[1]
dt = 1 / 252
horiz_u = arange(0, t_end + dt, dt)
u_ = len(horiz_u)

# match the db
[Dates, i_u, i_t] = intersect(horiz_u, coup_pay_t)

if len(i_u) != t_:
    raise ValueError('Setup a suitable dt')

timeStep = 1
pick = range(7)
tau_d = array([[1, 2, 5, 7, 10, 15, 30]]).T
y = Rates[pick, ::timeStep]
eta = 0.013
invcy = InverseCallTransformation(y, {1: eta})  # shadow rates
# -

# ## Fit the MVOU to the historical series of the shadow rates

#dinvcy = diff(invcy, 1, 2)
# Example #17
try:
    db = loadmat(os.path.join(GLOBAL_DB, 'db_SwapCurve'), squeeze_me=True)
except FileNotFoundError:
    db = loadmat(os.path.join(TEMPORARY_DB, 'db_SwapCurve'), squeeze_me=True)

DF_Rolling = struct_to_dict(db['DF_Rolling'])
# -

# ## Compute yields, select observations and compute increments

# +
tau = [1, 5, 21]  # sampling steps in days (daily, weekly, monthly)
nu = array([[2], [10]])  # times to maturity of interest (years)
y = {}

_, index, *_ = intersect(DF_Rolling.TimeToMat, nu)
# yields (daily observations) from rolling prices
y[0], _ = RollPrices2YieldToMat(DF_Rolling.TimeToMat[index],
                                DF_Rolling.Prices[index, :])
# extract daily, weekly and monthly observations
for k in range(len(tau)):
    y[k] = y[0][:, ::tau[k]]
# compute the increments
dy = {}
for k in range(3):
    dy[k] = diff(y[k], 1, 1)
# -

# ## Compute means and covariances

mu = {}
mu_tilde = {}
sigma2 = {}
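# A sketch of the estimation the dictionaries above are set up for: plain
# sample moments of the increments at each sampling frequency (the original
# may use FP estimators; mu_tilde's definition is truncated in this snippet).
for k in range(3):
    mu[k] = np.mean(dy[k], axis=1)  # sample mean of the increments
    sigma2[k] = np.cov(dy[k])  # sample covariance of the increments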
# Example #18
# -

# ## Upload JPM bond prices from db_CorporateBonds and restrict the yields to available dates

# +
try:
    db = loadmat(os.path.join(GLOBAL_DB, 'db_CorporateBonds'), squeeze_me=True)
except FileNotFoundError:
    db = loadmat(os.path.join(TEMPORARY_DB, 'db_CorporateBonds'), squeeze_me=True)

JPM = struct_to_dict(db['JPM'])

t_ = len(JPM.Date)

# dates extraction
_, _, dateIndices_JPM = intersect(JPM.Date, DF_Rolling.Dates)
y_ref = y_ref[:, dateIndices_JPM]

# Bond schedule
b_sched_JPM = zeros((JPM.Coupons.shape[0], 2))
b_sched_JPM[:, 0] = JPM.Coupons / 100
b_sched_JPM[:, 1] = JPM.Expiry_Date

# prices
v_bond_JPM = JPM.Dirty_Prices / 100
# -

# ## Use function BootstrapNelSieg, which calibrates the Nelson-Siegel model on the market prices of JPMorgan coupon-bearing bonds, returns the JPMorgan yield curve and, given the reference curve, computes the spread curve

# fitting
_, _, _, _, _, y_JPM, _, y_ref_graph, _, s_JPM, _ = BootstrapNelSieg(JPM.Date, v_bond_JPM, b_sched_JPM, tau, par_start,
# Example #19
# +
# S&P 500 log-returns
prices = SPX.Price_close
DateSP = SPX.Date

# swap rates
mat = DF_Rolling.TimeToMat
rolling_prices = DF_Rolling.Prices
dateSwap = DF_Rolling.Dates

yields, _ = RollPrices2YieldToMat(mat, rolling_prices)
yield5 = yields[mat == 5, :]  # swap rate with 5 years to maturity

# match the db
[dates, i_ret, i_yield] = intersect(DateSP.T, dateSwap.T)
prices = prices[i_ret]
yield5 = yield5[0, i_yield]

# S&P 500 returns
rets = diff(log(prices), 1)
# 5 years swap rate daily changes
y5changes = diff(yield5, 1)
# Dates
dates = dates[1:]
# -

# ## Normalize the series
# ## Compute the sample interquartile ranges of the S&P 500 returns and of the changes in the 5yr yield over the period 1 January 2005 to 31 December 2010

# +
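# A sketch of the normalization, assuming scipy's interquartile range and a
# hypothetical boolean mask `past` selecting 01-Jan-2005 through 31-Dec-2010:
from scipy.stats import iqr

def normalize_by_iqr(series, past_mask):
    # rescale a series by its sample interquartile range over the past window
    return series / iqr(series[past_mask])

# rets_norm = normalize_by_iqr(rets, past)
# y5changes_norm = normalize_by_iqr(y5changes, past)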
# Example #20
# ## Load the observations of VIX

# +
try:
    db = loadmat(os.path.join(GLOBAL_DB, 'db_VIX'), squeeze_me=True)
except FileNotFoundError:
    db = loadmat(os.path.join(TEMPORARY_DB, 'db_VIX'), squeeze_me=True)

VIX = struct_to_dict(db['VIX'])

Z = VIX.value
Vdates = VIX.Date
dates_Stocks = Data.Dates

# match the db
Dates, i_c, i_vix = intersect(dates_Stocks[1:], Vdates)
C = C[:, i_c]
Z_VIX = Z[i_vix]

n_, t_ = C.shape
# -

# ## Compute Historical distribution with Flexible Probabilities conditioned on the VIX

# +
lam = 0.0005

# exponential decay Flexible Probabilities (prior)
prior = zeros((1, t_))
for t in range(t_):
    prior[0, t] = exp(-(t_ - t) * lam)
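# The loop above is equivalent to the vectorized form used elsewhere in these
# examples:
prior_vec = exp(-lam * arange(t_, 0, -1)).reshape(1, -1)
assert np.allclose(prior_vec, prior)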
# Example #21
try:
    db = loadmat(os.path.join(GLOBAL_DB, 'db_VIX'), squeeze_me=True)
except FileNotFoundError:
    db = loadmat(os.path.join(TEMPORARY_DB, 'db_VIX'), squeeze_me=True)

VIX = struct_to_dict(db['VIX'])

# invariants (daily P&L)
pnl = OptionStrategy.cumPL
epsi = diff(pnl)
dates_x = array([datenum(i) for i in OptionStrategy.Dates])
dates_x = dates_x[1:]

# conditioning variable (VIX)
z = VIX.value
dates_z = VIX.Date

# merging datasets
[dates, i_epsi, i_z] = intersect(dates_x, dates_z)

pnl = pnl[i_epsi + 1]
epsi = epsi[i_epsi]
z = z[i_z]
t_ = len(epsi)
# -

# ## Compute the Flexible Probabilities conditioned via Entropy Pooling

# +
# prior
lam = log(2) / 1800  # half-life of 5 years
prior = exp(-lam * arange(t_, 0, -1)).reshape(1, -1)
prior = prior / npsum(prior)
# Example #22
                 squeeze_me=True)

epsi_SPX = db['epsi_SPX'].reshape(1, -1)
dates_zvix = db['dates_zvix']
dates_SPX = db['dates_SPX']
z_vix = db['z_vix']
nu_marg_SPX = db['nu_marg_SPX']
mu_marg_SPX = db['mu_marg_SPX']
sig2_marg_SPX = db['sig2_marg_SPX']
if db['U_SPX_hor'].ndim == 2:
    U_SPX_hor = db['U_SPX_hor'][newaxis, ...]
else:
    U_SPX_hor = db['U_SPX_hor']
# ## Intersect the time series of the one-step invariants and of the conditioning variable

[dates_epsi, tau_stocks, tau_SPX] = intersect(dates_stocks, dates_SPX)
epsi_stocks = epsi_stocks[:, tau_stocks]
epsi_SPX = epsi_SPX[:, tau_SPX]
epsi = r_[epsi_SPX, epsi_stocks]
_, tau_vix, tau_epsi = intersect(dates_zvix, dates_epsi)
z_vix_cond = z_vix[tau_vix]
epsi = epsi[:, tau_epsi]

i_, t_ = epsi.shape
i_stocks, _ = epsi_stocks.shape
i_SPX, _ = epsi_SPX.reshape(1, -1).shape
_, m_, j_ = U_stocks_hor.shape
# -

# ## Estimate the joint correlation matrix
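# A sketch of the estimate, with p the conditioned flexible probabilities from
# the previous step and assuming FPmeancov(x, p) returns the FP mean and
# covariance:
mu_joint, s2_joint = FPmeancov(epsi, p)
sig_joint = sqrt(np.diag(s2_joint))
c2_joint = s2_joint / np.outer(sig_joint, sig_joint)  # joint correlation matrix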