def createSpotCurve(self, trade_dt):
    """Build a spot (zero) curve from this forward curve.

    Assumes trade_dt falls before the first maturity on the curve.

    :param trade_dt: valuation date used for year-fraction calculations
    :return: ZeroCurve with one spot rate per maturity on this curve
    """
    curve = ZeroCurve([], [])
    # The first forward spans the trade date to the first maturity,
    # so its rate already IS the spot rate for that maturity.
    last_mat = self.mats[0][1]
    last_spot = self.rates[0]
    curve.addRate(last_mat, last_spot)
    for idx in range(1, len(self.mats)):
        # assumes each fwd rate picks up where the previous one drops off
        fwd_start, fwd_end = self.mats[idx]
        fwd_len = get_year_deltas([fwd_start, fwd_end])[-1]
        spot_len = get_year_deltas([trade_dt, last_mat])[-1]
        # (1 + r(T*+T))^(T*+T) = (1 + r(T*))^T* * (1 + f(T,T*))^T
        growth = (1 + last_spot)**spot_len * (1 + self.rates[idx])**fwd_len
        spot = growth**(1 / (spot_len + fwd_len)) - 1
        curve.addRate(fwd_end, spot)
        last_spot = spot
        last_mat = fwd_end
    return curve
def calcPVwithSpotRates(self, curve):
    """Present-value the instrument's cash flows off a spot curve.

    Each cash flow is discounted at the curve yield interpolated for its
    pay date; the summed PV is rounded to 4 decimals.

    :param curve: object exposing get_interpolated_yields()
    :return: float, rounded present value
    """
    return round(
        sum(calcPV(
                cf[1],
                # [1][1] picks the interpolated yield at the cash-flow date
                curve.get_interpolated_yields([self._trade_dt, cf[0]])[1][1],
                get_year_deltas([self._trade_dt, cf[0]])[-1])
            for cf in self._cash_flows),
        4)
def calcPrice(self, d):
    """Calculate the initial forward/futures price at purchase.

    _price is the original price paid for the forward / futures.
    This function calculates it for initial purchase; it is static for
    calculations after the origin of the contract.

    :param d: valuation (purchase) date
    :return: float, cost-of-carry price S * (1 + r)^T
    """
    # Removed leftover pdb.set_trace() — it halted every call in production.
    # time to maturity in years
    T = get_year_deltas([d, self._mat_dt])[-1]
    return self._spot * (1 + self._ir)**T
def getDF(self, trade_dt, mat):
    """Return the associated discount factor for a maturity.

    Assumes that the maturity list is sorted.

    :param trade_dt: valuation date
    :param mat: maturity date
    :return: float discount factor
    """
    zero_rate = self.getZeroRate(mat)
    # convert the maturity date into a year fraction from trade date
    yrs_to_mat = get_year_deltas([trade_dt, mat])[-1]
    return calcDiscountFactor(yrs_to_mat, zero_rate)
def calcFixedRate(self):
    """Solve for the par fixed rate of the swap.

    Builds simple-interest discount factors B(0, Ti) from the floating
    reference curve at each fixed pay date, then applies
    fixed = (1 - B(0, Tn)) / sum_i B(0, Ti).

    :return: float par fixed rate
    """
    # Removed leftover pdb.set_trace() — it halted every call in production.
    disc_factors = []
    for d in self._fixed_pay_dates:
        delt = get_year_deltas([self._trade_dt, d])[-1]
        r = self._float_ref.get_interpolated_yields([self._trade_dt, d])[-1][1]
        # simple-interest discount factor B(0, Ti)
        disc_factors.append(1 / (1 + r * delt))
    # B(0, Tn): discount factor at the final pay date
    B0 = disc_factors[-1]
    return (1 - B0) / sum(disc_factors)
def calcValue(self, d, spot, r):
    """Value the forward at date d: spot minus PV of the delivery price.

    t = time to maturity
    r = interest rate in case rate has changed since time of purchase
    und = underlying value in case und has changed since time of purchase
    ^^^^ all of these may be useful later

    :param d: valuation date
    :param spot: current underlying value
    :param r: current interest rate
    :return: float, spot - K / (1 + r)^T
    """
    # Removed leftover pdb.set_trace() — it halted every call in production.
    T = get_year_deltas([d, self._mat_dt])[-1]
    return spot - self._k / (1 + r)**T
def ols_calc(xvals, yvals):
    """Ordinary least squares fit of yvals against time.

    :param xvals: DataFrame-like with (year, month) rows; the month may
        carry an "E" (estimate) suffix which is stripped before parsing
    :param yvals: Series of observations aligned with xvals
    :return: (slope, yint) of the best-fit line over year deltas
    """
    dates = [
        dt.datetime(int(x[0]), int(float(str(x[1]).replace("E", ""))),
                    1).date() for x in xvals.values
    ]
    # convert dates to fractional years since the first observation
    x_deltas = get_year_deltas(dates)
    a_mat = np.vstack([x_deltas, np.ones(len(x_deltas))]).T
    # rcond=None opts into the machine-precision cutoff and silences the
    # FutureWarning emitted by numpy >= 1.14 when rcond is omitted
    slope, yint = np.linalg.lstsq(a_mat, yvals.values, rcond=None)[0]
    return (slope, yint)
def createFwdCurve(self, trade_dt):
    """Build a forward curve from this zero (spot) curve.

    Assumes trade_dt falls before the first maturity on the curve.

    :param trade_dt: valuation date used for year-fraction calculations
    :return: FwdCurve keyed by (period_start, period_end) date pairs
    """
    fc = FwdCurve([], [])
    # first rate from today to first maturity is the spot rate itself
    fc.addRate((trade_dt, self.mats[0]), self.rates[0])
    prev_mat = self.mats[0]
    prev_rate = self.rates[0]
    for pos in range(1, len(self.mats)):
        y_mat = get_year_deltas([trade_dt, prev_mat])[-1]
        x_mat = get_year_deltas([trade_dt, self.mats[pos]])[-1]
        x_spot = self.rates[pos]
        # Annualized forward:
        #   f = [(1 + spot_x)^x / (1 + spot_y)^y]^(1/(x-y)) - 1
        # The 1/(x-y) root annualizes the forward-period return; without
        # it the result is only correct when maturities are exactly one
        # year apart, and it would not invert createSpotCurve (which
        # compounds the forward over mat_diff years).
        fwd_rate = ((1 + x_spot)**x_mat /
                    (1 + prev_rate)**y_mat)**(1 / (x_mat - y_mat)) - 1
        fc.addRate((prev_mat, self.mats[pos]), fwd_rate)
        prev_mat = self.mats[pos]
        prev_rate = x_spot
    return fc
def createZeroCurve(pc, trade_dt):
    ''' will create zero curve by boot strapping the instruments passed
    par curve passed in

    :param pc: par curve exposing insts (instruments), pxs (prices) and
        getParRate() for interpolation
    :param trade_dt: valuation date used for year-fraction calculations
    :return: ZeroCurve bootstrapped maturity-by-maturity
    '''
    insts = pc.insts
    pxs = pc.pxs
    zc = ZeroCurve([], [])
    for i, px in zip(insts, pxs):
        if i.isBullet():
            # single-payment instrument: its yield IS the zero rate
            zc.addRate(i._mat_dt, i.getYield(px, trade_dt))
        else:
            # discount the current cash_flows based on the current rates, get the next rate
            discounted_pv = 0
            if zc.mats:
                # cash flows on/before the last bootstrapped maturity are
                # discounted with the zero rates found so far
                for cf in [c for c in i._cash_flows if c[0] <= zc.mats[-1]]:
                    discounted_pv += calcPV(
                        cf[1], zc.getZeroRate(cf[0]),
                        get_year_deltas([trade_dt, cf[0]])[-1])
                # rem_cfs = [(get_year_deltas([trade_dt, c[0]])[-1], c[1]) for c in i._cash_flows if c[0] > zc.mats[-1]]
                rem_cfs = [c for c in i._cash_flows if c[0] > zc.mats[-1]]
            else:
                # nothing bootstrapped yet: all cash flows still open
                rem_cfs = i._cash_flows
            # Get the maturity cfs (cpn and principal) to be used for spot rate
            mat_cfs = [(get_year_deltas([trade_dt, cf[0]])[-1], cf[1])
                       for cf in rem_cfs if cf[0] == i._mat_dt]
            # rest we discount by interpolating on the par rate curve
            rem_cfs = [cf for cf in rem_cfs if cf[0] != i._mat_dt]
            for cf in rem_cfs:
                discounted_pv += calcPV(cf[1], pc.getParRate(cf[0]),
                                        get_year_deltas([trade_dt, cf[0]])[-1])
            # Solve for the zero rate y that reprices the instrument:
            # PV(maturity cash flows at y) + discounted_pv = px.
            # NOTE(review): (1 + y*i._freq)**(t/i._freq) reads as periodic
            # compounding with _freq expressed as the period length in years
            # — confirm this matches calcPV's compounding convention.
            ytm_func = lambda y: \
                sum([c/(1+y*i._freq)**(t/i._freq) for t,c in mat_cfs]) - px + discounted_pv
            zc.addRate(i._mat_dt, newton_raphson(ytm_func, 0.01))
    return zc
def model_est_ols(years, data, avg_cols=None, use_last=None):
    """ Create a model based on ordinary least squares regression

    Projects `years` future periods of financials by fitting an OLS
    trend line per column, then derives ebt/taxes/net_inc/eps from the
    projected components.

    :param years: int, number of future periods to project
    :param data: dict of sheets ('is', 'bs', 'cf', 'fr'), each a
        DataFrame indexed by IDX
    :param avg_cols: optional collection of columns projected as a mean
        instead of OLS (too erratic for a trend fit)
    :param use_last: optional collection of columns projected as their
        most recent value
    :return: DataFrame of historicals plus projected rows
    """
    hist = pd.DataFrame()
    data_ols = pd.DataFrame()
    # some cleanup: drop trailing-twelve-month rows before fitting
    for sheet in ['is', 'bs', 'cf', 'fr']:
        data[sheet] = data[sheet].reset_index()[
            data[sheet].reset_index().year != 'TTM'].set_index(IDX)
    # next qurater est is equal to revenue * average est margin
    # over the last year
    for _ in range(years):
        # seed the next period's index off the last actual (or last
        # projected) row
        if hist.empty:
            n_idx = list(data['is'].iloc[-1].name)
        else:
            n_idx = list(hist.iloc[-1].name)
        n_idx = get_next_year(n_idx)
        n_hist_dict = {k: v for k, v in zip(IDX, n_idx)}
        #########
        # Use OLS to get projected values
        #########
        for cat in OLS_COLS:
            # for columns that are all 0 for a particular security
            skip = False
            # Need this for columns that are too eradic for OLS
            # NOTE(review): data[cat] indexes the sheet dict by a column
            # name — verify this is intended vs. data[sheet][cat]
            if avg_cols and cat in avg_cols:
                n_hist_dict[cat] = data[cat].mean()
                continue
            # Need this for columns where we just use most recent value
            if use_last and cat in use_last:
                n_hist_dict[cat] = data[cat].values[-1]
                continue
            # search the sheets in order until one carries this column
            for sheet in ['is', 'bs', 'cf', 'fr']:
                try:
                    val = data[sheet][cat].dropna()
                    data_ols[cat] = data[sheet][cat]
                    x_val = val.reset_index()[['year', 'month']]
                    break
                except KeyError:
                    if sheet == 'fr':
                        # column missing from every sheet: zero it out
                        n_hist_dict[cat] = 0
                        data_ols[cat] = 0
                        skip = True
                    else:
                        continue
            # column is 0 for this security
            if skip:
                continue
            slope, yint = ols_calc(x_val, val)
            start = dt.datetime(int(x_val.values[0][0]),
                                int(x_val.values[0][1]), 1).date()
            # n_idx[2] carries an "E" suffix (estimate) which is stripped
            new_x = get_year_deltas([
                start,
                dt.datetime(int(n_idx[0]), int(n_idx[2][:-1]), 1).date()
            ])[-1]
            # Need this to convert terminology for quarterly, also need to divide by four
            n_hist_dict[cat] = (yint + new_x * slope)
        # derive the income-statement roll-up from projected components
        n_hist_dict['ebt'] = (n_hist_dict['oper_inc'] +
                              n_hist_dict['net_int_inc'])
        # assume average tax rate over last 5 years
        n_hist_dict['taxes'] = (data['fr']['eff_tax_rate'].mean() *
                                n_hist_dict['ebt'])
        n_hist_dict['net_inc'] = n_hist_dict['ebt'] - n_hist_dict['taxes']
        n_hist_dict['eps'] = (n_hist_dict['net_inc'] /
                              n_hist_dict['weight_avg_shares'])
        t_df = pd.DataFrame(n_hist_dict, index=[0]).set_index(IDX)
        # NOTE(review): DataFrame.append was removed in pandas 2.0 —
        # pd.concat([hist, t_df]) is the forward-compatible form
        hist = hist.append(t_df)
    hist = pd.concat([data_ols, hist])
    # splice actual net income in front of the projected values
    hist['net_inc'] = pd.concat(
        [data['is']['net_inc'], hist['net_inc'].dropna()])
    # hist = hist.replace({pd.np.nan: None})
    return hist
def peer_derived_value(data, period, stock):
    """ Get the value of the stock as compared to its peers

    Projects each peer's valuation ratios and the group market cap
    forward with OLS, averages them (simple and market-cap weighted),
    then compares `stock`'s multiples against the group to produce a
    premium/discount and a peer-derived value per ratio.

    :param data: dict keyed by ticker -> (sheets dict, results list)
    :param period: tuple whose [0] element is the base year (str)
    :param stock: ticker whose peer-derived value is computed
    :return: (data with appended results, comparison DataFrame indexed
        by (ticker, cat))
    """
    # get group values first
    group_vals = {}
    years_fwd = 2
    vals = ['ps_ratio', 'pe_avg_hist', 'pb_ratio', 'pfcf_ratio']
    ticks = list(data.keys())
    if STEP_THRU:
        pdb.set_trace()
    # get group market cap
    group_mkt_cap = 0
    for key, data_df in data.items():
        per = tuple([period[0], key, data[key][0]['ols'].index.values[0][2]])
        group_mkt_cap += (data_df[0]['fr']['market_cap']
                          ).reset_index().set_index('year')['market_cap']
        # Need to project out vals
        for ind_val in vals + ['market_cap']:
            # pe_avg_hist lives on the 'ols' sheet, everything else on 'fr'
            if ind_val in ['pe_avg_hist']:
                sheet = 'ols'
            else:
                sheet = 'fr'
            xvals = data_df[0][sheet][ind_val].dropna().reset_index()[[
                'year', 'month'
            ]]
            month = xvals['month'].values[-1]
            slope, yint = ols_calc(
                xvals, data_df[0][sheet][ind_val].dropna().astype('float'))
            # write OLS-projected values for each forward year; the "E"
            # suffix marks the period as an estimate
            for fwd in range(1, years_fwd + 1):
                start = dt.datetime(int(xvals.values[0][0]),
                                    int(xvals.values[0][1]), 1).date()
                per = tuple([str(int(period[0]) + fwd), key, month + "E"])
                new_x = get_year_deltas([
                    start,
                    dt.datetime(int(per[0]), int(per[2][:-1]), 1).date()
                ])[-1]
                data_df[0][sheet].at[per, ind_val] = (yint + new_x * slope)
    # ols for group market cap
    group_mkt_cap = group_mkt_cap.dropna()
    xvals = group_mkt_cap.reset_index()
    xvals['month'] = '06'
    month = '06'
    xvals = xvals[['year', 'month']]
    slope, yint = ols_calc(xvals, group_mkt_cap.dropna().astype('float'))
    for fwd in range(1, years_fwd + 1):
        start = dt.datetime(int(xvals.values[0][0]),
                            int(xvals.values[0][1]), 1).date()
        per = tuple([str(int(period[0]) + fwd), month + "E"])
        new_x = get_year_deltas(
            [start, dt.datetime(int(per[0]), int(month), 1).date()])[-1]
        group_mkt_cap[per[0]] = (yint + new_x * slope)
    # accumulate group averages per ratio: simple, weighted, and forward
    for ind_val in vals:
        if ind_val in ['pe_avg_hist']:
            sheet = 'ols'
        else:
            sheet = 'fr'
        group_vals[ind_val] = 0
        group_vals[ind_val + "_w_avg"] = 0
        for yrf in range(1, years_fwd + 1):
            group_vals[ind_val + "_" + str(yrf) + "fwd"] = 0
            group_vals[ind_val + "_" + str(yrf) + "fwd_w_avg"] = 0
        for tick in ticks:
            # resolve this ticker's index tuple for the base period
            per = tuple([
                period[0], tick, [
                    val[2] for val in data[tick][0][sheet].index.values
                    if val[0] == period[0]
                ][0]
            ])
            fwd_pers = [
                tuple([
                    str(int(period[0]) + yf), tick, [
                        val[2] for val in data[tick][0][sheet].index.values
                        if val[0] == str(int(period[0]) + yf)
                    ][0]
                ]) for yf in range(1, years_fwd + 1)
            ]
            # 5yr avgs, simple and weighted
            # pdb.set_trace()
            group_vals[ind_val] += data[tick][0][sheet][ind_val].dropna(
            ).rolling(center=False, window=5,
                      min_periods=1).mean()[per] / len(ticks)
            group_vals[ind_val + "_w_avg"] += (
                data[tick][0][sheet][ind_val].dropna().rolling(
                    center=False, window=5, min_periods=1).mean()[per] *
                (data[tick][0]['fr']['market_cap'][per] /
                 group_mkt_cap[per[0]]))
            for fwd in fwd_pers:
                year_diff = int(fwd[0]) - int(per[0])
                # fwd avgs, simple and weighted
                group_vals[
                    ind_val + "_" + str(year_diff) +
                    "fwd"] += data[tick][0][sheet][ind_val].dropna().rolling(
                        center=False, window=years_fwd,
                        min_periods=1).mean()[fwd] / len(ticks)
                group_vals[ind_val + "_" + str(year_diff) + "fwd_w_avg"] += (
                    data[tick][0][sheet][ind_val].dropna().rolling(
                        center=False, window=years_fwd,
                        min_periods=1).mean()[fwd] *
                    (data[tick][0]['fr']['market_cap'][fwd] /
                     group_mkt_cap[fwd[0]]))
        if DEBUG:
            print("{} 5Y simple avg: {}".format(ind_val,
                                                '%.3f' % group_vals[ind_val]))
            print("{} 5Y weighted avg: {}".format(
                ind_val, '%.3f' % group_vals[ind_val + "_w_avg"]))
            for yrf in range(1, years_fwd + 1):
                print("{} {}Y fwd avg: {}"
                      "".format(
                          ind_val, str(yrf), '%.3f' %
                          group_vals[ind_val + "_" + str(yrf) + "fwd"]))
                print("{} {}Y fwd weighted avg: {}"
                      "".format(
                          ind_val, str(yrf), '%.3f' %
                          group_vals[ind_val + "_" + str(yrf) + "fwd_w_avg"]))
    # build the comparison table for the requested stock only
    comp_df = pd.DataFrame()
    for key, data_df in data.items():
        if key != stock:
            continue
        per = tuple([period[0], key, data_df[0]['ols'].index.values[0][2]])
        for ratio in vals:
            if ratio in ['pe_avg_hist']:
                sheet = 'ols'
            else:
                sheet = 'fr'
            if comp_df.empty:
                comp_df = pd.DataFrame(columns=setup_pdv_cols(per, years_fwd))
            row = [key, ratio]
            # 5y average
            row.append(data_df[0][sheet][ratio].dropna().rolling(
                center=False, window=5, min_periods=1).mean()[per])
            # 5y avg vs weighted avg
            row.append(row[-1] / group_vals[ratio + "_w_avg"])
            # get fwd years
            fwd_pers = [
                tuple([
                    str(int(period[0]) + yf), key,
                    data_df[0]['ols'].index.values[0][2] + "E"
                ]) for yf in range(1, years_fwd + 1)
            ]
            for fwd in fwd_pers:
                year_diff = int(fwd[0]) - int(per[0])
                # fwd multiple
                row.append(data_df[0][sheet][ratio].dropna().rolling(
                    center=False, window=year_diff, min_periods=1).mean()[fwd])
                # fwd mult vs fwd group average
                row.append(
                    row[-1] /
                    group_vals[ratio + "_" + str(year_diff) + "fwd_w_avg"])
                # premium / discount: (5yr avg / group wgt avg)
                # / (fwd mult vs fwd group wgt ratio)
                # aka relative fwd mult compared to current mult
                row.append(row[3] / row[-1])
                # prem_discount * current price for Peer derived value
                row.append(data_df[0]['ols'].date_px[per] * row[-1])
                data[key][1].append(
                    tuple([
                        "pdv_" + ratio, per[1],
                        str(int(per[0]) + year_diff), '%.3f' % row[-1]
                    ]))
            comp_df.loc[len(comp_df)] = row
    return data, comp_df.set_index(['ticker', 'cat'])
def calcAccruedInterest(self, trade_dt):
    """Accrued interest earned toward the next coupon.

    Locates the next cash flow strictly after trade_dt and prorates its
    amount by the fraction of the coupon period already elapsed:
    (freq - time_to_next_cf) / freq.

    :param trade_dt: valuation date
    :return: float accrued interest
    """
    future_cfs = (c for c in self._cash_flows if c[0] > trade_dt)
    next_cf = min(future_cfs, key=lambda c: c[0])
    yrs_to_cf = get_year_deltas([trade_dt, next_cf[0]])[-1]
    elapsed_frac = (self._freq - yrs_to_cf) / self._freq
    return elapsed_frac * next_cf[1]