def get_daily_return(ptf, st_date, ed_date, currency=None):
    """Calculate the daily return of the given portfolio from the rebalance
    date until the end date.

    Note this method is resource EXPENSIVE as it relies on Bloomberg period
    return (one request per day) and then back-calculates the daily return.

    :param ptf: DataFrame whose columns contain "Security", "Weight", "Date"
    :param st_date: the rebalance date (single value); the date the portfolio starts
    :param ed_date: the date of the end calculation
    :param currency: the currency the performance is based on. If None, local
    :return: a tuple of (DataFrame with the single column "DtdPNL" indexed by
        EdDate, the per-security calculation details DataFrame)
    """
    st_date = pd.to_datetime(st_date)
    ed_date = pd.to_datetime(ed_date)
    # the rebalance date itself does not generate pnl
    dts = pd.date_range(st_date, end=ed_date, freq='D')[1:]

    frames = []
    for dt in dts:
        # cumulative (start-to-date) total return for each security
        kwargs = dict(
            CUST_TRR_START_DT=st_date.strftime("%Y%m%d"),
            CUST_TRR_END_DT=pd.to_datetime(dt).strftime("%Y%m%d"))
        # only pass the currency override when one was requested; this
        # replaces the original duplicated if/else request blocks
        if currency is not None:
            kwargs['CUST_TRR_CRNCY'] = currency
        df = LocalTerminal.get_reference_data(
            ptf.loc[:, 'Security'], "CUST_TRR_RETURN_HOLDING_PER", **kwargs)
        df = df.as_frame()
        df['EdDate'] = dt
        # PNL is quoted in percentage points
        df['CUST_TRR_RETURN_HOLDING_PER'] = df[
            'CUST_TRR_RETURN_HOLDING_PER'] / 100
        df.reset_index(inplace=True)
        df.rename(columns={
            'CUST_TRR_RETURN_HOLDING_PER': "PNL",
            "index": "Ticker"
        }, inplace=True)
        frames.append(df)
    # BUG FIX: DataFrame.append in a loop is deprecated (removed in pandas 2.x)
    # and quadratic; accumulate the frames and concatenate once instead
    dfs = pd.concat(frames) if frames else pd.DataFrame()

    # get the period-to-date return, weighted by portfolio weight
    tot = pd.merge(ptf, dfs, left_on="Security", right_on="Ticker")
    tot['TotalReturn'] = tot["Weight"] * tot["PNL"]
    daily_return = tot.groupby("EdDate").agg({"TotalReturn": "sum"})

    # back-calculate each day's return from consecutive cumulative returns:
    # (1 + cum_t) / (1 + cum_{t-1}) - 1
    daily_return['StDate'] = st_date
    daily_return['YestTotalReturn'] = daily_return['TotalReturn'].shift(1)
    daily_return['DtdPNL'] = (daily_return['TotalReturn'] + 1) / (
        1 + daily_return['YestTotalReturn']) - 1
    # first day has no prior cumulative return: its DTD equals the period return
    daily_return.iloc[
        0, daily_return.columns.get_loc('DtdPNL')] = daily_return.iloc[
            0, daily_return.columns.get_loc('TotalReturn')]
    return daily_return[['DtdPNL']], tot
def getAdvs(self):
    """Take the symbol list and use the Bloomberg API to get the 30-day
    average daily volume (and yesterday's volume) for each security, then
    merge it back onto the symbol frame with firm-total volume ratios.
    """
    securities = self.symbolList()
    """ uses bloomberg api to create a list of average daily volume associated with each security. """
    #------- get time zone to determine api field to pull - bloomberg will not get adv from t-1 if pre 9:45 est
    now = dt.datetime.now().time()
    threshold = dt.time(9, 45, 0, 0)
    if now > threshold:
        # after 9:45 EST yesterday's volume field (PX_VOLUME_1D) is populated
        advs = LocalTerminal.get_reference_data(
            securities, ['VOLUME_AVG_30D', 'PX_VOLUME_1D'],
            ignore_security_error=True).as_frame()
    else:
        # before 9:45 EST fall back to PX_VOLUME, then rename so downstream
        # code can always read PX_VOLUME_1D
        # NOTE(review): positional column rename assumes as_frame() preserves
        # the requested field order — confirm against the tia API
        advs = LocalTerminal.get_reference_data(
            securities, ['VOLUME_AVG_30D', 'PX_VOLUME'],
            ignore_security_error=True).as_frame()
        advs.columns = ['VOLUME_AVG_30D', 'PX_VOLUME_1D']
    # index holds full tickers like "XYZ US EQUITY"; keep only the root symbol
    advs['SYMBOL'] = [i.split(" ", 1)[0] for i in advs.index.tolist()]
    """ merges the frames from getSybmols above, with the api data. then adds to the total volume using .transform to get the total volume in specific sybmol. Adds the BKCM total volume for each unique symbol in a new colum which is used for filtering in the excptions methos below. """
    frame = self.getSymbols()
    frame = frame.merge(advs, on='SYMBOL', how='left')
    # total firm volume per symbol, broadcast back onto every row
    frame['BKCM_TOTAL_VOL'] = frame.groupby('SYMBOL')['VOLUME'].transform(
        'sum')
    # firm volume as a percentage of 30-day ADV and of yesterday's volume
    frame['BKCM_%_ADV'] = (frame['BKCM_TOTAL_VOL'] /
                           frame['VOLUME_AVG_30D']) * 100
    frame['BKCM_%_OF_VOLUME_YESTERDAY'] = (frame['BKCM_TOTAL_VOL'] /
                                           frame['PX_VOLUME_1D']) * 100
    return frame
def downloadData(self, tw):
    """Download price history for ``self.assets`` over a trailing window.

    :param tw: trailing window length in business days
    :return: True on completion; results are stored on ``self.prices``
        (historical PX_LAST) and ``self.names`` (SHORT_NAME reference data)
    """
    self.tw = tw
    # window start (tw business days back) and a forward month-begin marker
    self.d = pd.datetools.BDay(-self.tw).apply(pd.datetime.now())
    self.m = pd.datetools.BMonthBegin(2).apply(pd.datetime.now())
    self.prices = LocalTerminal.get_historical(self.assets, ['PX_LAST'],
                                               start=self.d)
    # BUG FIX: the original referenced an undefined name `idx` here; the
    # securities this object operates on are held in self.assets
    self.names = LocalTerminal.get_reference_data(self.assets, ['SHORT_NAME'])
    return True
def _get_memb_direct(index_ticker, as_of_date):
    """Return the historical member weights of *index_ticker* as of a date.

    :param index_ticker: Bloomberg index ticker
    :param as_of_date: membership as-of date (END_DATE_OVERRIDE)
    :return: DataFrame with columns 'Ticker' and 'Weight'; each ticker has
        the " Equity" yellow-key suffix appended
    """
    response = LocalTerminal.get_reference_data(
        index_ticker, "INDX_MWEIGHT_HIST", END_DATE_OVERRIDE=as_of_date)
    # the bulk field comes back as a nested frame in the first row
    weights = response.as_frame()["INDX_MWEIGHT_HIST"].iloc[0]
    weights.columns = ['Ticker', 'Weight']
    weights['Ticker'] = weights['Ticker'] + " Equity"
    return weights
def get_period_return(ptf, st_date, ed_date, currency=None):
    """Calculate the periodic return of the given portfolio from the
    rebalance date until the end date.

    :param ptf: DataFrame whose columns contain "Security", "Weight"
    :param st_date: the rebalance date (single value); the date the portfolio starts
    :param ed_date: the date of the end calculation
    :param currency: the currency the performance is based on. If None, local
    :return: a tuple of (periodic returns indexed by EdDate, the per-security
        calculation details DataFrame)
    """
    st_date = pd.to_datetime(st_date)
    ed_date = pd.to_datetime(ed_date)
    # only pass the currency override when one was requested; this replaces
    # the original duplicated if/else request blocks
    kwargs = dict(
        CUST_TRR_START_DT=st_date.strftime("%Y%m%d"),
        CUST_TRR_END_DT=ed_date.strftime("%Y%m%d"))
    if currency is not None:
        kwargs['CUST_TRR_CRNCY'] = currency
    df = LocalTerminal.get_reference_data(
        ptf.loc[:, 'Security'], "CUST_TRR_RETURN_HOLDING_PER", **kwargs)
    df = df.as_frame()
    df['StDate'] = st_date
    df['EdDate'] = ed_date
    # PNL is quoted in percentage points
    df['CUST_TRR_RETURN_HOLDING_PER'] = df[
        'CUST_TRR_RETURN_HOLDING_PER'] / 100
    df.reset_index(inplace=True)
    df.rename(columns={
        'CUST_TRR_RETURN_HOLDING_PER': "PNL",
        "index": "Ticker"
    }, inplace=True)
    # weight each security's period return and aggregate per end date
    tot = pd.merge(ptf, df, left_on="Security", right_on="Ticker")
    tot['TotalReturn'] = tot["Weight"] * tot["PNL"]
    rtn = tot.groupby("EdDate").agg({"TotalReturn": "sum"})
    return rtn[['TotalReturn']], tot
def downloadData(self, assets, tw):
    """Download daily price history and exchange symbols for *assets*.

    :param assets: iterable of Bloomberg tickers
    :param tw: trailing window length in business days
    :return: tuple of (the historical px_last response, list of exchange
        symbols — ID_BB_SEC_NUM_DES — one per asset)
    """
    self.tw = tw
    self.idx = assets
    # window start (tw business days back) and a backward month-begin marker
    self.d = pd.datetools.BDay(-self.tw).apply(pd.datetime.now())
    self.m = pd.datetools.BMonthBegin(-2).apply(pd.datetime.now())
    self.response = LocalTerminal.get_historical(self.idx, ['px_last'],
                                                 start=self.d)
    # one reference request per asset; a comprehension replaces the original
    # manual append loop (and the redundant `data = self.response` alias)
    symbols = [
        LocalTerminal.get_reference_data(
            asset, 'ID_BB_SEC_NUM_DES').as_frame()['ID_BB_SEC_NUM_DES'][0]
        for asset in assets
    ]
    return self.response, symbols
def bdps(symbol, field):
    """Download the current value of *field* for the given securities.

    :param symbol: a ticker or list of tickers
    :param field: a Bloomberg field mnemonic or list of mnemonics
    :return: DataFrame of current values, one row per security
    """
    # imports are local so the module stays importable without a terminal
    from tia.bbg import LocalTerminal
    import pandas as pd

    response = LocalTerminal.get_reference_data(symbol, field)
    return response.as_frame()
def buildTree(self):
    """Build a probability tree of Fed funds target rates.

    Starting from the current FDTR rate pulled from Bloomberg, each step
    applies the (hike, no-change, cut) probabilities from
    ``self.probabilities`` with a fixed 0.25 jump size.

    :return: list of dicts; ``tree[k]`` maps a rate (stringified float) to
        its probability after k meetings
    """
    jumpRate = 0.25
    # current Fed funds target rate
    currentRate = LocalTerminal.get_reference_data(
        'FDTR Index', 'px_last').as_frame()['px_last']['FDTR Index']
    probabilities = self.probabilities
    tree = [{str(currentRate): 1}]
    for p_hike, p_nohike, p_cut in probabilities.values:
        new_branch = {}
        # BUG FIX: dict.iteritems() is Python-2-only; .items() works on both
        for rates_prev, prob_prev in tree[-1].items():
            # rates are stored as string keys; round-trip through float to
            # step up/down by the jump size
            rate_up = str(float(rates_prev) + jumpRate)
            rate_down = str(float(rates_prev) - jumpRate)
            if rate_up not in new_branch:
                new_branch[rate_up] = 0
            if rate_down not in new_branch:
                new_branch[rate_down] = 0
            if rates_prev not in new_branch:
                new_branch[rates_prev] = 0
            # distribute this node's probability across the three outcomes
            new_branch[rate_up] += prob_prev * p_hike
            new_branch[rates_prev] += prob_prev * p_nohike
            new_branch[rate_down] += prob_prev * p_cut
        tree.append(new_branch)
    return tree
#!/usr/bin/env python import numpy as np from datetime import datetime from tia.bbg import LocalTerminal import QuantLib as ql import market_data.yield_curve as yc ''' Download BBG CDS curve ''' altice = LocalTerminal.get_reference_data('YCCD2204 Index', 'CURVE_TENOR_RATES', ).as_frame() curve = altice.iloc[0].loc['CURVE_TENOR_RATES'] memb = curve['Tenor Ticker'].tolist() memb = memb[1:] tenor = curve['Tenor'].tolist() tenor = tenor[1:] tenor = ([int(z.strip('Y')) for z in tenor]) rates = [] for i in memb: z = LocalTerminal.get_reference_data(i, 'CDS_FLAT_SPREAD', ).as_frame() rates.append((z.loc[i].item()/10000)) cc_raw = dict(zip(tenor,rates)) ''' Build Quantlib Credit Curve
import pandas as pd from tia.bbg import LocalTerminal if __name__ == "__main__": d = pd.datetools.BDay(-4).apply(pd.datetime.now()) m = pd.datetools.BMonthBegin(-2).apply(pd.datetime.now()) def banner(msg): print("*" * 25) print(msg) print("*" * 25) banner( "ReferenceDataRequest: single security, single field, frame response") response = LocalTerminal.get_reference_data("msft us equity", "px_last") print(response.as_map()) print(response.as_frame()) banner( "ReferenceDataRequest: single security, multi-field (with bulk), frame response" ) response = LocalTerminal.get_reference_data("eurusd curncy", ["px_last", "fwd_curve"]) print(response.as_map()) rframe = response.as_frame() print(rframe.columns) # show frame within a frame print(rframe.ix[0, "fwd_curve"].tail()) banner("ReferenceDataRequest: multi security, multi-field, bad field") response = LocalTerminal.get_reference_data(
d = {} #dict of original dataframes per asset class m = {} #dict of ref data b = {} #list of lists for asset diffs p = {} #list of list for $asset diffs f = {} #simplified asset dicts r = {} #daily rate of change dict u = {} #weekly rate of change pq = {} #monthly rate of change ip = {} #quarterly rate of change lp = {} #yearly rate of change idx = pd.IndexSlice for i, v in q.items(): #get ref data and underlying currency m[i] = LocalTerminal.get_reference_data(v, fields_ref).as_frame() #get asset data and calculate $Assets on a daily basis d[i] = LocalTerminal.get_historical( v, fields_hist, start_date, end_date, period='DAILY', non_trading_day_fill_option='ALL_CALENDAR_DAYS', non_trading_day_fill_method='PREVIOUS_VALUE').as_frame() d[i].columns = d[i].columns.droplevel(-1) d[i] = d[i].unstack().to_frame() d[i].columns = d[i].columns.astype(str) d[i].columns = d[i].columns.str.replace('0', 'Assets') d[i]['fx'] = m[i]['FUND_TOTAL_ASSETS_CRNCY'].loc[
d[file, name] = LocalTerminal.get_historical(name, fields, start_date, end_date, period='DAILY').as_frame() d[file, name].columns = d[file, name].columns.droplevel() d[file, name] = d[file, name].fillna(method='ffill') temp[file, name] = ts[file].loc[ts[file].Ticker == name][[ 'Date', 'Amount', 'Expiry', 'Direction', 'Shares' ]] temp[file, name].index = temp[file, name].Date temp[file, name] = temp[file, name].drop('Date', axis=1) m[file, name] = LocalTerminal.get_reference_data(name, ref_data).as_frame() n[file] = LocalTerminal.get_reference_data(name, ref_data).as_frame() #set option qtty equal to $1mm USD worth of bonds so they can be compared in 'return space' opt_curr = n[file]['CRNCY'].item() + " CURNCY" curr_px = LocalTerminal.get_reference_data( opt_curr, 'PX_LAST').as_frame().values.item() multy = 100.00 #n[file]['OPT_MULTIPLIER'].item() Hard coding as 100 multiplier undl = n[file]['OPT_UNDL_PX'].item() bond_size = 1000000.0 #1m USD b_size_adj = bond_size / curr_px opt1_qtty = np.round(((b_size_adj) / (multy * undl))) for l in IDs: quants.append(opt1_qtty)
def downloadData(self):
    """Request the last price for ``self.futureName`` from Bloomberg and
    store the raw response on ``self.response``.

    Note: the response is not converted to a frame here; callers are
    expected to unpack it themselves.
    """
    self.response = LocalTerminal.get_reference_data(self.futureName,
                                                     ['px_last'])
import pandas as pd
from tia.bbg import LocalTerminal

# Single SID, Multiple Valid Fields
resp = LocalTerminal.get_reference_data(
    ['EURUSD Curncy', 'AUDUSD Curncy'],
    ['MOV_AVG_30D', 'MOV_AVG_50D', 'MOV_AVG_100D', 'MOV_AVG_200D'])
df = resp.as_frame()
#mov_avg_30d = df['MOV_AVG_30D']
#mov_avg_100d = df['MOV_AVG_100D']
#print(mov_avg_30d)
#print(mov_avg_100d)

# Adding a new column to the dataframe based on a calculation from other columns
df['30d - 100d'] = df['MOV_AVG_30D'] - df['MOV_AVG_100D']

# testing the use of iloc and loc for indexing a specific cell
test = df.loc['EURUSD Curncy', '30d - 100d']
test1 = df.iloc[0, 4]
print(test)
print(test1)
print(df)

## adding a column to the dataframe to say 'B' when the 30dma is higher than the 100dma
df['bs'] = 'NA'
# BUG FIX: the original used chained indexing (df['bs'][mask] = ...), which
# raises SettingWithCopyWarning and may silently fail to write back; .loc
# with a boolean mask is the supported assignment form
df.loc[df['30d - 100d'] > 0, 'bs'] = 'B'
df.loc[df['30d - 100d'] < 0, 'bs'] = 'S'
#eurusd = df.loc['EURUSD Curncy']
##essential imports for simple daily data query from tia.bbg import LocalTerminal import pandas as pd ##Import dict to map bberg tenors to months from TenorsDictionary import Tenors_dict # Multiple SID, Invalid Fields # allows for non-homogeneous security types to be batched together #These tickers are hard coded from the "USD ISDA CDS Fixing SWAP CURVE" #To get Tenors need to use bberg field "SECURITY_TENOR_ONE" on money market instruments and #"SECURITY_TENOR_TWO" on swap instruments. So run two queries and append data frame resp = LocalTerminal.get_reference_data(['USLFD1M ISCF Curncy', 'USLFD2M ISCF Curncy', 'USLFD3M ISCF Curncy', 'USLFD6M ISCF Curncy', 'USLFD12M ISCF Curncy'], ['LAST_UPDATE_DT','SECURITY_TENOR_ONE', 'SECURITY_TYP','PX_LAST'], ignore_field_error=1) df=resp.as_frame() #Rename Tenor column for consistency with Swap data fram before appending df.rename(columns={'SECURITY_TENOR_ONE': 'Tenor'}, inplace=True) resp = LocalTerminal.get_reference_data(['USSWAP2 Curncy', 'USSWAP3 Curncy', 'USSWAP4 Curncy', 'USSWAP5 Curncy', 'USSWAP6 Curncy', 'USSWAP7 Curncy', 'USSWAP8 Curncy',
#!/usr/bin/env python import numpy as np from datetime import datetime from tia.bbg import LocalTerminal import QuantLib as ql ''' Build Yield Curve retrieve USD curve; S23 USD Swaps (30/360, S/A) ''' today = datetime.date(datetime.now()) td = datetime.strftime(today, "%d,%m,%Y") todaysDate = ql.Date(td, "%d,%m,%Y") ql.Settings.instance().evaluationDate = todaysDate usd = LocalTerminal.get_reference_data('YCSW0023 Index', 'par_curve', ).as_frame() s23 = usd.iloc[0].loc['par_curve'] ###pull dates dates = s23['Date'].tolist() dates = [datetime.strftime(i, "%d,%m,%Y") for i in dates] ql_dates = [ql.Date(i, "%d,%m,%Y") for i in dates] ql_dates = [todaysDate] + ql_dates ###pull rates rates = s23['Rate'].tolist() on = LocalTerminal.get_reference_data('US00O/N Index', 'PX_LAST').as_frame() on = on.at['US00O/N Index','PX_LAST'] rates = [np.round(on,decimals = 5)] + rates rates = [i*.01 for i in rates] ###build yield curve spotDates = ql_dates
import pandas as pd from tia.bbg import LocalTerminal if __name__ == '__main__': d = pd.datetools.BDay(-4).apply(pd.datetime.now()) m = pd.datetools.BMonthBegin(-2).apply(pd.datetime.now()) def banner(msg): print('*' * 25) print(msg) print('*' * 25) banner('ReferenceDataRequest: single security, single field, frame response') response = LocalTerminal.get_reference_data('msft us equity', 'px_last') print(response.as_map()) print(response.as_frame()) banner('ReferenceDataRequest: single security, multi-field (with bulk), frame response') response = LocalTerminal.get_reference_data('eurusd curncy', ['px_last', 'fwd_curve']) print(response.as_map()) rframe = response.as_frame() print(rframe.columns) # show frame within a frame print(rframe.ix[0, 'fwd_curve'].tail()) banner('ReferenceDataRequest: multi security, multi-field, bad field') response = LocalTerminal.get_reference_data(['eurusd curncy', 'msft us equity'], ['px_last', 'fwd_curve'], ignore_field_error=1) print(response.as_frame()['fwd_curve']['eurusd curncy']) banner('HistoricalDataRequest: multi security, multi-field, daily data')
import pandas as pd from tia.bbg import LocalTerminal if __name__ == '__main__': d = pd.datetools.BDay(-4).apply(pd.datetime.now()) m = pd.datetools.BMonthBegin(-2).apply(pd.datetime.now()) def banner(msg): print ('*' * 25) print (msg) print ('*' * 25) banner('ReferenceDataRequest: single security, single field, frame response') response = LocalTerminal.get_reference_data('msft us equity', 'px_last') print (response.as_map()) print (response.as_frame()) banner('ReferenceDataRequest: single security, multi-field (with bulk), frame response') response = LocalTerminal.get_reference_data('eurusd curncy', ['px_last', 'fwd_curve']) print (response.as_map()) rframe = response.as_frame() print (rframe.columns) # show frame within a frame print (rframe.ix[0, 'fwd_curve'].tail()) banner('ReferenceDataRequest: multi security, multi-field, bad field') response = LocalTerminal.get_reference_data(['eurusd curncy', 'msft us equity'], ['px_last', 'fwd_curve'], ignore_field_error=1) print (response.as_frame()['fwd_curve']['eurusd curncy']) banner('HistoricalDataRequest: multi security, multi-field, daily data')
import pandas as pd from tia.bbg import LocalTerminal if __name__ == "__main__": d = pd.datetools.BDay(-4).apply(pd.datetime.now()) m = pd.datetools.BMonthBegin(-2).apply(pd.datetime.now()) def banner(msg): print "*" * 25 print msg print "*" * 25 banner("ReferenceDataRequest: single security, single field, frame response") response = LocalTerminal.get_reference_data("msft us equity", "px_last") print response.as_map() print response.as_frame() banner("ReferenceDataRequest: single security, multi-field (with bulk), frame response") response = LocalTerminal.get_reference_data("eurusd curncy", ["px_last", "fwd_curve"]) print response.as_map() rframe = response.as_frame() print rframe.columns # show frame within a frame print rframe.ix[0, "fwd_curve"].tail() banner("ReferenceDataRequest: multi security, multi-field, bad field") response = LocalTerminal.get_reference_data( ["eurusd curncy", "msft us equity"], ["px_last", "fwd_curve"], ignore_field_error=1 ) print response.as_frame()["fwd_curve"]["eurusd curncy"]
callability_price = ql.CallabilityPrice(call_price, ql.CallabilityPrice.Clean) callability_schedule.append( ql.Callability(callability_price, ql.Callability.Call, call_date)) call_date = null_calendar.advance(call_date, 3, ql.Months) ''' Build Yield Curve retrieve USD curve; S23 USD Swaps (30/360, S/A) ''' today = datetime.date(datetime.now()) td = datetime.strftime(today, "%d,%m,%Y") todaysDate = ql.Date(td, "%d,%m,%Y") ql.Settings.instance().evaluationDate = todaysDate usd = LocalTerminal.get_reference_data( 'YCSW0023 Index', 'par_curve', ).as_frame() s23 = usd.iloc[0].loc['par_curve'] ###pull dates dates = s23['Date'].tolist() dates = [datetime.strftime(i, "%d,%m,%Y") for i in dates] ql_dates = [ql.Date(i, "%d,%m,%Y") for i in dates] ql_dates = [todaysDate] + ql_dates ###pull rates rates = s23['Rate'].tolist() on = LocalTerminal.get_reference_data('US00O/N Index', 'PX_LAST').as_frame() on = on.at['US00O/N Index', 'PX_LAST'] rates = [np.round(on, decimals=5)] + rates rates = [i * .01 for i in rates] ###build yield curve
def BackDate(self, event):
    """wx event handler: re-run the low-price-security exception report for a
    past date chosen in the GUI, merge in counterparty data, and save it.

    The ``while i < 1`` / ``i = 2`` construct makes the body run exactly once
    while keeping the busy cursor alive; any failure falls through to the
    broad except, which prints the error and returns None.
    """
    i = 0
    try:
        while i < 1:
            wait = wx.BusyCursor()  # run busy cursor until end
            date = self.dateCtrl.GetValue()  # get the date input from the gui
            print(date)
            adv = self.advThreshold.GetValue()  # get the adv string value from gui
            price = self.priceThreshold.GetValue()  # get the price threshold from gui
            backdate = lowPriceSecBackDate(date, price, adv)  # create back date object
            backdate.formatDates()  # get the dates and file dirs
            print(backdate.FILE_DIR, '\n', backdate.cpty_report, '\n',
                  backdate.cpty_stepout)
            # pipe-delimited counterparty and allocation/step-out extracts
            backDateCptyDf = pd.read_csv(backdate.cpty_report, sep="|")
            backDateAllocationDf = pd.read_csv(backdate.cpty_stepout, sep="|")
            # use the low price sec class to get symbols; dont run the regular
            # low price report
            bkDateReport = executedOrderReport(backdate.FILE_DIR,
                                               backdate.SAVE, 3, 10)
            syms = bkDateReport.getSymbols()
            syms = syms.SYMBOL.tolist()
            syms = [i + " US EQUITY" for i in syms]
            print('sybmols found are: ', syms)
            print("date is report will run for is: ", backdate.RUN_DATE)
            print('running advs')
            # uses custom bloomberg api based on TIA_BBG github
            advs = LocalTerminal.get_historical(
                syms, 'PX_VOLUME', backdate.RUN_DATE,
                backdate.RUN_DATE).as_frame()
            advs = advs.transpose().reset_index().set_index(
                'level_0').iloc[:, -1:]
            advs.columns = ['PX_VOLUME_1D']
            adv2 = LocalTerminal.get_reference_data(
                syms, 'VOLUME_AVG_30D', backdate.RUN_DATE,
                backdate.RUN_DATE).as_frame()
            adv2 = adv2.join(advs).reset_index()
            adv2.columns = ['SYMBOL', 'VOLUME_AVG_30D', 'PX_VOLUME_1D']
            # strip the " US EQUITY" suffix back off to merge on root symbol
            adv2['SYMBOL'] = [i.split(" ", 1)[0] for i in adv2.SYMBOL.tolist()]
            exceptionFrame = bkDateReport.getSymbols()
            exceptionFrame = exceptionFrame.merge(adv2, on='SYMBOL',
                                                  how='left')
            # firm total volume per symbol and its ratio to ADV / yesterday
            exceptionFrame['BKCM_TOTAL_VOL'] = exceptionFrame.groupby(
                'SYMBOL')['VOLUME'].transform('sum')
            exceptionFrame['BKCM_%_ADV'] = (
                exceptionFrame['BKCM_TOTAL_VOL'] /
                exceptionFrame['VOLUME_AVG_30D']) * 100
            exceptionFrame['BKCM_%_OF_VOLUME_YESTERDAY'] = (
                exceptionFrame['BKCM_TOTAL_VOL'] /
                exceptionFrame['PX_VOLUME_1D']) * 100
            # only symbols where firm volume exceeded 10% of 30-day ADV
            exceptionFrame = exceptionFrame[exceptionFrame['BKCM_%_ADV'] > 10]
            print('running backdate cpty')
            cpty = BackDateCpty(backDateAllocationDf, backDateCptyDf)
            cpty.merge()
            cpty = cpty.alloc
            exceptionFrame = pd.merge(exceptionFrame, cpty,
                                      left_on='PARENT_ORDER_ID',
                                      right_on='ORDER_ID', how='left')
            print("excpetion report found these counter parties :",
                  exceptionFrame.COUNTERPARTY_CODE.tolist())
            print('saving')
            exception = FormatSaveBackDate(exceptionFrame, backdate.date2)
            i = 2
            return exception.save()
    except Exception as e:
        # NOTE(review): broad except that only prints — failures are silent
        # to the caller; consider surfacing the error in the GUI
        print(e)
        i = 2
        return
for name in IDs: d[file, name] = LocalTerminal.get_historical(name, fields, start_date, end_date, period='DAILY').as_frame() d[file, name].columns = d[file, name].columns.droplevel() d[file, name] = d[file, name].fillna(method='ffill') temp[file, name] = ts[file].loc[ts[file].Ticker == name][[ 'Date', 'Amount', 'Expiry', 'Direction', 'Shares' ]] temp[file, name].index = temp[file, name].Date temp[file, name] = temp[file, name].drop('Date', axis=1) m[file, name] = LocalTerminal.get_reference_data(name, ref_data).as_frame() #because some of the price info does not extend to maturity, make new pricing # dataframes that have the full price set, including expiry value = 'd2' for i in IDs: temp2[file, i] = pd.DataFrame(np.nan, columns=['LAST PRICE_NA'], index=pd.date_range(start=d[file, i].index[0], end=temp[file, i]['Expiry'][-1], freq='B')) frames = [temp2[file, i], d[file, i]] d2[file, i] = pd.concat(frames, join='outer', axis=1) d2[file, i] = d2[file, i].drop(['LAST PRICE_NA'], axis=1)
def bbg_load_meta(bbg_tckr, bbg_flds):
    """Fetch reference (meta) data fields for a single Bloomberg ticker.

    :param bbg_tckr: Bloomberg ticker string
    :param bbg_flds: field mnemonic(s) to request
    :return: the response-frame row for *bbg_tckr* (a Series of field values)
    """
    frame = LocalTerminal.get_reference_data(bbg_tckr, bbg_flds).as_frame()
    return frame.loc[bbg_tckr]
@author: dsugasa """ import pandas as pd from tia.bbg import LocalTerminal import numpy as np from datetime import datetime import QuantLib as ql import numpy as np #YCSW0022 Index # retrieve GBP curve using API; S22 GBP (vs. 6M Libor) gbp = LocalTerminal.get_reference_data( 'YCSW0022 Index', 'par_curve', ).as_frame() gbp2 = gbp.iloc[0].loc['par_curve'] dates = gbp2['Date'].tolist() disc = gbp2['Discount Factor'].tolist() dates = [datetime.strftime(i, "%d,%m,%Y") for i in dates] ql_dates = [ql.Date(i, "%d,%m,%Y") for i in dates] rates = gbp2['Rate'].tolist() #mgr = dm.BbgDataManager() ## set dates, securities, and fields #start_date = '01/01/2010' #end_date = "{:%m/%d/%Y}".format(datetime.now()) #IDs = ['SX5E Index', 'DAX Index', 'UKX Index', 'FTSEMIB Index', 'IBEX Index',
def get_bbg_futures_chain(bbg_root, yellow_key):
    """Return the full futures chain (including expired contracts) for a root.

    :param bbg_root: futures root, e.g. 'ES'
    :param yellow_key: Bloomberg yellow key, e.g. 'Index' or 'Comdty'
    :return: list of 'Security Description' strings, one per contract
    """
    # the generic "A" contract of the root carries the FUT_CHAIN bulk field
    tckr = bbg_root.upper() + 'A ' + yellow_key
    # NOTE(review): the trailing space in 'FUT_CHAIN ' and the positional
    # override dict are kept as-is — confirm against the tia API whether the
    # override should be the keyword INCLUDE_EXPIRED_CONTRACTS=1 instead
    resp = LocalTerminal.get_reference_data(tckr, 'FUT_CHAIN ',
                                            {'INCLUDE_EXPIRED_CONTRACTS': 1})
    x = resp.as_map()
    # BUG FIX: dict views are not subscriptable in Python 3; the original
    # x.values()[0].values()[0] only worked under Python 2
    outer = next(iter(x.values()))
    inner = next(iter(outer.values()))
    return list(inner['Security Description'])
price_fields, start_date, end_date, period='DAILY').as_frame() d[name].columns = d[name].columns.droplevel() d[name] = d[name].append( pd.DataFrame(data={ 'LAST PRICE': 100, 'HIGH': 100, 'LOW': 100 }, index=[(d[name].index[0] + timedelta(days=-1)) ])).sort_index() d[name] = d[name].fillna(method='ffill') m[name] = LocalTerminal.get_reference_data(code, ref_data).as_frame() n[name] = d[name]['LAST PRICE'].pct_change().dropna().to_frame() n[name] = n[name].rename(columns={'LAST PRICE': 'p_ret'}) n[name]['c_ret'] = (m[name]['CPN'].item() / 100) / 252 n[name]['cum_cpn'] = n[name]['c_ret'].expanding().sum() n[name]['f_ret'] = apply_fin((m[name]['CRNCY'].item()), 0.50) n[name]['f_ret'] = n[name]['f_ret'].fillna(method='ffill') n[name]['cum_f'] = n[name]['f_ret'].expanding().sum() n[name]['t_ret'] = n[name]['c_ret'] + n[name]['f_ret'] + n[name]['p_ret'] n[name]['cum_ret'] = n[name]['t_ret'].expanding().sum() date_now = "{:%m_%d_%Y}".format(d[name].last_valid_index()) for i in n.keys(): corp_tkr = m[i]['COMPANY_CORP_TICKER'].item() cum_ret = go.Scatter(
#Tia V3API tools - snippets. #https://github.com/bpsmith/tia #http://nbviewer.ipython.org/github/bpsmith/tia/blob/master/examples/v3api.ipynb #Dependencies from tia.bbg import LocalTerminal import pandas as pd #Pandas needs numpy #Example code from Tia snippets. Do not run this as module!! ########################### ##Reference Data Requests## ########################### # Single SID, Multiple Valid Fields resp = LocalTerminal.get_reference_data('MSFT US EQUITY', ['PX_LAST', 'GICS_SECTOR_NAME', 'VOLATILITY_30D']) resp.as_frame() # Get the response as a dict resp.as_map() # Single SID, Invalid Fields # Ability to ignore errors resp = LocalTerminal.get_reference_data('MSFT US EQUITY', ['PX_LAST', 'GICS_SECTOR_NAME', 'BAD FIELD'], ignore_field_error=1) resp.as_frame() # Multiple SID, Invalid Fields # allows for non-homogeneous security types to be batched together resp = LocalTerminal.get_reference_data(['ED1 COMDTY', 'MSFT US EQUITY'], ['PX_LAST', 'GICS_SECTOR_NAME'], ignore_field_error=1) resp.as_frame()