def bloomberg_prices(tickers, start_dt, frequency, names):
    """Get Bloomberg PX_LAST price history and rename the columns.

    Parameters
    ----------
    tickers : str or list of str
        Bloomberg security identifier(s).
    start_dt : date-like or str
        First date of the history request.
    frequency : str
        Bloomberg periodicity, e.g. 'DAILY' or 'MONTHLY'.
    names : sequence
        Column names applied to the result; must match the number of
        returned columns.

    Returns
    -------
    pandas.DataFrame
        Price history with one column per ticker, renamed to ``names``.
    """
    # BUG FIX: the original wrapped the argument as [tickers], which nested a
    # list inside a list whenever the caller already passed a list; tia
    # accepts either a single ticker string or a list, so pass it through.
    # Also dropped a dead `df = pd.DataFrame()` that was immediately
    # overwritten.
    rep1 = LocalTerminal.get_historical(tickers, ['PX_LAST'],
                                        start=start_dt, period=frequency)
    df = rep1.as_frame()
    df.columns = names
    return df
def bdps(symbol, field):
    """Download the current value of `field` for `symbol` as a DataFrame."""
    from tia.bbg import LocalTerminal
    import pandas as pd

    response = LocalTerminal.get_reference_data(symbol, field)
    return response.as_frame()
def _simu_memb_weight(ticker_list, as_of_date):
    """Estimate member weights (%) from EUR market caps on `as_of_date`."""
    frame = LocalTerminal.get_historical(
        ticker_list, "CUR_MKT_CAP", as_of_date, as_of_date,
        EQY_FUND_CRNCY='EUR').as_frame()
    # Move the ticker level out of the columns and drop the date level so
    # the frame is indexed by ticker with a single CUR_MKT_CAP column.
    frame = frame.stack(level=0)
    frame.index = frame.index.droplevel(0)
    # Weight estimate: each market cap as a percentage of the total.
    frame['Weight_est'] = frame['CUR_MKT_CAP'] / frame['CUR_MKT_CAP'].sum() * 100
    return frame
def get_data_blp_historical(self):
    """Import historical data for self.pair from the Bloomberg API as a DataFrame."""
    # No ' Curncy' suffix is appended: the caller may want a Comdty or Index.
    security = self.pair
    response = LocalTerminal.get_historical(security, self.fields,
                                            self.startDate, self.endDate,
                                            self.period)
    frame = response.as_frame()
    # The response carries MultiIndex columns (security, field), e.g.
    # [['EURUSD Curncy'], ['PX_OPEN', 'PX_LAST', ...]]; keep only the
    # field level so the columns are plain field names.
    frame.columns = frame.columns.get_level_values(1)
    return frame
def download(ticker):
    """Download daily OHLCV history for one Bloomberg ticker and write it to CSV."""
    frame = LocalTerminal.get_historical(
        ticker, fields, start, end, period="DAILY"
    ).as_frame()
    # Drop the ticker level so the columns are just the field names.
    frame.columns = frame.columns.droplevel()
    rename_map = {
        "OPEN": "open",
        "HIGH": "high",
        "LOW": "low",
        "LAST PRICE": "close",
        "VOLUME": "volume",
    }
    frame = frame.rename(columns=rename_map).dropna()
    # '/' is not valid in a filename; swap it for '.'.
    ticker = ticker.replace("/", ".")
    frame.to_csv(fr"{output_dir}/{ticker}.csv")
def get_data_blp_intraday(self, daylag, minutes):
    """Import intraday TRADE bars for self.pair from the Bloomberg API.

    Parameters
    ----------
    daylag : int
        Number of business days before self.endDate at which the window starts.
    minutes : int
        Bar interval in minutes.

    Returns
    -------
    pandas.DataFrame
        Intraday bars indexed by bar time.
    """
    # No ' Curncy' suffix is appended: the caller may want a Comdty or Index.
    formattedPair = self.pair
    event = 'TRADE'
    # Start `daylag` business days before endDate, at midnight.
    # BUG FIX: pd.datetools and pd.datetime were removed from modern pandas;
    # use the public offsets API and the datetime module instead.
    # (endDate - BDay(daylag) is equivalent to the old
    # pd.datetools.BDay(-daylag).apply(endDate).)
    delta = self.endDate - pd.tseries.offsets.BDay(daylag)
    start = datetime.datetime.combine(delta, datetime.time(0, 0))
    end = self.endDate
    print(end)
    intraDayDf = LocalTerminal.get_intraday_bar(
        formattedPair, event, start, end, interval=minutes).as_frame()
    # Index the bars by their timestamp (set_index returns a new frame).
    intraDayDf = intraDayDf.set_index('time')
    return intraDayDf
def buildTree(self):
    """Build a tree of Fed-funds target-rate scenarios with probabilities.

    Starting from the current FDTR Index level, each step applies one row of
    (p_hike, p_nohike, p_cut) from self.probabilities with a fixed 25bp jump,
    accumulating the probability of reaching each rate level.

    Returns
    -------
    list of dict
        tree[k] maps a rate level (stored as str) to its cumulative
        probability after k steps; tree[0] is {current_rate: 1}.
    """
    jumpRate = 0.25  # 25bp move per hike/cut
    currentRate = LocalTerminal.get_reference_data(
        'FDTR Index', 'px_last').as_frame()['px_last']['FDTR Index']
    probabilities = self.probabilities
    tree = [{str(currentRate): 1}]
    for p_hike, p_nohike, p_cut in probabilities.values:
        new_branch = {}
        # BUG FIX: dict.iteritems() is Python 2 only; use items() on Python 3.
        for rates_prev, prob_prev in tree[-1].items():
            rate_up = str(float(rates_prev) + jumpRate)
            rate_down = str(float(rates_prev) - jumpRate)
            # Ensure all three reachable nodes exist before accumulating.
            for key in (rate_up, rate_down, rates_prev):
                new_branch.setdefault(key, 0)
            new_branch[rate_up] += prob_prev * p_hike
            new_branch[rates_prev] += prob_prev * p_nohike
            new_branch[rate_down] += prob_prev * p_cut
        tree.append(new_branch)
    return tree
## This uses the 'tia' package Bloomberg API wrapper and utils
## from https://github.com/bpsmith/tia
## Installed at C:\Python27\Lib\site-packages

## Imports for simple daily data query
from tia.bbg import LocalTerminal
import pandas as pd

# Other imports for all tia utils
#import datetime
#import matplotlib.pyplot as plt

# Simple Bloomberg data download into a response object
resp = LocalTerminal.get_historical('INTC US EQUITY', ['PX_OPEN', 'PX_LAST'],
                                    start='1/1/2014', end='3/1/2014')

# View data in terminal
#resp.as_map()

# Data as data frame
resp_frame = resp.as_frame()

# To csv.
# BUG FIX: the path is now a raw string so the backslashes (\P, \S, \B, \h)
# are not interpreted as (invalid) escape sequences; the value is unchanged.
resp_frame.to_csv(r'C:\Program Files\StrataStack\Bberg_Python_out\histTest.csv')  #, sep='\t')
def bloomberg(self):
    """Enrich the trade frame with Bloomberg high/low prices and BVAL yields.

    Pulls PX_HIGH/PX_LOW and (from the BVAL pricing source) YLD_YTM_MID /
    YLD_CHG_NET_2D_NO_BP for the securities on the run date, reshapes each
    field into a (bond, value) lookup, merges them onto the trade frame, and
    flags whether each trade printed inside the day's range.
    """
    securities = self.secList()
    df = self.frame()
    # NOTE(review): this local is never used below -- the requests use
    # self.rundate instead (the sibling implementation uses the local);
    # confirm which is intended.
    rundate = get_rundate(rundate=df)
    historical_data = LocalTerminal.get_historical(
        securities, ['PX_HIGH', 'PX_LOW'],
        start=self.rundate,
        end=self.rundate,
        ignore_security_error=1).as_frame()
    # Transpose so each row is (security, field, value) after reset_index.
    historical_data = historical_data.transpose().reset_index()
    historical_bval = LocalTerminal.get_historical(
        securities, ['YLD_YTM_MID', 'YLD_CHG_NET_2D_NO_BP'],
        start=self.rundate,
        end=self.rundate,
        PRICING_SOURCE='BVAL',
        ignore_security_error=1).as_frame()
    historical_bval = historical_bval.transpose().reset_index()
    # NOTE(review): this concat result is never used (dead code).
    frames = [historical_data, historical_bval]
    frames = pd.concat(frames)
    hd = historical_data
    hb = historical_bval
    # Label the transposed price frame: security, field name, value.
    hdcols = ['bond', 'pcs', 'price']
    hd.columns = hdcols
    # Split the long frame into one two-column lookup per field.
    high = hd[hd['pcs'] == 'PX_HIGH']
    high = high[['bond', 'price']]
    high.columns = ['bond', "PX_HIGH"]
    low = hd[hd['pcs'] == 'PX_LOW']
    low = low[['bond', 'price']]
    low.columns = ['bond', "PX_LOW"]
    hbcols = ['bond', 'pcs', 'yield']
    hb.columns = hbcols
    bid = hb[hb['pcs'] == 'YLD_YTM_MID']
    bid = bid[['bond', 'yield']]
    bid.columns = ['bond', "YLD_YTM_MID"]
    ask = hb[hb['pcs'] == 'YLD_CHG_NET_2D_NO_BP']
    ask = ask[['bond', 'yield']]
    ask.columns = ['bond', "YLD_CHG_NET_2D_NO_BP"]
    # Inner-join each lookup onto the trade frame by security description.
    x = pd.merge(df, high, left_on='Parskeyeable Description',
                 right_on='bond', how='inner')
    x = pd.merge(x, low, left_on='Parskeyeable Description',
                 right_on='bond', how='inner')
    x = pd.merge(x, bid, left_on='Parskeyeable Description',
                 right_on='bond', how='inner')
    x = pd.merge(x, ask, left_on='Parskeyeable Description',
                 right_on='bond', how='inner')
    # Buys are 'inside' when printed above the low; sells when below the high.
    x['inside'] = np.where(
        x['Buy/Sell'] == 'B',
        np.where(x['Trade price'] > x['PX_LOW'], 'inside', 'outside'),
        np.where(x['Trade price'] < x['PX_HIGH'], 'inside', 'outside'))
    # Distance from the relevant extreme, in price terms.
    x['PX_HIGH_LOW_DIFF_BPS'] = np.where(x['Buy/Sell'] == 'B',
                                         x['Trade price'] - x['PX_LOW'],
                                         x['PX_HIGH'] - x['Trade price'])
    # Same distance expressed as a percentage of the extreme.
    x['PX_HIGH_LOW_DIFF_%'] = np.where(
        x['Buy/Sell'] == 'B',
        ((x['Trade price'] - x['PX_LOW']) / x['PX_LOW']) * 100,
        ((x['PX_HIGH'] - x['Trade price']) / x['PX_HIGH']) * 100)
    return x[[
        'As of Date', 'Ticket Number', 'Security Description', 'Trader Name',
        'Buy/Sell', 'TRADE FEED TRADE AMOUNT', 'Trade price',
        'TBLT Ticket Type', 'Cusip Number', 'Parskeyeable Description',
        'Security Type', 'Trader Login', 'Sales Login', 'Issue Date',
        'Maturity Date', 'Principal', 'Counterparty',
        'Master Account Long Name', 'Master Account', 'Yield',
        'Int at Maturity', 'Days from Settle to Maturity',
        'Accrued Number Of Days', 'Coupon', 'Inflation-Linked Indicator',
        'Mid Modified Duration', 'tenor', 'Years_until_maturity', 'PX_HIGH',
        'PX_LOW', 'YLD_YTM_MID', 'YLD_CHG_NET_2D_NO_BP', 'inside',
        'PX_HIGH_LOW_DIFF_BPS', 'PX_HIGH_LOW_DIFF_%'
    ]]
# FX crosses needed to convert fund assets into USD.
fx = ['EURUSD Curncy', 'GBPUSD Curncy']
q = {
    'open_end': oe,
    #'booty': oe
}
fields_hist = ['FUND_TOTAL_ASSETS']
fields_ref = ['FUND_TOTAL_ASSETS_CRNCY']
fields_fx = ['LAST_PRICE']
# Daily FX history, forward-filled over non-trading days so every calendar
# day has a rate.
d_fx = LocalTerminal.get_historical(
    fx, fields_fx, start_date, end_date,
    period='DAILY',
    non_trading_day_fill_option='ALL_CALENDAR_DAYS',
    non_trading_day_fill_method='PREVIOUS_VALUE').as_frame()
# Drop the field level so columns are just the tickers, then rename to
# currency codes.
d_fx.columns = d_fx.columns.droplevel(-1)
d_fx = d_fx.rename(columns={'EURUSD Curncy': 'EUR', 'GBPUSD Curncy': 'GBP'})
# USD needs no conversion.
d_fx['USD'] = 1.0
d = {}  #dict of original dataframes per asset class
m = {}  #dict of ref data
b = {}  #list of lists for asset diffs
p = {}  #list of list for $asset diffs
f = {}  #simplified asset dicts
r = {}  #daily rate of change dict
u = {}  #weekly rate of change
pq = {}  #monthly rate of change
from tia.bbg import LocalTerminal
import matplotlib
import matplotlib.pyplot as plt
import tia.analysis.ta as ta

# Single SID, Multiple Valid Fields
resp = LocalTerminal.get_historical(['EURUSD Curncy'], ['PX_LAST'],
                                    start='1/1/2020', end='10/27/2020')
df = resp.as_frame()

# Add the 30 and 50 day rolling averages to the dataframe
df['eurusd30dma'] = df['EURUSD Curncy']['PX_LAST'].rolling(window=30).mean()
df['eurusd50dma'] = df['EURUSD Curncy']['PX_LAST'].rolling(window=50).mean()

# Add the trading signals when the 30 day crosses the 50 day MA
signal = ta.cross_signal(df['eurusd30dma'], df['eurusd50dma']).dropna()

# Only keep the entry/exit signals: zero out runs where the signal is
# unchanged from the previous bar, then drop the zeros.
entry_signal = signal.copy()
entry_signal[signal.shift(1) == signal] = 0
entry_signal = entry_signal[entry_signal != 0]
#df['entry signal'] = entry_signal
#print(entry_signal.head())
#print(df.head())
#print(df['eurusd30dma']['2020-06-01'])
#matplotlib.style.use('ggplot')
# NOTE(review): this statement is truncated in this chunk -- its remaining
# arguments continue beyond the visible source.
df.plot(kind='line',
d2 = {} #final dict of a prices temp = {} #dict of temp dataframes temp2 = {} #additional dict of temp dataframes ref_data = [ 'OPTION_ROOT_TICKER', 'OPT_MULTIPLIER', 'OPT_UNDL_PX', 'COMPANY_CORP_TICKER', 'CRNCY' ] #get initial prices in 'd', create a temp dataframe with entry/exit dates, # price, and expiry for each ticker for name in IDs: d[file, name] = LocalTerminal.get_historical(name, fields, start_date, end_date, period='DAILY').as_frame() d[file, name].columns = d[file, name].columns.droplevel() d[file, name] = d[file, name].fillna(method='ffill') temp[file, name] = ts[file].loc[ts[file].Ticker == name][[ 'Date', 'Amount', 'Expiry', 'Direction', 'Shares' ]] temp[file, name].index = temp[file, name].Date temp[file, name] = temp[file, name].drop('Date', axis=1) m[file, name] = LocalTerminal.get_reference_data(name, ref_data).as_frame() n[file] = LocalTerminal.get_reference_data(name, ref_data).as_frame()
market = { "BBG Barclays US HY": "LF98TRUU Index", "Crude Oil": "CL1 Comdty", "2s_10s": "USYC2Y10 Index", "USD_Index": "DXY Index", "IHYG": "IHYG LN Equity", "Oil_Equipment_Services": "XES US Equity", "Oil_E": "XOP US Equity", "OIH ETF": "OIH US Equity" } # BBG Barclays US HY cfields = ["LAST PRICE"] df = LocalTerminal.get_historical(list(long_tickers.values()), cfields, start_date, end_date, period="DAILY").as_frame() df.columns = df.columns.droplevel(-1) #%% for i, j in long_tickers.items(): df = df.rename(columns={j: i}) df_price = df.copy().dropna() #df = df.pct_change() ''' selected = ['CNP', 'F', 'WMT', 'GE', 'TSLA', 'SPY', 'QQQ', 'IWM'] select_string = ' '.join(selected) def download_yf(long_tickers): df = yf.download(long_tickers=long_tickers,
##essential imports for simple daily data query from tia.bbg import LocalTerminal import pandas as pd ##Import dict to map bberg tenors to months from TenorsDictionary import Tenors_dict # Multiple SID, Invalid Fields # allows for non-homogeneous security types to be batched together #These tickers are hard coded from the "USD ISDA CDS Fixing SWAP CURVE" #To get Tenors need to use bberg field "SECURITY_TENOR_ONE" on money market instruments and #"SECURITY_TENOR_TWO" on swap instruments. So run two queries and append data frame resp = LocalTerminal.get_reference_data(['USLFD1M ISCF Curncy', 'USLFD2M ISCF Curncy', 'USLFD3M ISCF Curncy', 'USLFD6M ISCF Curncy', 'USLFD12M ISCF Curncy'], ['LAST_UPDATE_DT','SECURITY_TENOR_ONE', 'SECURITY_TYP','PX_LAST'], ignore_field_error=1) df=resp.as_frame() #Rename Tenor column for consistency with Swap data fram before appending df.rename(columns={'SECURITY_TENOR_ONE': 'Tenor'}, inplace=True) resp = LocalTerminal.get_reference_data(['USSWAP2 Curncy', 'USSWAP3 Curncy', 'USSWAP4 Curncy', 'USSWAP5 Curncy', 'USSWAP6 Curncy', 'USSWAP7 Curncy', 'USSWAP8 Curncy',
@author: dsugasa
"""
# NOTE(review): the two lines above are the tail of a module docstring whose
# opening quotes are outside this chunk.

import pandas as pd
from tia.bbg import LocalTerminal
import numpy as np
from datetime import datetime
import QuantLib as ql
import numpy as np  # NOTE(review): duplicate import; numpy is already imported above

#YCSW0022 Index
# retrieve GBP curve using API; S22 GBP (vs. 6M Libor)
gbp = LocalTerminal.get_reference_data(
    'YCSW0022 Index',
    'par_curve',
).as_frame()
# The par_curve cell holds a nested frame of curve points.
gbp2 = gbp.iloc[0].loc['par_curve']
dates = gbp2['Date'].tolist()
disc = gbp2['Discount Factor'].tolist()
# Convert the curve dates to QuantLib dates via "dd,mm,yyyy" strings.
dates = [datetime.strftime(i, "%d,%m,%Y") for i in dates]
ql_dates = [ql.Date(i, "%d,%m,%Y") for i in dates]
rates = gbp2['Rate'].tolist()

#mgr = dm.BbgDataManager()
## set dates, securities, and fields
#start_date = '01/01/2010'
#end_date = "{:%m/%d/%Y}".format(datetime.now())
#IDs = ['SX5E Index', 'DAX Index', 'UKX Index', 'FTSEMIB Index', 'IBEX Index',
# Save #dictionary = {'hello':'world'} np.save(fr'{output_dir2}/tickers.npy', ticker_dict) # # Load # read_dictionary = np.load('my_file.npy',allow_pickle='TRUE').item() # print(read_dictionary['hello']) # displays "world" #ticker_dict = np.load(fr'{output_dir2}/tickers.npy', allow_pickle=True).item() hist_tickers = list(set().union(*ticker_dict.values())) hist_tickers_bbg = [x + " Equity" for x in hist_tickers] fields = ["OPEN", "HIGH", "LOW", "LAST PRICE", "VOLUME"] start = "2000-01-01" end = "{:%m/%d/%Y}".format(datetime.now()) df = LocalTerminal.get_historical(hist_tickers_bbg, fields, start, end, period = 'DAILY').as_frame() df = df.reset_index() #df.to_csv(fr'{output_dir2}/historical_data.csv') df.to_pickle(fr'{output_dir2}/historical_data.pkl') #df = pd.read_csv(fr'{output_dir2}/historical_data.csv') df = pd.read_pickle(fr'{output_dir2}/historical_data.pkl') #%% data = {} date_list = list(ticker_dict.keys()) #fields = ["OPEN", "HIGH", "LOW", "LAST PRICE", "VOLUME"] #df.set_index('index', inplace=True)
# NOTE(review): the next two statements are the tail of a drawdown helper
# whose `def` line is above this chunk; indentation of 4 spaces is assumed.
    dd2here = prices - max2here
    return np.round(dd2here.min(), 3)

# set dates, securities, and fields
start_date = '01/01/2005'
end_date = "{:%m/%d/%Y}".format(datetime.now())
IDs = ['CVALIM 8.25 CORP', 'CVAL IM EQUITY']
price_fields = ['LAST PRICE', 'HIGH', 'LOW']
ref_data = ['ID_ISIN', 'CPN', 'CPN_FREQ', 'CRNCY', 'SECURITY_NAME',
            'NXT_CALL_DT', 'ISSUE_DT', 'COMPANY_CORP_TICKER']
df = LocalTerminal.get_historical(IDs, 'LAST PRICE', start_date, end_date,
                                  period = 'DAILY').as_frame()
# Drop the field level so the columns are just the tickers, then forward-fill
# gaps and drop leading rows with no data.
df.columns = df.columns.droplevel(-1)
df = df.fillna(method = 'ffill')
df = df.dropna()
#for q in IDs:
#    name = list(q.values())[1]
#    code = list(q.values())[0]
#
#    d[name] = LocalTerminal.get_historical(code, price_fields, start_date, end_date, period = 'DAILY').as_frame()
#    d[name].columns = d[name].columns.droplevel()
#    d[name] = d[name].append(pd.DataFrame(data = {'LAST PRICE':100, 'HIGH':100, 'LOW':100}, index=[(d[name].index[0] + timedelta(days = -1))])).sort_index()
#    d[name] = d[name].fillna(method = 'ffill')
#
#    m[name] = LocalTerminal.get_reference_data(code, ref_data).as_frame()
import pandas as pd

from tia.bbg import LocalTerminal

if __name__ == '__main__':
    # BUG FIX: pd.datetools and pd.datetime were removed from modern pandas;
    # use the public offsets API and pd.Timestamp instead. The arithmetic is
    # equivalent to the old BDay(-4).apply(now) / BMonthBegin(-2).apply(now).
    d = pd.Timestamp.now() - pd.tseries.offsets.BDay(4)
    m = pd.Timestamp.now() - pd.tseries.offsets.BMonthBegin(2)

    def banner(msg):
        """Print msg framed by asterisk rules."""
        print('*' * 25)
        print(msg)
        print('*' * 25)

    banner('ReferenceDataRequest: single security, single field, frame response')
    response = LocalTerminal.get_reference_data('msft us equity', 'px_last')
    print(response.as_map())
    print(response.as_frame())

    banner('ReferenceDataRequest: single security, multi-field (with bulk), frame response')
    response = LocalTerminal.get_reference_data('eurusd curncy', ['px_last', 'fwd_curve'])
    print(response.as_map())
    rframe = response.as_frame()
    print(rframe.columns)
    # show frame within a frame
    # BUG FIX: DataFrame.ix was removed; take the positional row with iloc,
    # then select the 'fwd_curve' column by label.
    print(rframe.iloc[0]['fwd_curve'].tail())

    banner('ReferenceDataRequest: multi security, multi-field, bad field')
    response = LocalTerminal.get_reference_data(['eurusd curncy', 'msft us equity'],
                                                ['px_last', 'fwd_curve'],
                                                ignore_field_error=1)
    print(response.as_frame()['fwd_curve']['eurusd curncy'])

    banner('HistoricalDataRequest: multi security, multi-field, daily data')
def downloadData(self, tw):
    """Download px_last history for self.idx starting `tw` business days ago.

    Parameters
    ----------
    tw : int
        Trailing window in business days; stored on self.tw.
    """
    self.tw = tw
    # BUG FIX: pd.datetools and pd.datetime were removed from modern pandas;
    # equivalent to the old BDay(-tw).apply(now) / BMonthBegin(-2).apply(now).
    self.d = pd.Timestamp.now() - pd.tseries.offsets.BDay(self.tw)
    self.m = pd.Timestamp.now() - pd.tseries.offsets.BMonthBegin(2)
    self.response = LocalTerminal.get_historical(self.idx, ['px_last'],
                                                 start=self.d)
#!/usr/bin/env python import numpy as np from datetime import datetime from tia.bbg import LocalTerminal import QuantLib as ql import market_data.yield_curve as yc ''' Download BBG CDS curve ''' altice = LocalTerminal.get_reference_data('YCCD2204 Index', 'CURVE_TENOR_RATES', ).as_frame() curve = altice.iloc[0].loc['CURVE_TENOR_RATES'] memb = curve['Tenor Ticker'].tolist() memb = memb[1:] tenor = curve['Tenor'].tolist() tenor = tenor[1:] tenor = ([int(z.strip('Y')) for z in tenor]) rates = [] for i in memb: z = LocalTerminal.get_reference_data(i, 'CDS_FLAT_SPREAD', ).as_frame() rates.append((z.loc[i].item()/10000)) cc_raw = dict(zip(tenor,rates)) ''' Build Quantlib Credit Curve
import pandas as pd

from tia.bbg import LocalTerminal

if __name__ == "__main__":
    # BUG FIX: this block used Python 2 print statements (a SyntaxError on
    # Python 3) plus the removed pd.datetools / pd.datetime / DataFrame.ix
    # APIs; all are updated to their modern equivalents.
    d = pd.Timestamp.now() - pd.tseries.offsets.BDay(4)
    m = pd.Timestamp.now() - pd.tseries.offsets.BMonthBegin(2)

    def banner(msg):
        """Print msg framed by asterisk rules."""
        print("*" * 25)
        print(msg)
        print("*" * 25)

    banner("ReferenceDataRequest: single security, single field, frame response")
    response = LocalTerminal.get_reference_data("msft us equity", "px_last")
    print(response.as_map())
    print(response.as_frame())

    banner("ReferenceDataRequest: single security, multi-field (with bulk), frame response")
    response = LocalTerminal.get_reference_data("eurusd curncy", ["px_last", "fwd_curve"])
    print(response.as_map())
    rframe = response.as_frame()
    print(rframe.columns)
    # show frame within a frame
    print(rframe.iloc[0]["fwd_curve"].tail())

    banner("ReferenceDataRequest: multi security, multi-field, bad field")
    response = LocalTerminal.get_reference_data(
        ["eurusd curncy", "msft us equity"], ["px_last", "fwd_curve"], ignore_field_error=1
    )
    print(response.as_frame()["fwd_curve"]["eurusd curncy"])
def bloomberg(self):
    """Enrich the trade frame with day high/low and BVAL bid/ask prices.

    Pulls PX_HIGH/PX_LOW and (from the BVAL pricing source) PX_ASK/PX_BID for
    the securities on the run date, reshapes each field into a (bond, value)
    lookup, merges them onto the trade frame, and flags whether each trade
    printed inside the day's range (falling back to the BVAL bid/ask when no
    high/low is available).
    """
    securities = self.secList()
    df = self.frame()
    rundate = get_rundate(rundate=df)
    historical_data = LocalTerminal.get_historical(securities,
                                                   ['PX_HIGH', 'PX_LOW'],
                                                   start=rundate,
                                                   end=rundate).as_frame()
    historical_bval = LocalTerminal.get_historical(
        securities, ['PX_ASK', 'PX_BID'],
        start=rundate,
        end=rundate,
        PRICING_SOURCE='BVAL').as_frame()
    # Transpose so each row is (security, field, value) after reset_index.
    historical_bval = historical_bval.transpose().reset_index()
    historical_data = historical_data.transpose().reset_index()
    # NOTE(review): this concat result is never used (dead code).
    frames = [historical_bval, historical_data]
    frames = pd.concat(frames)
    hd = historical_data
    hb = historical_bval
    # Label the transposed frames: security, field name, value.
    hdcols = ['bond', 'pcs', 'price']
    hd.columns = hdcols
    # Split the long frames into one two-column lookup per field.
    high = hd[hd['pcs'] == 'PX_HIGH']
    high = high[['bond', 'price']]
    high.columns = ['bond', "PX_HIGH"]
    low = hd[hd['pcs'] == 'PX_LOW']
    low = low[['bond', 'price']]
    low.columns = ['bond', "PX_LOW"]
    hbcols = ['bond', 'pcs', 'price']
    hb.columns = hbcols
    bid = hb[hb['pcs'] == 'PX_BID']
    bid = bid[['bond', 'price']]
    bid.columns = ['bond', "PX_BID"]
    ask = hb[hb['pcs'] == 'PX_ASK']
    ask = ask[['bond', 'price']]
    ask.columns = ['bond', "PX_ASK"]
    # Inner-join each lookup onto the trade frame by security description.
    x = pd.merge(df, high, left_on='Parskeyeable Description',
                 right_on='bond', how='inner')
    x = pd.merge(x, low, left_on='Parskeyeable Description',
                 right_on='bond', how='inner')
    x = pd.merge(x, bid, left_on='Parskeyeable Description',
                 right_on='bond', how='inner')
    x = pd.merge(x, ask, left_on='Parskeyeable Description',
                 right_on='bond', how='inner')
    x = x[[
        'As of Date', 'Ticket Number', 'Security Description', 'Trader Name',
        'Buy/Sell', 'TRADE FEED TRADE AMOUNT', 'Trade price',
        'TBLT Ticket Type', 'Cusip Number', 'Benchmark Cusip or Bloomberg',
        'Parskeyeable Description', 'Security Type', 'Trader Login',
        'Sales Login', 'Par Amount', 'Issue Date', 'Principal',
        'Market Sector Description', 'Identifier', 'Counterparty',
        'Master Account Long Name', 'Master Account', 'Benchmark', 'Z-Spread',
        'Benchmark Price', 'Factor', 'PX_ASK', 'PX_BID', 'PX_HIGH', 'PX_LOW'
    ]]
    bestEx = x
    # Buys are 'inside' when printed above the low; sells when below the high.
    bestEx['inside'] = np.where(
        bestEx['Buy/Sell'] == 'B',
        np.where(bestEx['Trade price'] > bestEx['PX_LOW'], 'inside',
                 'outside'),
        np.where(bestEx['Trade price'] < bestEx['PX_HIGH'], 'inside',
                 'outside'))
    bestEx['insideBidAsk'] = np.where(
        bestEx['PX_HIGH'].astype(str) == 'nan',  #if this is true look for Buy sell code
        np.where(
            bestEx['Buy/Sell'] == 'B',
            np.where(bestEx['Trade price'] > bestEx['PX_BID'], 'inside',
                     'outside'),
            np.where(bestEx['Trade price'] < bestEx['PX_ASK'], 'inside',
                     'outside')),
        bestEx['inside'])
    # Distance from the relevant extreme as a percentage of that extreme.
    bestEx['PX_HIGH_LOW_DIFF_%'] = np.where(
        bestEx['Buy/Sell'] == 'B',
        ((bestEx['Trade price'] - bestEx['PX_LOW']) / bestEx['PX_LOW']) * 100,
        ((bestEx['Trade price'] - bestEx['PX_HIGH']) / bestEx['PX_HIGH']) * 100)
    bestEx['PX_BID_ASK_DIFF_%'] = np.where(
        bestEx['Buy/Sell'] == 'B',
        ((bestEx['Trade price'] - bestEx['PX_BID']) / bestEx['PX_BID']) * 100,
        ((bestEx['Trade price'] - bestEx['PX_ASK']) / bestEx['PX_ASK']) * 100)
    return bestEx
    # NOTE(review): unreachable -- dead code after the return above.
    return historical_data
# NOTE(review): the next three statements reference call_price, call_date,
# callability_schedule and null_calendar, all defined above this chunk --
# they appear to extend a callability schedule in 3-month steps.
callability_price = ql.CallabilityPrice(call_price, ql.CallabilityPrice.Clean)
callability_schedule.append(
    ql.Callability(callability_price, ql.Callability.Call, call_date))
call_date = null_calendar.advance(call_date, 3, ql.Months)
'''
Build Yield Curve
retrieve USD curve; S23 USD Swaps (30/360, S/A)
'''
# Anchor QuantLib's evaluation date to today, via a "dd,mm,yyyy" string.
today = datetime.date(datetime.now())
td = datetime.strftime(today, "%d,%m,%Y")
todaysDate = ql.Date(td, "%d,%m,%Y")
ql.Settings.instance().evaluationDate = todaysDate
usd = LocalTerminal.get_reference_data(
    'YCSW0023 Index',
    'par_curve',
).as_frame()
# The par_curve cell holds a nested frame of curve points.
s23 = usd.iloc[0].loc['par_curve']
###pull dates
dates = s23['Date'].tolist()
dates = [datetime.strftime(i, "%d,%m,%Y") for i in dates]
ql_dates = [ql.Date(i, "%d,%m,%Y") for i in dates]
# Prepend today so the curve starts at the evaluation date.
ql_dates = [todaysDate] + ql_dates
###pull rates
rates = s23['Rate'].tolist()
# Prepend the overnight rate, then convert all rates from percent to decimal.
on = LocalTerminal.get_reference_data('US00O/N Index', 'PX_LAST').as_frame()
on = on.at['US00O/N Index', 'PX_LAST']
rates = [np.round(on, decimals=5)] + rates
rates = [i * .01 for i in rates]
###build yield curve
import pandas as pd

from tia.bbg import LocalTerminal

if __name__ == '__main__':
    # BUG FIX: pd.datetools and pd.datetime were removed from modern pandas;
    # equivalent to the old BDay(-4).apply(now) / BMonthBegin(-2).apply(now).
    d = pd.Timestamp.now() - pd.tseries.offsets.BDay(4)
    m = pd.Timestamp.now() - pd.tseries.offsets.BMonthBegin(2)

    def banner(msg):
        """Print msg framed by asterisk rules."""
        print('*' * 25)
        print(msg)
        print('*' * 25)

    banner('ReferenceDataRequest: single security, single field, frame response')
    response = LocalTerminal.get_reference_data('msft us equity', 'px_last')
    print(response.as_map())
    print(response.as_frame())

    banner('ReferenceDataRequest: single security, multi-field (with bulk), frame response')
    response = LocalTerminal.get_reference_data('eurusd curncy', ['px_last', 'fwd_curve'])
    print(response.as_map())
    rframe = response.as_frame()
    print(rframe.columns)
    # show frame within a frame
    # BUG FIX: DataFrame.ix was removed; take the positional row with iloc,
    # then select the 'fwd_curve' column by label.
    print(rframe.iloc[0]['fwd_curve'].tail())

    banner('ReferenceDataRequest: multi security, multi-field, bad field')
    response = LocalTerminal.get_reference_data(['eurusd curncy', 'msft us equity'],
                                                ['px_last', 'fwd_curve'],
                                                ignore_field_error=1)
    print(response.as_frame()['fwd_curve']['eurusd curncy'])

    banner('HistoricalDataRequest: multi security, multi-field, daily data')
import plotly.figure_factory as ff
from fredapi import Fred
import credentials

fred = credentials.fred

#set the script start time
start_time = datetime.now()
date_now = "{:%m_%d_%Y}".format(datetime.now())

start_date = '01/01/1950'
end_date = "{:%m/%d/%Y}".format(datetime.now())
IDs = ['GDP CYOY Index', 'CPI YOY Index']  #'SPXT Index']
fields = ['LAST PRICE']

df = LocalTerminal.get_historical(IDs, fields, start_date,
                                  end_date).as_frame()
#period = 'QUARTERLY',
#non_trading_day_fill_option = 'ALL_CALENDAR_DAYS',
#non_trading_day_fill_method = 'PREVIOUS_VALUE').as_frame()
# Drop the field level so the columns are just the tickers, then average to
# quarterly observations.
df.columns = df.columns.droplevel(-1)
df = df.resample('Q').mean()
df = df.dropna()
# Quarter-over-quarter change, then a +1/-1 direction flag (flat quarters
# inherit the previous direction via the ffill replace).
df['gdp_ror'] = df['GDP CYOY Index'].pct_change()
df['cpi_ror'] = df['CPI YOY Index'].pct_change()
df['gdp_dir'] = df.apply(lambda x: 1 if x['gdp_ror'] > 0 else(-1 if \
    x['gdp_ror'] < 0 else 0), axis = 1)
df['gdp_dir'] = df['gdp_dir'].replace(to_replace=0, method='ffill')
# NOTE(review): this statement is truncated in this chunk -- the lambda
# continues beyond the visible source.
df['cpi_dir'] = df.apply(lambda x: 1 if x['cpi_ror'] > 0 else(-1 if \
def bbg_load_meta(bbg_tckr, bbg_flds):
    """Fetch reference-data fields for a ticker and return that ticker's row."""
    frame = LocalTerminal.get_reference_data(bbg_tckr, bbg_flds).as_frame()
    return frame.loc[bbg_tckr]
# set dates, securities, and fields start_date = '01/01/2012' end_date = "{:%m/%d/%Y}".format(datetime.now()) IDs = opt_tickers fields = ['LAST PRICE'] d = {} #dict of original dataframes per ID d2 = {} #final dict of a prices temp = {} #dict of temp dataframes temp2 = {} #additional dict of temp dataframes #get initial prices in 'd', create a temp dataframe with entry/exit dates, # price, and expiry for each ticker for name in IDs: d[name] = LocalTerminal.get_historical(name, fields, start_date, end_date, period = 'DAILY').as_frame() d[name].columns = d[name].columns.droplevel() d[name] = d[name].fillna(method = 'ffill') temp[name] = trade_sheet.loc[trade_sheet.Ticker == name][['Date', 'Amount', 'Expiry', 'Direction','Shares']] temp[name].index = temp[name].Date temp[name] = temp[name].drop('Date', axis=1) #because some of the price info does not extend to maturity, make new pricing # dataframes that have the full price set, including expiry value = 'd2' for i in opt_tickers: temp2[i] = pd.DataFrame(np.nan, columns = ['LAST PRICE_NA'], index = pd.date_range(start = d[i].index[0], end = temp[i]['Expiry'][-1], freq = 'B'))
def get_bbg_futures_chain(bbg_root, yellow_key):
    """Return the list of security descriptions in a futures chain.

    Parameters
    ----------
    bbg_root : str
        Bloomberg futures root, e.g. 'CL'.
    yellow_key : str
        Bloomberg yellow key, e.g. 'Comdty'.

    Returns
    -------
    list
        'Security Description' values for every contract in the chain,
        including expired contracts.
    """
    # Generic 'A' chain ticker, e.g. 'CLA Comdty'.
    tckr = bbg_root.upper() + 'A ' + yellow_key
    # NOTE(review): the trailing space in 'FUT_CHAIN ' is reproduced from the
    # original -- confirm whether the field name really contains it.
    resp = LocalTerminal.get_reference_data(tckr, 'FUT_CHAIN ',
                                            {'INCLUDE_EXPIRED_CONTRACTS': 1})
    x = resp.as_map()
    # BUG FIX: dict.values() returns a non-subscriptable view on Python 3;
    # materialize with list() before indexing (the original .values()[0]
    # chain only worked on Python 2).
    by_security = list(x.values())[0]
    by_field = list(by_security.values())[0]
    return list(by_field['Security Description'])
#!/usr/bin/env python import numpy as np from datetime import datetime from tia.bbg import LocalTerminal import QuantLib as ql ''' Build Yield Curve retrieve USD curve; S23 USD Swaps (30/360, S/A) ''' today = datetime.date(datetime.now()) td = datetime.strftime(today, "%d,%m,%Y") todaysDate = ql.Date(td, "%d,%m,%Y") ql.Settings.instance().evaluationDate = todaysDate usd = LocalTerminal.get_reference_data('YCSW0023 Index', 'par_curve', ).as_frame() s23 = usd.iloc[0].loc['par_curve'] ###pull dates dates = s23['Date'].tolist() dates = [datetime.strftime(i, "%d,%m,%Y") for i in dates] ql_dates = [ql.Date(i, "%d,%m,%Y") for i in dates] ql_dates = [todaysDate] + ql_dates ###pull rates rates = s23['Rate'].tolist() on = LocalTerminal.get_reference_data('US00O/N Index', 'PX_LAST').as_frame() on = on.at['US00O/N Index','PX_LAST'] rates = [np.round(on,decimals = 5)] + rates rates = [i*.01 for i in rates] ###build yield curve spotDates = ql_dates
import pandas as pd

from tia.bbg import LocalTerminal

# Single SID, Multiple Valid Fields
resp = LocalTerminal.get_reference_data(
    ['EURUSD Curncy', 'AUDUSD Curncy'],
    ['MOV_AVG_30D', 'MOV_AVG_50D', 'MOV_AVG_100D', 'MOV_AVG_200D'])
df = resp.as_frame()
#mov_avg_30d = df['MOV_AVG_30D']
#mov_avg_100d = df['MOV_AVG_100D']
#print(mov_avg_30d)
#print(mov_avg_100d)

# Adding a new column to the dataframe based on a calculation from other columns
df['30d - 100d'] = df['MOV_AVG_30D'] - df['MOV_AVG_100D']

# testing the use of iloc and loc for indexing a specific cell
test = df.loc['EURUSD Curncy', '30d - 100d']
test1 = df.iloc[0, 4]
print(test)
print(test1)
print(df)

## adding a column to the dataframe to say 'B' when the 30dma is higher than the 100dma
df['bs'] = 'NA'
# BUG FIX: the original used chained indexing (df['bs'][mask] = ...), which
# raises SettingWithCopyWarning and may silently fail to write back; use a
# single .loc assignment instead.
df.loc[df['30d - 100d'] > 0, 'bs'] = 'B'
df.loc[df['30d - 100d'] < 0, 'bs'] = 'S'
#eurusd = df.loc['EURUSD Curncy']
# extra_ticks = ets['Ticker'].values # tl2.append('EK3988418 Corp') q = set(tl2) # set dates, securities, and fields start_date = "01/04/2017" end_date = "{:%m/%d/%Y}".format(datetime.now()) cfields = ["LAST PRICE"] window = 90 df = LocalTerminal.get_historical(tl2, cfields, start_date, end_date, period="DAILY").as_frame() df.columns = df.columns.droplevel(-1) #df = df.pct_change() #df = df.std(axis=1) #df = df.rolling(window=window).mean() #df = df.dropna() month = df.last_valid_index().month month_full = df.last_valid_index().strftime("%B") day = df.last_valid_index().day year = df.last_valid_index().year output_dir = Path( fr"D:\OneDrive - Northlight Group\Images\Dispersion\{year}\{month_full}")
def downloadData(self):
    """Request the latest px_last for self.futureName and store the response."""
    security = self.futureName
    self.response = LocalTerminal.get_reference_data(security, ['px_last'])
# Money-market indices used to compute financing rates.
fin_IDs = ['EUR001M Index', 'US0001M Index']
price_fields = ['LAST PRICE', 'HIGH', 'LOW']
ref_data = [
    'ID_ISIN', 'CPN', 'CPN_FREQ', 'CRNCY', 'SECURITY_NAME', 'NXT_CALL_DT',
    'ISSUE_DT', 'COMPANY_CORP_TICKER'
]
d = {}  #dict of original dataframes per ID
m = {}  #reference data
n = {}  #pnl data
z = {}  #financing data

# Daily financing-rate history, forward-filled, one frame per index.
for i in fin_IDs:
    z[i] = LocalTerminal.get_historical(i, 'LAST PRICE', start_date,
                                        end_date, period='DAILY').as_frame()
    z[i].columns = z[i].columns.droplevel(-1)
    z[i] = z[i].fillna(method='ffill')

# Each entry of IDs is a dict whose second value is the display name and
# first value is the Bloomberg code.
for q in IDs:
    name = list(q.values())[1]
    code = list(q.values())[0]
    d[name] = LocalTerminal.get_historical(code, price_fields, start_date,
                                           end_date,
                                           period='DAILY').as_frame()
    # Drop the ticker level so the columns are just the field names.
    d[name].columns = d[name].columns.droplevel()
#Tia V3API tools - snippets. #https://github.com/bpsmith/tia #http://nbviewer.ipython.org/github/bpsmith/tia/blob/master/examples/v3api.ipynb #Dependencies from tia.bbg import LocalTerminal import pandas as pd #Pandas needs numpy #Example code from Tia snippets. Do not run this as module!! ########################### ##Reference Data Requests## ########################### # Single SID, Multiple Valid Fields resp = LocalTerminal.get_reference_data('MSFT US EQUITY', ['PX_LAST', 'GICS_SECTOR_NAME', 'VOLATILITY_30D']) resp.as_frame() # Get the response as a dict resp.as_map() # Single SID, Invalid Fields # Ability to ignore errors resp = LocalTerminal.get_reference_data('MSFT US EQUITY', ['PX_LAST', 'GICS_SECTOR_NAME', 'BAD FIELD'], ignore_field_error=1) resp.as_frame() # Multiple SID, Invalid Fields # allows for non-homogeneous security types to be batched together resp = LocalTerminal.get_reference_data(['ED1 COMDTY', 'MSFT US EQUITY'], ['PX_LAST', 'GICS_SECTOR_NAME'], ignore_field_error=1) resp.as_frame()