def postcleanup(self):
     """Cleanse data after split trades have been remerged. <--Still in use?

     Converts quantities to USD using per-year FX rates, computes sales
     credit / markup amounts, sorts by trade datetime, joins bond and
     country reference data, and keeps London books only.
     """
     # Default USDQty to the local quantity; non-USD rows are overwritten below.
     self.df['USDQty']=self.df['Qty']
     nusd=self.df[self.df['CCY']!='USD'][['USDQty','Qty','Year','CCY']].copy()
     # Convert each (year, currency) bucket with that year's FX rate.
     for y in range(2009,2017,1):
         for c in ccy.index:
             nusd.loc[(nusd['Year']==y) & (nusd['CCY']==c),'USDQty']=nusd['Qty']/ccy.loc[c,str(y)]#bitwise & - careful! otherwise pandas.np.all(c1,c2,axis=0) works
     self.df.loc[self.df['CCY']!='USD','USDQty']=nusd['USDQty']
     self.df['MKu'].fillna(0,inplace=True)
     self.df['AbsQty']=self.df['USDQty'].abs()
     # SCu/MKu are quoted in basis points of absolute USD quantity.
     self.df['SC']=self.df['SCu']*self.df['AbsQty']/10000.
     self.df['MK']=self.df['MKu']*self.df['AbsQty']/10000.
     self.df['TotalSC']=self.df['SC']+self.df['MK']
     self.df.sort_values(by='DateDT',inplace=True)
     self.df.reset_index(inplace=True)
     # Enrich with bond reference data, then bucket industry into
     # Sovereign vs Corporate.
     self.df=self.df.join(bonds['TICKER'], on='Bond')
     self.df=self.df.join(bonds['CNTRY_OF_RISK'], on='Bond')
     self.df=self.df.join(bonds['INDUSTRY_GROUP'], on='Bond')
     self.df.loc[self.df['INDUSTRY_GROUP']!='Sovereign','INDUSTRY_GROUP']='Corporate'
     self.df.rename(columns={'TICKER':'Issuer','CNTRY_OF_RISK':'Country','INDUSTRY_GROUP':'Industry'},inplace=True)
     self.df=self.df.join(countries.set_index('Country code',verify_integrity=True)['Region'],on='Country')
     self.df['Country'].fillna('na',inplace=True)
     self.df['Region'].fillna('na',inplace=True)
     # Keep London books only.
     self.df=self.df[self.df['Book'].apply(filterLondonBooks)]
     pass
 def __init__(self, rebuild=False, forceLastDay=False):
     """Load MarketAxess inquiry history into self.df.

     Keyword arguments:
     rebuild : force a full rebuild from the raw files
     forceLastDay : reload the latest day even if the cache is current
     """
     self.savepath = MAPATH+'ma_full.csvz'
     # Rebuild from scratch, refresh the last day, or read the bz2 cache.
     if rebuild or (not os.path.exists(self.savepath)):
         self.load_files_full()
     elif datetime.datetime.fromtimestamp(os.path.getmtime(self.savepath)).date()<datetime.datetime.today().date() or forceLastDay:
         self.load_files()
     else:
         self.df = pandas.read_csv(self.savepath, parse_dates=['Inquiry Timestamp'], index_col=0, compression='bz2', dtype=IMPORT_DIC, low_memory=False)#, usecols=USECOLS)
     self.df = self.df[['Status','Client','CLT Trader','Bid/Offer','ISIN','Inquiry Timestamp','Currency','Local Inquiry Volume','Product']]
     # Emerging Markets desk only; 'Product' is redundant afterwards.
     self.df = self.df[self.df['Product']=='Emerging Markets'].copy()
     del self.df['Product']
     self.df.rename(columns = {'Local Inquiry Volume':'AbsQty', 'Currency':'CCY','Inquiry Timestamp':'Date'}, inplace = True)
     self.df = self.df[self.df['CCY'].isin(['USD','EUR','CHF','GBP'])]
     self.df = self.df.join(allisins, on = 'ISIN')
     # Map MA client names to internal counterparty codes.
     ma_counterparties = counterparties[counterparties['MAName'].notnull()]
     ma_counterparties.set_index('MAName', inplace=True)
     self.df = self.df.join(ma_counterparties['Counterparty'],on='Client')
     # USD conversion with hard-coded 2017 FX rates.
     self.df = self.df.join(ccy['2017'],on='CCY')
     #self.df['AbsUSDQty'] = self.df.apply(lambda row:row['AbsQty']/ccy.loc[row['CCY'],'2016'],axis=1) ##TOO SLOW
     self.df['AbsUSDQty'] = self.df['AbsQty'] / self.df['2017']
     del self.df['2017']
     # Signed quantity: 'Offer' inquiries get a negative sign.
     self.df['USDQty'] = self.df['AbsUSDQty']
     self.df.loc[self.df['Bid/Offer']=='Offer','USDQty'] = -self.df.loc[self.df['Bid/Offer']=='Offer','USDQty']
     self.df = self.df.join(bonds[['TICKER','CNTRY_OF_RISK']],on='Bond')
     self.df.rename(columns={'TICKER':'Issuer','CNTRY_OF_RISK':'Country'},inplace=True)
     self.df = self.df.join(countries.set_index('Country code',verify_integrity=True)['Region'],on='Country')
     # Keep the full timestamp in DateDT; 'Date' becomes a date-only column.
     self.df['DateDT'] = self.df['Date']
     self.df['Date'] = self.df['DateDT'].apply(lambda x:x.date())
     # we filter for error trades
     self.df = self.df[self.df['AbsUSDQty']<100000] # filtering for likely error trades
     #del self.df['Client'] - WE NEED THIS SO WE CAN LOOK FOR NAN
     self.df['Venue'] = 'MA'
    def build_positions(self):
        """Builds trade positions. Function is called when building trade history data in __init__

        Aggregates net quantity per bond, enriches with bond/country
        reference data, drops matured bonds, and converts quantities to
        USD using hard-coded 2016 FX rates. Result goes to self.positions.
        """
        positions = self.df.groupby(self.df['Bond'])['Qty'].sum()
        positions = pandas.DataFrame(positions)
        #positions=positions[positions['Qty']!=0]#this seems to mess up the risktree build on position refresh
        positions['Issuer'] = bonds['TICKER']
        positions['Country'] = bonds['CNTRY_OF_RISK']
        positions['CCY'] = bonds['CRNCY']
        positions['Maturity'] = bonds['MATURITY']
        positions['MaturityDT'] = positions['Maturity'].apply(getMaturityDate)
        # Drop matured bonds.
        positions = positions[positions['MaturityDT']>=datetime.datetime.today()]
        positions = positions.join(countries.set_index('Country code',verify_integrity=True)['Region'],on='Country')
        positions['Country'].fillna('na',inplace=True)
        positions['Region'].fillna('na',inplace=True)
        try:
            # This can fail if you add a new bond and then forget to update
            # the bonduniverse; best effort - fall back to 0.
            positions['USDQty'] = positions.apply(lambda row:row['Qty']/ccy.loc[row['CCY'],'2016'],axis=1)
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
            positions['USDQty'] = 0
        positions['Bond'] = positions.index#needed
        self.positions = positions
 def build_positionsISINBook(self):
     """only launched once to get the start of day risk, which will be used for PnL

     Aggregates net quantity per (ISIN, Book), filters near-zero
     positions and matured bonds, joins reference data, and tags each
     line as REGS or 144A series. Result goes to self.positionsByISINBook.
     """
     positions = self.df.groupby(['ISIN','Book'],as_index=False)['Qty'].sum()
     positions = positions[(positions['Qty']>1) | (positions['Qty']<-1)]#useful here, filter zero positions and errors on amortized bonds
     positions = positions.join(allisins,on='ISIN')
     positions = positions.join(bonds['TICKER'],on='Bond')
     positions = positions.join(bonds['CNTRY_OF_RISK'],on='Bond')
     positions = positions.join(bonds['CRNCY'],on='Bond')
     positions = positions.join(bonds['MATURITY'],on='Bond')
     positions['MaturityDT']=positions['MATURITY'].apply(getMaturityDate)
     # Drop matured bonds.
     positions=positions[positions['MaturityDT']>=datetime.datetime.today()]
     positions.rename(columns={'Qty':'SOD_Pos','CNTRY_OF_RISK':'Country','TICKER':'Issuer','CRNCY':'CCY'},inplace=True)
     positions = positions.join(countries.set_index('Country code',verify_integrity=True)['Region'],on='Country')
     positions['Country'].fillna('na',inplace=True)
     positions['Region'].fillna('na',inplace=True)
     positions=positions[['Bond','Book','CCY','ISIN','Issuer','Country', 'Region', 'SOD_Pos']]
     positions=positions[pandas.notnull(positions['Bond'])]#filter for bonds that are not understood
     # Unique key per (book, ISIN) line.
     positions['Key'] = positions['Book']+'-'+positions['ISIN']
     # Tag the series: ISIN matching the bond's REGS ISIN -> REGS, else 144A.
     positions['Series'] = ''
     for (i,row) in positions.iterrows():
         if row['ISIN']==bonds.loc[row['Bond'],'REGS']:
             positions.loc[i,'Series'] = 'REGS'
         else:
             positions.loc[i,'Series'] = '144A'
     self.positionsByISINBook = positions
     pass
    def build_positions_new(self):
        """Build net positions per bond, split by REGS / 144A series.

        Aggregates quantities per (ISIN, Book), tags each line's series,
        pivots to one row per bond with REGS/144A columns, enriches with
        reference data and converts the total to USD (hard-coded 2017
        rates). Result goes to self.positions_new.
        """
        positions = self.df.groupby(['ISIN','Book'],as_index=False)['Qty'].sum()
        positions = positions[(positions['Qty']>1) | (positions['Qty']<-1)]#useful here, filter zero positions and errors on amortized bonds
        positions = positions.join(allisins,on='ISIN')
        positions['Series'] = ''
        positions = positions.loc[positions['Bond'].notnull()].copy()
        # Tag the series: ISIN matching the bond's REGS ISIN -> REGS, else 144A.
        for (i,row) in positions.iterrows():
            if row['ISIN'] == bonds.loc[row['Bond'],'REGS']:
                positions.loc[i,'Series'] = 'REGS'
            else:
                positions.loc[i,'Series'] = '144A'
        grp = positions.groupby(['Bond','Series'])
        positions = grp['Qty'].sum().unstack().fillna(0)
        # Bug fix: after the unstack there is no 'Qty' column any more, so the
        # USD conversion below always raised (KeyError) and silently fell back
        # to 0. Recreate the total quantity as the sum of the series columns.
        positions['Qty'] = positions.sum(axis=1)
        positions['Issuer'] = bonds['TICKER']
        positions['Country'] = bonds['CNTRY_OF_RISK']
        positions['CCY'] = bonds['CRNCY']
        positions['Maturity'] = bonds['MATURITY']
        positions['MaturityDT'] = positions['Maturity'].apply(getMaturityDate)
        # Drop matured bonds.
        positions = positions[positions['MaturityDT']>=datetime.datetime.today()]
        positions = positions.join(countries.set_index('Country code',verify_integrity=True)['Region'],on='Country')
        positions['Country'].fillna('na',inplace=True)
        positions['Region'].fillna('na',inplace=True)
        try:
            # This can fail if you add a new bond and then forget to update
            # the bonduniverse; best effort - fall back to 0.
            positions['USDQty'] = positions.apply(lambda row:row['Qty']/ccy.loc[row['CCY'],'2017'],axis=1)
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
            positions['USDQty'] = 0
        positions['Bond'] = positions.index#needed
        self.positions_new = positions
 def postcleanup(self):
     """Function to cleanse data after split trades have been remerged. <--Still in use?

     Converts quantities to USD using per-year FX rates, computes sales
     credit / markup amounts, sorts by trade datetime, joins bond and
     country reference data, and keeps London books only.
     NOTE(review): appears to duplicate an identical postcleanup earlier
     in this file - confirm which copy is live.
     """
     # Default USDQty to the local quantity; non-USD rows are overwritten below.
     self.df['USDQty']=self.df['Qty']
     nusd=self.df[self.df['CCY']!='USD'][['USDQty','Qty','Year','CCY']].copy()
     # Convert each (year, currency) bucket with that year's FX rate.
     for y in range(2009,2017,1):
         for c in ccy.index:
             nusd.loc[(nusd['Year']==y) & (nusd['CCY']==c),'USDQty']=nusd['Qty']/ccy.loc[c,str(y)]#bitwise & - careful! otherwise pandas.np.all(c1,c2,axis=0) works
     self.df.loc[self.df['CCY']!='USD','USDQty']=nusd['USDQty']
     self.df['MKu'].fillna(0,inplace=True)
     self.df['AbsQty']=self.df['USDQty'].abs()
     # SCu/MKu are quoted in basis points of absolute USD quantity.
     self.df['SC']=self.df['SCu']*self.df['AbsQty']/10000.
     self.df['MK']=self.df['MKu']*self.df['AbsQty']/10000.
     self.df['TotalSC']=self.df['SC']+self.df['MK']
     self.df.sort_values(by='DateDT',inplace=True)
     self.df.reset_index(inplace=True)
     # Enrich with bond reference data, then bucket industry into
     # Sovereign vs Corporate.
     self.df=self.df.join(bonds['TICKER'], on='Bond')
     self.df=self.df.join(bonds['CNTRY_OF_RISK'], on='Bond')
     self.df=self.df.join(bonds['INDUSTRY_GROUP'], on='Bond')
     self.df.loc[self.df['INDUSTRY_GROUP']!='Sovereign','INDUSTRY_GROUP']='Corporate'
     self.df.rename(columns={'TICKER':'Issuer','CNTRY_OF_RISK':'Country','INDUSTRY_GROUP':'Industry'},inplace=True)
     self.df=self.df.join(countries.set_index('Country code',verify_integrity=True)['Region'],on='Country')
     self.df['Country'].fillna('na',inplace=True)
     self.df['Region'].fillna('na',inplace=True)
     # Keep London books only.
     self.df=self.df[self.df['Book'].apply(filterLondonBooks)]
     pass
Exemple #7
0
 def __init__(self, rebuild=False):
     """Load MarketAxess inquiry history into self.df (2016-rate version).

     Keyword argument:
     rebuild : force a full rebuild from the raw files
     """
     self.savepath = MAPATH+'ma_full.csvz'
     if rebuild:
         self.load_files_full()
     # NOTE(review): this is an independent 'if', not 'elif' - after a
     # rebuild the stale-cache branch may load files a second time;
     # confirm that is intended.
     if not(os.path.exists(self.savepath)) or datetime.datetime.fromtimestamp(os.path.getmtime(self.savepath)).date()<datetime.datetime.today().date():
         self.load_files()
     else:
         self.df = pandas.read_csv(self.savepath, parse_dates=['Inquiry Timestamp'], index_col=0, compression='bz2')
     self.df = self.df[['Status','Client','CLT Trader','Bid/Offer','ISIN','Inquiry Timestamp','Currency','Local Inquiry Volume']]
     self.df.rename(columns = {'Local Inquiry Volume':'AbsQty', 'Currency':'CCY','Inquiry Timestamp':'Date'}, inplace = True)
     self.df = self.df[self.df['CCY'].isin(['USD','EUR','CHF','GBP'])]
     self.df = self.df.join(allisins, on = 'ISIN')
     # Map MA client names to internal counterparty codes.
     ma_counterparties = counterparties[counterparties['MAName'].notnull()]
     ma_counterparties.set_index('MAName', inplace=True)
     self.df = self.df.join(ma_counterparties['Counterparty'],on='Client')
     # USD conversion with hard-coded 2016 FX rates.
     self.df = self.df.join(ccy['2016'],on='CCY')
     #self.df['AbsUSDQty'] = self.df.apply(lambda row:row['AbsQty']/ccy.loc[row['CCY'],'2016'],axis=1) ##TOO SLOW
     self.df['AbsUSDQty'] = self.df['AbsQty'] / self.df['2016']
     del self.df['2016']
     # Signed quantity: 'Offer' inquiries get a negative sign.
     self.df['USDQty'] = self.df['AbsUSDQty']
     self.df.loc[self.df['Bid/Offer']=='Offer','USDQty'] = -self.df.loc[self.df['Bid/Offer']=='Offer','USDQty']
     self.df = self.df.join(bonds[['TICKER','CNTRY_OF_RISK']],on='Bond')
     self.df.rename(columns={'TICKER':'Issuer','CNTRY_OF_RISK':'Country'},inplace=True)
     self.df = self.df.join(countries.set_index('Country code',verify_integrity=True)['Region'],on='Country')
     # Keep the full timestamp in DateDT; 'Date' becomes a date-only column.
     self.df['DateDT'] = self.df['Date']
     self.df['Date'] = self.df['DateDT'].apply(lambda x:x.date())
Exemple #8
0
 def __init__(self, th, parent):
     """
     Keyword arguments:
     parent : parent 
     th = trade history (defaults to empty array if not specified)

     Builds the risk tree (positions grouped by region/country/issuer)
     and the book/PnL tree (positions keyed by book-ISIN), then
     subscribes to price/position pub-sub events.
     """
     self.th = th
     self.parent = parent
     self.EODPricesFilled = False
     #self.LivePricesFilled = False
     self.bdmReady = False
     # Guards shared state updated from pub-sub callbacks.
     self.lock = threading.Lock()
     self.cntrymap = countries.set_index('Country code')
     self.cntrymap.rename(columns={'Long name': 'LongCountry'},
                          inplace=True)
     self.riskFreeIssuers = ['T', 'DBR', 'UKT', 'OBL']
     #RISK TREE
     self.th.positions['EODPrice'] = 0.0
     self.th.positions['EODValue'] = 0.0
     #self.th.positions['Risk'] = 0.0
     # Only display positions with at least 1 unit either way.
     self.displayPositions = self.th.positions[
         (self.th.positions['Qty'] <= -1) |
         (self.th.positions['Qty'] >= 1)].copy()
     # del self.displayPositions['REGS']
     # del self.displayPositions['144A']
     # self.displayPositions = self.displayPositions.join(bonds['REGS'])
     self.displayPositions = self.displayPositions.join(
         self.cntrymap['LongCountry'], on='Country')
     self.displayPositions['NewTrades'] = 0.0
     self.displayPositions.index.name = 'BondName'  # to avoid warning in the next line
     self.displayGroup = self.displayPositions.groupby(
         ['Region', 'LongCountry', 'Issuer', 'Bond']).sum()
     #BOOK AND PnL TREE
     self.th.positionsByISINBook['Qty'] = self.th.positionsByISINBook[
         'SOD_Pos']  #Qty will be current, SOD is start of day
     # Placeholder columns filled later by price/risk updates.
     # NOTE(review): pandas.np was deprecated and removed in modern pandas;
     # prefer numpy.nan directly when migrating.
     for c in [
             'EODPrice', 'EODValue', 'PriceY', 'Risk', 'USDQty', 'PriceT',
             'SODPnL', 'TradePnL', 'TotalPnL', 'MK', 'PRINCIPAL_FACTOR',
             'RISK_MID', 'SAVG', 'IRRisk'
     ]:
         self.th.positionsByISINBook[c] = pandas.np.nan
     self.th.positionsByISINBook = self.th.positionsByISINBook.join(
         self.cntrymap['LongCountry'], on='Country')
     self.th.positionsByISINBook.set_index('Key', inplace=True)
     self.th.positionsByISINBook['NewTrades'] = 0.0
     self.displayGroupBook = self.th.positionsByISINBook.groupby(
         ['Book', 'LongCountry', 'Issuer', 'Bond', 'Series']).sum()
     self.traded_bonds = []  # IMPORTANT
     self.new_trades = self.th.df[self.th.df['Date'] == todayDateSTR].copy()
     self.EODPrices = self.th.positions['EODPrice'].copy()
     pub.subscribe(self.updatePrice, "BOND_PRICE_UPDATE")
     pub.subscribe(self.switchBDMReady, "BDM_READY")
     pub.subscribe(self.onUpdateTree, "POSITION_UPDATE")
     #pub.subscribe(self.updateBGNPrices, "BGN_PRICE_UPDATE")
     pass
 def onUpdateTreeNew(self, firstBuild=False):
     """Rebuild the risk tree display.

     Keyword argument:
     firstBuild : when True, build the grouped display positions from
     scratch and build the tree directly; otherwise schedule a rebuild
     on the wx GUI thread.
     """
     # Needed because this may run on a non-main thread that touches COM.
     pythoncom.CoInitialize()
     if firstBuild:
         self.th.positions['EODPrice']=0
         self.th.positions['EODValue']=0
         # Only display positions with at least 1 unit either way.
         self.displayPositions=self.th.positions[(self.th.positions['Qty']<=-1) | (self.th.positions['Qty']>=1)].copy()
         self.displayPositions=self.displayPositions.join(bonds['REGS'])
         c=countries.set_index('Country code')
         self.displayPositions=self.displayPositions.join(c['Long name'],on='Country')
         self.displayPositions.rename(columns={'Long name':'LongCountry'},inplace=True)
         displayGroup=self.displayPositions.groupby(['Region','LongCountry','Issuer','Bond']).sum()
         self.doBuildTree(displayGroup,[])
     else:
         # GUI updates must happen on the wx main loop thread.
         wx.CallAfter(self.treeRebuild)
Exemple #10
0
 def __init__(self,filename):
     """Load a MarketAxess trade export from MAPATH+filename.

     Keyword argument:
     filename : csv file name (sizes use ',' as thousands separator)

     Builds self.df with signed quantities (offers are negative), USD
     quantities at hard-coded 2016 FX rates, and bond/country/region
     reference data.
     """
     ma=pandas.read_csv(MAPATH+filename,thousands=',')
     self.df=ma[['Action','Size (000\'s)','Currency','Security','Identifier','Client','Client Trader','Date']].copy()
     self.df.rename(columns={'Size (000\'s)':'AbsQty','Currency':'CCY','Client':'MACounterparty','Identifier':'ISIN'}, inplace = True)
     self.df=self.df[self.df['CCY'].isin(['USD','EUR','CHF','GBP'])]
     # Sizes come in thousands.
     self.df['AbsQty']=self.df['AbsQty']/1000.
     # Signed quantity: 'Offer' rows flip the sign.
     # (Removed a redundant no-op self-assignment of 'Qty' that followed.)
     self.df['Qty']=self.df['AbsQty']
     self.df.loc[self.df['Action']=='Offer','Qty']=-1*self.df['AbsQty']
     self.df=self.df.join(allisins,on='ISIN')
     # USD conversion with hard-coded 2016 FX rates.
     self.df['USDQty']=self.df.apply(lambda row:row['Qty']/ccy.loc[row['CCY'],'2016'],axis=1)
     self.df=self.df.join(bonds[['TICKER','CNTRY_OF_RISK']],on='Bond')
     self.df.rename(columns={'TICKER':'Issuer','CNTRY_OF_RISK':'Country'},inplace=True)
     self.df=self.df.join(countries.set_index('Country code',verify_integrity=True)['Region'],on='Country')
    def build_positions(self):
        """Builds trade positions. Function is called when building trade history data in __init__

        First builds self.positions (net quantity per bond with reference
        data and USD conversion at hard-coded 2017 FX rates), then
        computes the REGS / 144A split per bond and attaches it as two
        extra columns.
        """
        positions = self.df.groupby(self.df['Bond'])['Qty'].sum()
        positions = pandas.DataFrame(positions)
        #positions=positions[positions['Qty']!=0]#this seems to mess up the risktree build on position refresh
        positions['Issuer'] = bonds['TICKER']
        positions['Country'] = bonds['CNTRY_OF_RISK']
        positions['CCY'] = bonds['CRNCY']
        positions['Maturity'] = bonds['MATURITY']
        positions['MaturityDT'] = positions['Maturity'].apply(getMaturityDate)
        # Drop matured bonds.
        positions = positions[positions['MaturityDT']>=datetime.datetime.today()]
        positions = positions.join(countries.set_index('Country code',verify_integrity=True)['Region'],on='Country')
        positions['Country'].fillna('na',inplace=True)
        positions['Region'].fillna('na',inplace=True)
        try:
            # This can fail if you add a new bond and then forget to update
            # the bonduniverse; best effort - fall back to 0.
            positions['USDQty'] = positions.apply(lambda row:row['Qty']/ccy.loc[row['CCY'],'2017'],axis=1)
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
            positions['USDQty'] = 0
        positions['Bond'] = positions.index#needed
        self.positions = positions
        # REGS / 144A split: tag each (ISIN, Book) line with its series,
        # then pivot to one row per bond.
        positions = self.df.groupby(['ISIN','Book'],as_index=False)['Qty'].sum()
        positions = positions[(positions['Qty']>1) | (positions['Qty']<-1)]#useful here, filter zero positions and errors on amortized bonds
        positions = positions.join(allisins, on='ISIN')
        positions['Series'] = ''
        positions = positions.loc[positions['Bond'].notnull()].copy()
        for (i, row) in positions.iterrows():
            if row['ISIN'] == bonds.loc[row['Bond'], 'REGS']:
                positions.loc[i,'Series'] = 'REGS'
            else:
                positions.loc[i,'Series'] = '144A'
        grp = positions.groupby(['Bond','Series'])
        positions = grp['Qty'].sum().unstack().fillna(0)
        # Guarantee both columns exist even if one series never traded.
        if not 'REGS' in positions.columns:
            positions['REGS'] = 0
        if not '144A' in positions.columns:
            positions['144A'] = 0
        self.positions[['REGS','144A']] = positions[['REGS','144A']]
    def __init__(self, rebuild=False, forceLastDay=False):
        """Load Bloomberg trade history into self.df.

        Keyword arguments:
        rebuild : force a full rebuild from the raw files
        forceLastDay : reload the latest day even if the cache is current
        """
        self.savepath = BBGPATH+'bbg_full.csvz'
        # Rebuild from scratch, refresh the last day, or read the bz2 cache.
        if rebuild or (not os.path.exists(self.savepath)):
            self.load_files_full()
        elif datetime.datetime.fromtimestamp(os.path.getmtime(self.savepath)).date()<datetime.datetime.today().date() or forceLastDay:
            self.load_files()
        else:
            self.df = pandas.read_csv(self.savepath, index_col=0, compression='bz2', dtype=IMPORT_DIC_BBG)#, usecols=USECOLS)

        self.df = self.df[['Time','DlrSide','Qty (M)','Status','Customer', 'ISIN','Trade Dt','UserName']]
        self.df = self.df.copy()
        self.df['strDate'] = self.df['Trade Dt'] + ' ' + self.df['Time']
        # Convert NY time to UK time - adding 5h which is not always correct (DST)
        self.df['DateDT'] = pandas.to_datetime(self.df['strDate']) + pandas.Timedelta(hours=5)
        self.df['Date'] = self.df['DateDT'].apply(lambda x: x.date())
        # self.df['Time'] =  self.df['DateDT'].dt.strftime('%X')
        del self.df['strDate']
        del self.df['Time']
        del self.df['Trade Dt']
        #
        self.df = self.df.join(allisins, on = 'ISIN')
        self.df = self.df.join(bonds['CRNCY'], on = 'Bond')
        # NOTE(review): the '******' rename target below looks like a redacted
        # column name from the original source - confirm the intended name.
        self.df.rename(columns = {'Qty (M)':'AbsQty', 'Customer': 'Client', 'CRNCY': 'CCY', 'DlrSide': 'Bid/Offer', 'UserName': '******'}, inplace = True)
        # Map BBG client names to internal counterparty codes.
        bbg_counterparties = counterparties[counterparties['BBGName'].notnull()]
        bbg_counterparties.set_index('BBGName', inplace=True)
        self.df = self.df.join(bbg_counterparties['Counterparty'], on='Client')
        # USD conversion with hard-coded 2017 FX rates.
        self.df = self.df.join(ccy['2017'], on='CCY')
        #self.df['AbsUSDQty'] = self.df.apply(lambda row:row['AbsQty']/ccy.loc[row['CCY'],'2016'],axis=1) ##TOO SLOW
        self.df['AbsUSDQty'] = self.df['AbsQty'] / self.df['2017']
        del self.df['2017']
        # Signed quantity: sell side ('S') flips the sign.
        self.df['USDQty'] = self.df['AbsUSDQty']
        self.df.loc[self.df['Bid/Offer']=='S','USDQty'] = -self.df.loc[self.df['Bid/Offer']=='S','USDQty']
        self.df = self.df.join(bonds[['TICKER','CNTRY_OF_RISK']],on='Bond')
        self.df.rename(columns={'TICKER':'Issuer','CNTRY_OF_RISK':'Country'},inplace=True)
        self.df = self.df.join(countries.set_index('Country code',verify_integrity=True)['Region'],on='Country')
        self.df['Venue'] = 'BBG'
 def __init__(self, th, parent):
     """Keyword arguments:
     parent : parent 
     th = trade history (defaults to empty array if not specified)

     Builds the risk tree (positions grouped by region/country/issuer)
     and the book/PnL tree (positions keyed by book-ISIN), then
     subscribes to price/position pub-sub events.
     """
     self.th = th
     self.parent = parent
     self.EODPricesFilled = False
     #self.LivePricesFilled = False
     self.bdmReady = False
     # Guards shared state updated from pub-sub callbacks.
     self.lock = threading.Lock()
     self.cntrymap = countries.set_index('Country code')
     self.cntrymap.rename(columns={'Long name':'LongCountry'}, inplace=True)
     self.riskFreeIssuers = ['T','DBR','UKT']
     #RISK TREE
     self.th.positions['EODPrice'] = 0.0
     self.th.positions['EODValue'] = 0.0
     self.th.positions['Risk'] = 0.0
     # Only display positions with at least 1 unit either way.
     self.displayPositions = self.th.positions[(self.th.positions['Qty']<=-1) | (self.th.positions['Qty']>=1)].copy()
     self.displayPositions = self.displayPositions.join(bonds['REGS'])
     self.displayPositions = self.displayPositions.join(self.cntrymap['LongCountry'],on='Country')
     self.displayGroup = self.displayPositions.groupby(['Region','LongCountry','Issuer','Bond']).sum()
     #BOOK AND PnL TREE
     self.th.positionsByISINBook['Qty'] = self.th.positionsByISINBook['SOD_Pos']#Qty will be current, SOD is start of day
     # Placeholder columns filled later by price/risk updates.
     # Fix: pandas.np was deprecated and removed in modern pandas; a plain
     # float NaN is stored identically by pandas.
     for c in ['EODPrice','EODValue','PriceY','Risk','USDQty','PriceT','SODPnL','TradePnL','TotalPnL','MK','PRINCIPAL_FACTOR','RISK_MID']:
         self.th.positionsByISINBook[c] = float('nan')
     self.th.positionsByISINBook = self.th.positionsByISINBook.join(self.cntrymap['LongCountry'],on='Country')
     self.th.positionsByISINBook.set_index('Key',inplace=True)
     self.displayGroupBook = self.th.positionsByISINBook.groupby(['Book','LongCountry','Issuer','Bond','Series']).sum()
     self.traded_bonds = [] # IMPORTANT
     self.new_trades = self.th.df[self.th.df['Date']==todayDateSTR].copy()
     self.EODPrices = self.th.positions['EODPrice'].copy()
     pub.subscribe(self.updatePrice, "BOND_PRICE_UPDATE")
     pub.subscribe(self.switchBDMReady, "BDM_READY")
     pub.subscribe(self.onUpdateTree, "POSITION_UPDATE")
     pass
 def treeRebuild(self):
     """Rebuild the risk tree from scratch.

     Refreshes EOD prices (querying yesterday's close for newly traded
     bonds), retrieves principal factors for new bonds from Bloomberg,
     recomputes EOD values and rebuilds the grouped display tree.
     """
     self.tree.DeleteAllItems()
     self.root = self.tree.AddRoot("Total")
     # Needed because this may run on a non-main thread that touches COM.
     pythoncom.CoInitialize()
     # Offsets to the previous business day, indexed by weekday().
     _offsets = (3, 1, 1, 1, 1, 1, 2)
     yesterday = (datetime.datetime.today() - datetime.timedelta(days=_offsets[datetime.datetime.today().weekday()])).strftime('%Y-%m-%d')
     c=countries.set_index('Country code')
     traded_bonds = self.th.df[self.th.df['Date']==todayDateSTR]['Bond'].drop_duplicates().dropna().copy()
     # Bonds traded today that were not in the start-of-day display.
     new_bonds = list(set(traded_bonds)-set(self.displayPositions.index))
     self.th.positions['EODPrice']=self.EODPrices
     self.th.positions['EODPrice'].fillna(0,inplace=True)
     #print new_bonds
     for bond in new_bonds:
         # Fall back to the last traded price if no close is available.
         price = self.fc.historical_price_query(bonds.loc[bond,'REGS'], yesterday)
         if price==0:
             price = self.th.df[self.th.df['Bond']==bond].iloc[-1]['Price']
         self.th.positions.loc[bond,'EODPrice'] = price
     self.EODPrices = self.th.positions['EODPrice'].copy()
     #Retrieve principal factor for traded bonds
     self.th.positions['PRINCIPAL_FACTOR']=self.principalFactor
     if len(new_bonds)>0:
         # NOTE(review): map() returns an iterator on Python 3 - this code
         # appears to assume Python 2 list semantics; confirm before migrating.
         newisins=map(lambda x:bonds.loc[x,'REGS']+ ' Corp',new_bonds)
         blpts = blpapiwrapper.BLPTS(newisins, ['PRINCIPAL_FACTOR'])
         blpts.get()
         blpts.closeSession()
         # Strip the ' Corp' suffix to recover the ISIN, then map back to bond names.
         blpts.output['REGS'] = blpts.output.index.str[:-5]
         blpts.output['Bond'] = blpts.output['REGS'].replace(isinsregs)
         blpts.output.set_index('Bond', inplace=True)
         self.th.positions.loc[new_bonds,'PRINCIPAL_FACTOR']=blpts.output['PRINCIPAL_FACTOR'].astype(float)
         self.principalFactor=self.th.positions['PRINCIPAL_FACTOR']
     # EOD value in USD, scaled by principal factor (amortizing bonds).
     self.th.positions['EODValue']=self.th.positions['EODPrice']*self.th.positions['USDQty']/100.*(self.th.positions['PRINCIPAL_FACTOR'])
     self.displayPositions=self.th.positions.loc[list(self.displayPositions.index)+new_bonds]#SOD risk + new trades
     self.displayPositions=self.displayPositions.join(c['Long name'],on='Country')
     self.displayPositions.rename(columns={'Long name':'LongCountry'},inplace=True)
     displayGroup=self.displayPositions.groupby(['Region','LongCountry','Issuer','Bond']).sum()
     self.doBuildTree(displayGroup,traded_bonds)
Exemple #15
0
    def getBookPnL(self, book):
        """Function to get PnLitems and calculate PnL for the specified book.

        Keyword argument:
        book : 'APGSG'/'HYCRE'/'KAZAK'/'RUSSI'/'SPCOR'/'SPOT'/'SPTEE'/'TURKE'/'STLBK'

        Returns a DataFrame with start-of-day, trade and total PnL per
        ISIN, with the total also converted to USD.
        """
        if self.tradeHistory==[]:
            # No trade history available: fall back to the book snapshot.
            df = self.fc.load_book(book,self.today)
        else:
            df=self.tradeHistory.positionsByISINBook[self.tradeHistory.positionsByISINBook['Book']==book].copy()
            df.set_index('ISIN',drop=False,inplace=True)
            df['PriceY'] = 0.0
            df['PriceT'] = 0.0
            df = df[['Bond','ISIN','SOD_Pos','PriceY','PriceT']]
            # Yesterday's close and today's price for each ISIN.
            df['PriceY'] = df['ISIN'].apply(lambda i:self.fc.historical_price_query(i,self.yesterday))
            df['PriceT'] = df['ISIN'].apply(lambda i:self.fc.closing_price_query(i))
        df = df.drop_duplicates('ISIN')#seems some spurious data in STLBK
        # Add ISINs traded today that had no start-of-day position.
        newTradesB=self.newTrades[self.newTrades['Book']==book].copy()
        newIsins=set(newTradesB['ISIN'])-set(df['ISIN'])
        for i in newIsins:
            if not i in allisins:
                continue
            row=[allisins[i],i,0,self.fc.historical_price_query(i,self.yesterday),self.fc.closing_price_query(i)]
            rowdf=pandas.DataFrame(data=[row],columns=df.columns)
            df=df.append(rowdf,ignore_index=True)
        df['dP']=df['PriceT']-df['PriceY']
        #Added a try here, because program crashes if newTrades.csv is empty
        try:
            if book in self.positionDeltas.index.get_level_values('Book'): #This line can't execute if newTrades.csv is empty
                df=df.join(self.positionDeltas[book],on='ISIN')
            else:
                df['Qty']=0
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
            df['Qty']=0 #Manually create a new column and hack it to 0
        df['Qty'].fillna(0,inplace=True)
        df.rename(columns={'Qty':'dPos'},inplace=True)
        if self.tradeHistory==[]:
            df.rename(columns={'Position':'EOD_Pos'},inplace=True)
            df['SOD_Pos']=df['EOD_Pos']-df['dPos']
        else:
            df['EOD_Pos']=df['SOD_Pos']+df['dPos']
        # Start-of-day PnL: SOD position times price move (prices in percent).
        df['SOD_PnL']=df['SOD_Pos']*df['dP']/100.
        df['Book']=book
        df['Key']=df['ISIN'].apply(lambda x:book+'-'+x)
        df=df.set_index('ISIN', drop=False, verify_integrity=True)
        # Trade PnL: today's trades marked against today's price.
        newTradesB=newTradesB.join(df['PriceT'],on='ISIN')
        newTradesB['TradePnL']=newTradesB['Qty']*(newTradesB['PriceT']-newTradesB['Price'])/100.
        isinPnL=newTradesB.groupby('ISIN')['TradePnL'].sum()
        df['Trade_PnL']=isinPnL
        df['Trade_PnL'].fillna(0,inplace=True)
        df['Total_PnL']=df['SOD_PnL']+df['Trade_PnL']
        df=df[df['Total_PnL']!=0]
        df=df.join(bonds['CNTRY_OF_RISK'], on='Bond')
        df=df.join(bonds['CRNCY'], on='Bond')
        df=df.join(bonds['TICKER'], on='Bond')
        df=df.join(countries.set_index('Country code'), on='CNTRY_OF_RISK')
        df.rename(columns={'Long name':'Country','TICKER':'Issuer'},inplace=True)
        del df['CNTRY_OF_RISK']
        # Convert non-USD PnL to USD (hard-coded 2015 FX rates).
        # NOTE(review): other methods use 2016/2017 rates - confirm 2015 is intended.
        df['USD_Total_PnL']=df['Total_PnL']
        nusd=df[df['CRNCY']!='USD'][['USD_Total_PnL','Total_PnL','CRNCY']].copy()
        for c in ccy.index:
            i = nusd['CRNCY']==c
            # Fix: Series.get_value is deprecated/removed; use .loc as the
            # rest of this module already does.
            nusd.loc[i,'USD_Total_PnL']=nusd['Total_PnL']/ccy.loc[c,'2015']
        df.loc[df['CRNCY']!='USD','USD_Total_PnL']=nusd['USD_Total_PnL']
        ##HACK - could be pandas bug - https://github.com/pydata/pandas/issues/6322##
        df['Region'].fillna('US',inplace=True)
        df['Country'].fillna('US',inplace=True)
        df['Issuer'].fillna('US',inplace=True)
        ##
        # Fix: DataFrame.sort was removed from pandas; sort_values matches
        # the migration already done elsewhere in this module.
        df.sort_values(by=['Region','Country','Bond'],inplace=True)
        df=df[['Key','Book','Bond','ISIN','CRNCY','Issuer','Region','Country','SOD_Pos','EOD_Pos','dPos','PriceY','PriceT','dP','SOD_PnL','Trade_PnL','Total_PnL','USD_Total_PnL']]
        return df
    def getBookPnL(self, book):
        """Function to get PnLitems and calculate PnL for the specified book.

        Keyword argument:
        book : 'APGSG'/'HYCRE'/'KAZAK'/'RUSSI'/'SPCOR'/'SPOT'/'SPTEE'/'TURKE'/'STLBK'

        Returns a DataFrame with one row per position in the book, carrying
        SOD/EOD positions, yesterday/today prices and the PnL breakdown
        (SOD_PnL, Trade_PnL, Total_PnL, USD_Total_PnL).
        """
        if self.tradeHistory == []:
            # No trade history loaded: fall back to today's book snapshot.
            df = self.fc.load_book(book, self.today)
        else:
            # Start-of-day positions from the precomputed per-ISIN/book table,
            # then fetch yesterday's and today's closing prices per ISIN.
            df = self.tradeHistory.positionsByISINBook[
                self.tradeHistory.positionsByISINBook['Book'] == book].copy()
            df.set_index('ISIN', drop=False, inplace=True)
            df['PriceY'] = 0.0
            df['PriceT'] = 0.0
            df = df[['Bond', 'ISIN', 'SOD_Pos', 'PriceY', 'PriceT']]
            df['PriceY'] = df['ISIN'].apply(
                lambda i: self.fc.historical_price_query(i, self.yesterday))
            df['PriceT'] = df['ISIN'].apply(
                lambda i: self.fc.closing_price_query(i))
        df = df.drop_duplicates('ISIN')  #seems some spurious data in STLBK
        newTradesB = self.newTrades[self.newTrades['Book'] == book].copy()
        # ISINs traded today that were not in the SOD book: add them with a
        # zero SOD position so trade PnL can still be attributed to them.
        newIsins = set(newTradesB['ISIN']) - set(df['ISIN'])
        newRows = []
        for i in newIsins:
            if i not in allisins:
                continue  # unknown ISIN - cannot map it to a bond
            newRows.append([
                allisins[i], i, 0,
                self.fc.historical_price_query(i, self.yesterday),
                self.fc.closing_price_query(i)
            ])
        if newRows:
            # DataFrame.append was removed in pandas 2.0 (and was quadratic
            # when called in a loop) - build the rows once and concat.
            rowdf = pandas.DataFrame(data=newRows, columns=df.columns)
            df = pandas.concat([df, rowdf], ignore_index=True)
        df['dP'] = df['PriceT'] - df['PriceY']
        #Added a try here, because program crashes if newTrades.csv is empty
        try:
            if book in self.positionDeltas.index.get_level_values(
                    'Book'
            ):  #This line can't execute if newTrades.csv is empty
                df = df.join(self.positionDeltas[book], on='ISIN')
            else:
                df['Qty'] = 0
        except Exception:
            df['Qty'] = 0  #Manually create a new column and hack it to 0
        df['Qty'].fillna(0, inplace=True)
        df.rename(columns={'Qty': 'dPos'}, inplace=True)
        if self.tradeHistory == []:
            # Book snapshot gives EOD positions; back out SOD from the deltas.
            df.rename(columns={'Position': 'EOD_Pos'}, inplace=True)
            df['SOD_Pos'] = df['EOD_Pos'] - df['dPos']
        else:
            df['EOD_Pos'] = df['SOD_Pos'] + df['dPos']
        df['SOD_PnL'] = df['SOD_Pos'] * df['dP'] / 100.
        df['Book'] = book
        df['Key'] = df['ISIN'].apply(lambda x: book + '-' + x)
        df = df.set_index('ISIN', drop=False, verify_integrity=True)
        # Trade PnL: qty * (today's close - trade price), summed per ISIN.
        newTradesB = newTradesB.join(df['PriceT'], on='ISIN')
        newTradesB['TradePnL'] = newTradesB['Qty'] * (
            newTradesB['PriceT'] - newTradesB['Price']) / 100.
        isinPnL = newTradesB.groupby('ISIN')['TradePnL'].sum()
        df['Trade_PnL'] = isinPnL
        df['Trade_PnL'].fillna(0, inplace=True)
        df['Total_PnL'] = df['SOD_PnL'] + df['Trade_PnL']
        df = df[df['Total_PnL'] != 0]
        # Enrich with static bond / country reference data.
        df = df.join(bonds['CNTRY_OF_RISK'], on='Bond')
        df = df.join(bonds['CRNCY'], on='Bond')
        df = df.join(bonds['TICKER'], on='Bond')
        df = df.join(countries.set_index('Country code'), on='CNTRY_OF_RISK')
        df.rename(columns={
            'Long name': 'Country',
            'TICKER': 'Issuer'
        },
                  inplace=True)
        del df['CNTRY_OF_RISK']
        # Convert Total_PnL to USD for non-USD bonds using the '2015' FX
        # column. NOTE(review): the hard-coded year looks stale - confirm
        # against the year-aware conversion done in postcleanup.
        df['USD_Total_PnL'] = df['Total_PnL']
        nusd = df[df['CRNCY'] != 'USD'][[
            'USD_Total_PnL', 'Total_PnL', 'CRNCY'
        ]].copy()
        for c in ccy.index:
            i = nusd['CRNCY'] == c
            # Series.get_value was removed in pandas 1.0 - use .loc, matching
            # the FX lookup style already used elsewhere in this class.
            nusd.loc[i, 'USD_Total_PnL'] = nusd['Total_PnL'] / ccy.loc[c,
                                                                       '2015']
        df.loc[df['CRNCY'] != 'USD', 'USD_Total_PnL'] = nusd['USD_Total_PnL']
        ##HACK - could be pandas bug - https://github.com/pydata/pandas/issues/6322##
        df['Region'].fillna('US', inplace=True)
        df['Country'].fillna('US', inplace=True)
        df['Issuer'].fillna('US', inplace=True)
        ##
        # DataFrame.sort was removed in pandas 0.20 - use sort_values, as the
        # rest of this file already does.
        df.sort_values(by=['Region', 'Country', 'Bond'], inplace=True)
        df = df[[
            'Key', 'Book', 'Bond', 'ISIN', 'CRNCY', 'Issuer', 'Region',
            'Country', 'SOD_Pos', 'EOD_Pos', 'dPos', 'PriceY', 'PriceT', 'dP',
            'SOD_PnL', 'Trade_PnL', 'Total_PnL', 'USD_Total_PnL'
        ]]
        return df