def CalcPortfolioHistoricalCost(platform=None, start_date=None, base_ccy='HKD'):
    """Return the accumulated portfolio cost per date, converted into base_ccy.

    platform:   optional platform name to restrict the calculation to.
    start_date: optional window start; defaults to the earliest transaction
                among supported instruments.
    base_ccy:   currency the costs are converted into (default 'HKD').

    Returns a DataFrame with columns ['Date', 'AccumCostHKD'].
    NOTE: the output column is named 'AccumCostHKD' regardless of base_ccy
    (kept for backward compatibility with existing callers).
    """
    if start_date is None:
        # default to the earliest transaction among supported instruments
        tn = setup.GetAllTransactions()
        supported_instruments = setup.GetListOfSupportedInstruments()
        tn = tn[tn.BBGCode.isin(supported_instruments)]
        start_date = tn.Date.min()
    tn_cost = setup.GetTransactionsETFs()
    tn_cost = tn_cost[tn_cost.Date > start_date]
    # balance brought forward: holdings established before start_date
    bf = _GetExistingHoldings(start_date)
    if platform is not None:
        tn_cost = tn_cost[tn_cost.Platform == platform]
        bf = bf[bf.Platform == platform]
    # represent each brought-forward holding as a synthetic Buy on start_date
    # (DataFrame.append was removed in pandas 2.0 - build rows then concat once)
    bf_rows = [{
        'Platform': row.Platform,
        'Date': start_date,
        'Type': 'Buy',
        'BBGCode': row.BBGCode,
        'CostInPlatformCcy': row.CostInPlatformCcy,
        'NoOfUnits': row.NoOfUnits
    } for row in bf.itertuples(index=False)]
    if bf_rows:
        tn_cost = pd.concat([tn_cost, pd.DataFrame(bf_rows)], ignore_index=True)
    tn_cost.sort_values(['Date', 'BBGCode'], inplace=True)
    # convert all values into base ccy before aggregating:
    # map platform -> platform ccy, then platform ccy -> base ccy FX rate
    platform_ccy_mapping = {
        p: setup.GetPlatformCurrency(p)
        for p in tn_cost.Platform.unique()
    }
    tn_cost['PlatformCcy'] = tn_cost.Platform.map(platform_ccy_mapping)
    ToBaseCcyRate = {
        ccy: (1 if ccy == base_ccy else calc_fx.ConvertFX(ccy, base_ccy))
        for ccy in tn_cost.PlatformCcy.unique()
    }
    tn_cost['ToHKDrate'] = tn_cost.PlatformCcy.map(ToBaseCcyRate)
    tn_cost['CostInBaseCcy'] = tn_cost.ToHKDrate * tn_cost.CostInPlatformCcy
    # aggregate per date and accumulate
    agg = tn_cost.groupby(['Date']).agg({'CostInBaseCcy': 'sum'}).reset_index()
    agg['AccumCostHKD'] = agg.CostInBaseCcy.cumsum()
    agg.drop(['CostInBaseCcy'], axis=1, inplace=True)
    return agg
def _GetExistingHoldings(start_date, bbgcode=None, platform=None, base_ccy='HKD'):
    """Return holdings (units and cost) established strictly before start_date.

    Each row carries platform/security currency, the last close before
    start_date, and cost/valuation converted into base_ccy.

    Bug fix: previously, when no market data existed before start_date for a
    security, the function printed a warning but still dereferenced
    v.iloc[-1], raising IndexError. It now skips valuation for that row
    (leaving Close/Valuation columns as NaN) and continues.
    """
    tn = setup.GetAllTransactions()
    tn = tn[tn.Date < start_date]
    if platform is not None:
        tn = tn[tn.Platform == platform]
    # only include the supported instruments
    support_instruments = setup.GetListOfSupportedInstruments()
    tn = tn[tn.BBGCode.isin(support_instruments)]
    holdings = tn.groupby(['Platform', 'BBGCode']).agg({
        'NoOfUnits': 'sum',
        'CostInPlatformCcy': 'sum'
    })
    # drop fully closed-out positions
    holdings = holdings[holdings.NoOfUnits != 0]
    holdings = holdings.reset_index()
    # calculate the cost and valuation in base ccy equivalent
    # (cost in platform ccy, valuation in security ccy)
    historical_data = mdata.GetHistoricalData()
    val = historical_data.copy()
    val = val[val.Date < start_date]
    for i in range(len(holdings)):
        row = holdings.iloc[i]
        holdings.loc[i, 'PlatformCcy'] = setup.GetPlatformCurrency(row.Platform)
        holdings.loc[i, 'SecurityCcy'] = setup.GetSecurityCurrency(row.BBGCode)
        # last available close before start_date for this security
        v = val[val.BBGCode == row.BBGCode]
        if len(v) == 0:
            print('WARNING: no market data - check feed/date range')
            continue  # cannot value this holding; leave valuation columns NaN
        holdings.loc[i, 'Close'] = v.iloc[-1].Close
        holdings.loc[i, 'ValuationInSecCcy'] = holdings.loc[i, 'Close'] * row.NoOfUnits
        # calc base ccy equivalent
        # optimise FX query (if platform ccy = security ccy then use same fx rate)
        same_ccy = holdings.loc[i, 'PlatformCcy'] == holdings.loc[i, 'SecurityCcy']
        if same_ccy:
            fxrate = calc_fx.ConvertFX(holdings.loc[i, 'SecurityCcy'], base_ccy)
            holdings.loc[i, 'CostInBaseCcy'] = fxrate * row.CostInPlatformCcy
            holdings.loc[i, 'ValuationInBaseCcy'] = fxrate * holdings.loc[i, 'ValuationInSecCcy']
        else:
            holdings.loc[i, 'CostInBaseCcy'] = calc_fx.ConvertTo(
                base_ccy, holdings.loc[i, 'PlatformCcy'], row.CostInPlatformCcy)
            holdings.loc[i, 'ValuationInBaseCcy'] = calc_fx.ConvertTo(
                base_ccy, holdings.loc[i, 'SecurityCcy'],
                holdings.loc[i, 'ValuationInSecCcy'])
    return holdings
def CalcPortfolioHistoricalValuation(platform=None, bbgcode=None, start_date=None):
    """Return total portfolio valuation in HKD per date.

    Only applies to instruments supported by Yahoo Finance. Optionally
    restricted to a platform and/or a single bbgcode; start_date defaults to
    the earliest in-scope transaction.

    Returns a DataFrame with columns ['Date', 'ValuationHKD'].
    """
    supported_instruments = setup.GetListOfSupportedInstruments()
    tn = setup.GetAllTransactions()
    tn_in_scope = tn[tn.BBGCode.isin(supported_instruments)]
    instruments_in_scope = supported_instruments
    # if platform is specified, check which instruments were actually on the platform
    if platform is not None:
        tn_in_scope = tn_in_scope[tn_in_scope.Platform == platform]
        instruments_in_scope = list(tn_in_scope.BBGCode.unique())
    # if bbgcode is specified, then restrict to just that instrument
    if bbgcode is not None:
        if bbgcode in instruments_in_scope:
            instruments_in_scope = [bbgcode]
    # if start date is not defined, start from earliest transaction in scope
    if start_date is None:
        start_date = tn_in_scope.Date.min()
    # collect one valuation frame per instrument, then concat once
    # (DataFrame.append was removed in pandas 2.0)
    frames = []
    for code in instruments_in_scope:
        tmp = _CalcValuation(bbgcode=code, platform=platform, start_date=start_date)
        # remove redundant rows (no units held and nothing valued)
        tmp = tmp[~((tmp.NoOfUnits == 0) & (tmp.Holdings == 0))]
        tmp['BBGCode'] = code
        frames.append(tmp)
    if frames:
        df = pd.concat(frames, ignore_index=False)
    else:
        df = pd.DataFrame(columns=['Date', 'BBGCode', 'ValuationHKD'])
    # on each unique date, take the last row per security to avoid duplicated valuation
    df.sort_values(['Date', 'BBGCode'], inplace=True)
    df = df.drop_duplicates(subset=['Date', 'BBGCode'], keep='last')
    # group the data by date
    agg = df.groupby(['Date']).agg({'ValuationHKD': 'sum'})
    agg = agg.reset_index()
    return agg
def GetHistoricalRealisedPnL():
    """Return (hist, pnl): transactions that booked a realised PnL, and that
    PnL summed by Platform/Name/Type/PlatformCurrency."""
    transactions = setup.GetAllTransactions()
    transactions.CostInPlatformCcy = transactions.CostInPlatformCcy.round(2)
    transactions.drop('_id', axis=1, inplace=True)
    # join in platform metadata
    platform_def = _GetPlatformDef()
    transactions = pd.merge(transactions,
                            platform_def,
                            how='left',
                            left_on='Platform',
                            right_on='PlatformName')
    # join in security metadata (drop columns that would collide)
    securities = setup.GetSecurities()
    securities.drop(['_id', 'Currency'], axis=1, inplace=True)
    transactions = pd.merge(transactions,
                            securities,
                            how='left',
                            left_on='BBGCode',
                            right_on='BBGCode')
    # keep only the rows where a realised PnL was actually booked
    hist = transactions[transactions.RealisedPnL.notnull()]
    pnl = hist.groupby(['Platform', 'Name', 'Type',
                        'PlatformCurrency']).RealisedPnL.sum()
    return hist, pnl
def GetStartDate(period=None):
    """Return the reporting start date as a datetime at midnight.

    period: None for since-inception (earliest transaction among supported
            instruments), or one of YTD 1W 1M 3M 6M 1Y/12M 3Y 5Y 10Y
            (case-insensitive).

    Raises ValueError for an unsupported period string (previously this left
    start_date unbound and crashed with UnboundLocalError).
    """
    if period is None:
        # since inception: earliest transaction among supported instruments
        tn = setup.GetAllTransactions()
        supported_instruments = setup.GetListOfSupportedInstruments()
        tn = tn[tn.BBGCode.isin(supported_instruments)]
        start_date = tn.Date.min()
    else:
        period = period.upper()
        # calculate up to end of yesterday (i.e. from start of today backwards)
        today = datetime.datetime.today().date()
        # offsets are lambdas so only the selected one is evaluated
        offsets = {
            'YTD': lambda: datetime.date(today.year, 1, 1),
            '1W': lambda: today + datetime.timedelta(days=-7),
            '1M': lambda: today + dateutil.relativedelta.relativedelta(months=-1),
            '3M': lambda: today + dateutil.relativedelta.relativedelta(months=-3),
            '6M': lambda: today + dateutil.relativedelta.relativedelta(months=-6),
            '1Y': lambda: today + dateutil.relativedelta.relativedelta(years=-1),
            '12M': lambda: today + dateutil.relativedelta.relativedelta(years=-1),
            '3Y': lambda: today + dateutil.relativedelta.relativedelta(years=-3),
            '5Y': lambda: today + dateutil.relativedelta.relativedelta(years=-5),
            '10Y': lambda: today + dateutil.relativedelta.relativedelta(years=-10),
        }
        if period not in offsets:
            raise ValueError('Unsupported period: %s' % period)
        start_date = offsets[period]()
    # normalise to a datetime at midnight
    start_date = datetime.datetime.combine(start_date,
                                           datetime.datetime.min.time())
    return start_date
def GetPortfolioSummary():
    """Build a per-position summary of the portfolio (units, cost, current
    value, PnL, HKD equivalents, weighted-average cost, category).

    Returns a dict with two DataFrames:
        'Original' - one row per position (ps)
        'Adjusted' - same, but Allianz Income & Growth rows broken down into
                     Equity/Credit/Convertibles allocations (ps2)
    """
    # get a summary of transactions in the portfolio
    tn = setup.GetAllTransactions()
    tn.CostInPlatformCcy = tn.CostInPlatformCcy.round(2)
    tn.drop('_id', axis=1, inplace=True)
    sec = setup.GetSecurities()
    sec.drop('_id', axis=1, inplace=True)
    # enrich transactions with Security metadata
    tn = pd.merge(tn, sec, how='left', left_on='BBGCode', right_on='BBGCode')
    agg = {'NoOfUnits': sum, 'CostInPlatformCcy': sum, 'RealisedPnL': sum}
    #summary = tn.groupby(['Platform','Name','FundHouse','AssetClass','BBGCode','BBGPriceMultiplier','Currency']).agg(agg)
    summary = tn.groupby(
        ['Platform', 'Name', 'FundHouse', 'AssetClass', 'BBGCode',
         'Currency']).agg(agg)
    summary.reset_index(inplace=True)
    summary.rename(columns={'Currency': 'SecurityCurrency'}, inplace=True)
    # enrich with platforms
    db = setup.ConnectToMongoDB()
    platforms = pd.DataFrame(list(db.Platform.find()))
    summary = pd.merge(summary,
                       platforms,
                       how='left',
                       left_on='Platform',
                       right_on='PlatformName')
    summary.drop(['PlatformName', '_id', 'SecurityCurrency'],
                 axis=1,
                 inplace=True)
    # enrich transactions with the latest price
    # (ARKG has 2 FX rates USDHKD USDSGD that can cause duplicates)
    lastnav = calc_val.GetLastNAV()
    lastnav = lastnav.groupby(['BBGCode', 'LastNAV',
                               'SecurityCurrency']).agg({'LastUpdated': 'min'})
    lastnav.reset_index(inplace=True)
    ### bug fixed 26 Dec 2020: left join caused duplicates
    summary = summary.merge(
        lastnav[['BBGCode', 'LastNAV', 'LastUpdated', 'SecurityCurrency']],
        how='left',
        left_on='BBGCode',
        right_on='BBGCode')
    # added 22 Nov 2018 (remove unused stock code)
    summary = summary[summary.SecurityCurrency.notnull()]
    summary.reset_index(inplace=True, drop=True)
    # FX rate from security ccy into the platform ccy, row by row
    for i in range(len(summary)):
        #summary.loc[i,'FXConversionRate'] = GetFXRate(summary.loc[i,'PlatformCurrency'], summary.loc[i,'SecurityCurrency'])
        summary.loc[i, 'FXConversionRate'] = calc_fx.ConvertFX(
            summary.loc[i, 'SecurityCurrency'],
            summary.loc[i, 'PlatformCurrency'])
    #summary['CurrentValue'] = summary.NoOfUnits * summary.LastNAV * summary.FXConversionRate / summary.BBGPriceMultiplier
    summary[
        'CurrentValue'] = summary.NoOfUnits * summary.LastNAV * summary.FXConversionRate
    summary.CurrentValue = summary.CurrentValue.round(2)
    # unrealised PnL in platform ccy
    summary[
        'PnL'] = summary.CurrentValue - summary.CostInPlatformCcy  #- summary.RealisedPnL
    summary.PnL = summary.PnL.round(2)
    agg2 = {
        'NoOfUnits': sum,
        'CostInPlatformCcy': sum,
        'CurrentValue': sum,
        'PnL': sum,
        'RealisedPnL': sum
    }
    ps = summary.groupby([
        'Platform', 'PlatformCurrency', 'FundHouse', 'AssetClass', 'Name',
        'BBGCode', 'LastNAV', 'LastUpdated'
    ]).agg(agg2)
    ps.reset_index(inplace=True)
    # NOTE(review): division by zero if CostInPlatformCcy is 0 - yields inf/NaN
    ps['PnLPct'] = ps.PnL / ps.CostInPlatformCcy
    # added 2 Dec 2020
    sec = sec[['BBGCode', 'AssetType', 'Currency']]
    sec.rename(columns={
        'AssetType': 'SecurityType',
        'Currency': 'SecurityCcy'
    },
               inplace=True)
    ps = ps.merge(sec, how='left', left_on='BBGCode', right_on='BBGCode')
    # add current value in HKD
    for i in range(len(ps)):
        row = ps.loc[i]
        # get HKD equivalent amount
        ccy = row.PlatformCurrency
        value_ccy = row.CurrentValue
        ps.loc[i, 'CurrentValueInHKD'] = calc_fx.ConvertTo('HKD', ccy, value_ccy)
        # get Category
        sec_name = row.Name
        ps.loc[i, 'Category'] = _GetSecurityCategory(sec_name)
    # calculate Cost and PnL in HKD (scaled by the HKD/platform-ccy ratio)
    ps.loc[:, 'CostInHKD'] = ps.loc[:, 'CurrentValueInHKD'] / ps.loc[:, 'CurrentValue'] * ps.loc[:, 'CostInPlatformCcy']
    ps.loc[:, 'PnLInHKD'] = ps.loc[:, 'CurrentValueInHKD'] / ps.loc[:, 'CurrentValue'] * ps.loc[:, 'PnL']
    # 22 Dec 2020: add SecCcy to HKD rate, add WA cost in Security Ccy
    for i in [x for x in ps.index]:
        row = ps.loc[i]
        ps.loc[i, 'WAC'] = setup.GetWeightedAvgCostPerUnitInSecCcy(
            row.BBGCode, row.Platform)
    # total PnL = realised + unrealised
    # (should I add or not? TO BE DECIDED)
    # special treatment to breakdown Allianz Income & Growth funds
    # divide by 3 separate rows and allocate different asset classes
    allianz_bbgcodes = ['ALIGH2S LX', 'ALLGAME LX']
    allianz_allocations = [{
        'Equity': 0.33
    }, {
        'Credit': 0.33
    }, {
        'Convertibles': 0.34
    }]
    # generate the new rows based on allocations
    dfAllianz = ps[ps.BBGCode.isin(allianz_bbgcodes)].copy()
    dfAllianzNew = pd.DataFrame(columns=dfAllianz.columns)
    for i in range(len(dfAllianz)):
        row = dfAllianz.iloc[i]
        for j in range(len(allianz_allocations)):
            # one new row per asset-class allocation, amounts pro-rated
            new_row = row.copy()
            new_row['AssetClass'] = list(allianz_allocations[j].keys())[0]
            new_row['NoOfUnits'] = row.NoOfUnits * list(
                allianz_allocations[j].values())[0]
            new_row['CostInPlatformCcy'] = row.CostInPlatformCcy * list(
                allianz_allocations[j].values())[0]
            new_row['CurrentValue'] = row.CurrentValue * list(
                allianz_allocations[j].values())[0]
            new_row['PnL'] = row.PnL * list(allianz_allocations[j].values())[0]
            new_row['RealisedPnL'] = row.RealisedPnL * list(
                allianz_allocations[j].values())[0]
            new_row['CurrentValueInHKD'] = row.CurrentValueInHKD * list(
                allianz_allocations[j].values())[0]
            # NOTE(review): DataFrame.append is removed in pandas 2.0 - this
            # block requires pandas < 2.0 as written
            dfAllianzNew = dfAllianzNew.append(new_row)
    # replace the original rows with the new rows
    ps2 = ps[~ps.BBGCode.isin(allianz_bbgcodes)].copy()
    ps2 = ps2.append(dfAllianzNew)
    # can't assign Portfolio % when Allianz is broken down into separate asset classes
    ps.loc[:, 'PortfolioPct'] = ps.loc[:, 'CurrentValueInHKD'] / ps.CurrentValueInHKD.sum(
    )
    # remove rows with 0 holdings
    ps = ps[ps.NoOfUnits != 0]
    ps2 = ps2[ps2.NoOfUnits != 0]
    PortfolioSummary = {'Original': ps, 'Adjusted': ps2}
    return PortfolioSummary
def GetDates(period=None):
    '''
    Return (start_date, end_date) as datetimes at midnight.

    Accepted values for period:
        None - start from first ever transaction, end yesterday
        YTD 1W 1M 3M 6M 1Y/12M 3Y 5Y 10Y - rolling windows ending yesterday
        20xx - a calendar year (ends 31 Dec, or yesterday for the current year)

    Raises ValueError for an unsupported period string (previously this left
    start_date unbound and crashed with UnboundLocalError).
    '''
    # calculate up to end of yesterday (up to start of today)
    yesterday = datetime.datetime.now().date() - datetime.timedelta(days=1)
    if period is None:
        # since inception: earliest transaction among supported instruments
        tn = setup.GetAllTransactions()
        supported_instruments = setup.GetListOfSupportedInstruments()
        tn = tn[tn.BBGCode.isin(supported_instruments)]
        start_date = tn.Date.min()
        end_date = yesterday
    elif len(period) == 4 and period.isnumeric():
        # period is a calendar year (2019, 2020, 2021, ...)
        year = int(period)
        start_date = datetime.datetime(year, 1, 1)
        if year == datetime.datetime.today().year:
            # current year ends yesterday (fix: original had a duplicated
            # "end_date = end_date =" assignment here)
            end_date = yesterday
        else:
            end_date = datetime.datetime(year, 12, 31)
    else:
        period = period.upper()
        today = datetime.datetime.today().date()
        # offsets are lambdas so only the selected one is evaluated
        offsets = {
            'YTD': lambda: datetime.date(today.year, 1, 1),
            '1W': lambda: today + datetime.timedelta(days=-7),
            '1M': lambda: today + dateutil.relativedelta.relativedelta(months=-1),
            '3M': lambda: today + dateutil.relativedelta.relativedelta(months=-3),
            '6M': lambda: today + dateutil.relativedelta.relativedelta(months=-6),
            '1Y': lambda: today + dateutil.relativedelta.relativedelta(years=-1),
            '12M': lambda: today + dateutil.relativedelta.relativedelta(years=-1),
            '3Y': lambda: today + dateutil.relativedelta.relativedelta(years=-3),
            '5Y': lambda: today + dateutil.relativedelta.relativedelta(years=-5),
            '10Y': lambda: today + dateutil.relativedelta.relativedelta(years=-10),
        }
        if period not in offsets:
            raise ValueError('Unsupported period: %s' % period)
        start_date = offsets[period]()
        end_date = yesterday
    # normalise both bounds to datetimes at midnight
    start_date = datetime.datetime.combine(start_date,
                                           datetime.datetime.min.time())
    end_date = datetime.datetime.combine(end_date,
                                         datetime.datetime.min.time())
    return start_date, end_date
def ProcessHistoricalMarketData(bbgcode=None, platform=None, start_date=None):
    """Download historical close prices from Yahoo Finance for supported
    instruments and replace the HistoricalMarketData collection on MongoDB.

    bbgcode/platform: optional filters; start_date defaults to the earliest
    supported transaction. Side effects: deletes and re-inserts the whole
    HistoricalMarketData collection.
    """
    print('\nProcessing historical market data...')
    tn = setup.GetAllTransactions()
    # filter by bbgcode and platform
    if bbgcode is not None:
        tn = tn[tn.BBGCode == bbgcode]
    if platform is not None:
        tn = tn[tn.Platform == platform]
    if start_date is None:
        supported_instruments = setup.GetListOfSupportedInstruments()
        tn = tn[tn.BBGCode.isin(supported_instruments)]
        start_date = tn.Date.min()
    list_of_supported_instruments = setup.GetListOfSupportedInstruments()
    if bbgcode is not None:
        list_of_supported_instruments = [bbgcode]
    # populate list of instruments and their date ranges
    rows = []
    for code in list_of_supported_instruments:
        yf_ticker = setup.GetYahooFinanceTicker(code)
        dates = setup.GetETFDataDateRanges(code)
        date_from = dates['DateFrom']
        # NOTE: DateTo results in incorrect values for securities no longer held
        date_to = dates['DateTo']
        if date_from < start_date.date():
            date_from = start_date.date()
        rows.append({
            'BBGCode': code,
            'YFTicker': yf_ticker,
            'DateFrom': date_from,
            'DateTo': date_to
        })
    df = pd.DataFrame(rows, columns=['BBGCode', 'YFTicker', 'DateFrom', 'DateTo'])
    # loop through the list and collect the data from Yahoo
    # (DataFrame.append was removed in pandas 2.0 - collect frames, concat once)
    frames = []
    for i in range(len(df)):
        row = df.iloc[i]
        tmp = pdr.get_data_yahoo(row.YFTicker, start=row.DateFrom, end=row.DateTo)
        tmp = tmp.reset_index()
        tmp['BBGCode'] = row.BBGCode
        frames.append(tmp)
    data = pd.concat(frames, ignore_index=False) if frames else pd.DataFrame()
    # added 15 Dec 2020: Yahoo Finance sometimes returns null rows - drop them
    data = data[~data.Close.isnull()]
    data.drop_duplicates(['BBGCode', 'Date'], inplace=True)
    # forward-fill across dates to deal with HK/US holidays mismatch
    # (this process also adds incorrect values for securities no longer held)
    # pandas 2.0: pivot takes keyword args only; fillna(method=...) is deprecated
    tmp = data.pivot(index='Date', columns='BBGCode', values='Close')
    tmp = tmp.ffill()
    tmp = tmp.reset_index()
    tmp2 = pd.melt(tmp,
                   id_vars=['Date'],
                   value_vars=list(data.BBGCode.unique()),
                   value_name='Close')
    tmp2.dropna(inplace=True)
    # save to mongodb: clear all previous rows, then insert the refreshed set
    db = setup.ConnectToMongoDB()
    coll = db['HistoricalMarketData']
    coll.delete_many({})
    coll.insert_many(tmp2.to_dict('records'))
    print('(updated %s records on MongoDB)' % len(tmp2))
def _CalcValuation(bbgcode, platform=None, start_date=None):
    """Calculate the daily valuation series of a single instrument.

    Assumes a bbgcode is held on only one platform (known exceptions: VWO, XLE).
    Returns a DataFrame with Date, Close, NoOfUnits, Holdings, Valuation
    (security ccy), ValuationUSD and ValuationHKD columns.
    """
    tn = setup.GetAllTransactions()
    # filter by platform and bbgcode
    if platform is not None:
        tn = tn[tn.Platform == platform]
    tn = tn[tn.BBGCode == bbgcode]
    if start_date is None:
        supported_instruments = setup.GetListOfSupportedInstruments()
        tn = tn[tn.BBGCode.isin(supported_instruments)]
        start_date = tn.Date.min()
    hd = mdata.GetHistoricalData(bbgcode=bbgcode)
    hd = hd[['Date', 'Close']]
    # keep the last close before the window so the position has an opening price
    hd_prev = hd[hd.Date < start_date].copy()
    hd_prev = hd_prev.tail(1)
    # filter by selected date range
    hd = hd[hd.Date >= start_date]
    # stop the series once the position is fully closed out
    if tn.NoOfUnits.sum() == 0:
        hd = hd[hd.Date <= tn.Date.max()]
    # add back last valuation before beginning of date range
    # (DataFrame.append was removed in pandas 2.0 - use concat)
    hd = pd.concat([hd, hd_prev])
    hd = hd.sort_values(['Date'], ascending=True)
    tn = tn[['Date', 'NoOfUnits']]
    tn = tn[tn.Date >= start_date]
    # CAREFUL: if the transaction date is a holiday where there is no market
    # data, the holdings will be missed
    # add balance brought forward
    bf = _GetExistingHoldings(start_date, platform=platform)
    bf = bf[bf.BBGCode == bbgcode]
    df = pd.merge(hd, tn, how='left', on='Date')
    # if there is balance b/f, then add it on the first row
    if len(bf) > 0:
        df.loc[0, 'NoOfUnits'] = bf.iloc[0].NoOfUnits
    # avoid column-level inplace fillna (deprecated chained-assignment pattern)
    df['NoOfUnits'] = df.NoOfUnits.fillna(0)
    df['Holdings'] = df.NoOfUnits.cumsum()
    # security currency -> USD -> HKD
    sec_ccy = setup.GetSecurityCurrency(bbgcode)
    ToUSD = calc_fx.GetFXRate('USD', sec_ccy)
    df['Valuation'] = df.Holdings * df.Close
    df['ValuationUSD'] = df.Valuation * ToUSD
    # load historical USDHKD exchange rates from cache
    usdhkd = mdata.GetHistoricalUSDHKD()
    df = df.merge(usdhkd, how='left', on='Date')
    # fillna(method='ffill') is deprecated - use .ffill()
    df['USDHKDrate'] = df.USDHKDrate.ffill()
    df['ValuationHKD'] = df.ValuationUSD * df.USDHKDrate
    return df
def CalcIRR(platform=None, bbgcode=None, period=None):
    """Calculate the money-weighted return (XIRR) in HKD terms.

    platform/bbgcode: optional filters; period: None for since-inception, or
    a period string understood by util.GetDates (e.g. 'YTD', '1Y', '2020').

    Returns a dict with StartDate, InitialCashflow, FinalCashflow, IRR
    (plus EndDate when the calculation is actually performed).
    For periods shorter than a year, the annualised XIRR is converted back
    to a period return.
    """
    #platform = 'FSM HK'
    #bbgcode = 'ARKK US'
    #period = None #since inception
    #period = 'YTD'
    #period = '1Y'
    #period = '3M'
    #period='2020'
    #platform,bbgcode,period=None,None,None
    df = setup.GetAllTransactions()
    list_of_supported_securities = setup.GetListOfSupportedInstruments()
    # filter the data based on selection criteria (bbgcode, platform)
    df.drop(['_id'], axis=1, inplace=True)
    df = df[df.BBGCode.isin(list_of_supported_securities)]
    if platform is not None:
        df = df[df.Platform == platform]
    if bbgcode is not None:
        df = df[df.BBGCode == bbgcode]
    # get the start date for cashflows (the sum of anything before needs to
    # be added as a single cashflow)
    #date_range_start = util.GetStartDate(period)
    date_range_start, date_range_end = util.GetDates(period)
    # apply the start date from applicable transactions
    earliest_transaction_date = df.Date.min()
    date_range_start_dt = earliest_transaction_date
    PerformCalc = True
    # determine if there is previous data (i.e. whether to add cost brought
    # forward as cashflow)
    if period is None:
        # if period is not defined (i.e. since inception), take the earliest
        # transaction date
        hasPrevData = False
        date_range_start_dt = earliest_transaction_date
    else:
        # if period is defined (e.g. 3Y), check whether there are transactions
        # before the window start
        # NOTE(review): at this point date_range_start_dt still equals
        # earliest_transaction_date, so hasPrevData is always False here -
        # looks unintended, but the flag is not used below; confirm before
        # changing
        hasPrevData = (len(df[df.Date < date_range_start_dt]) > 0)
        date_range_start_dt = datetime.datetime.combine(
            date_range_start, datetime.datetime.min.time())
        df = df[df.Date >= date_range_start_dt]
        if earliest_transaction_date > date_range_start:
            # the first transaction is after the beginning of the specified
            # period, so there is nothing to calculate an IRR over
            irr = np.nan
            PerformCalc = False
            # NOTE(review): this early-out dict has no 'EndDate' key, unlike
            # the one built below - callers should not rely on its presence
            dic = {
                'StartDate': date_range_start_dt,
                'InitialCashflow': None,
                'FinalCashflow': None,
                'IRR': irr
            }
    if PerformCalc:
        # process cashflows
        # cashflow needs to start after the start date, because the ptf val
        # would have included transactions incl. on the same day
        cf = df[df.Date > date_range_start_dt].copy()
        #cf = df[df.Date >= date_range_start_dt].copy()
        cf.loc[cf.Type == 'Buy',
               'Cashflow'] = cf.loc[cf.Type == 'Buy', 'CostInPlatformCcy'] * -1
        # realised PnL needs to be taken into account to the cashflow
        # calculation too!
        cf.loc[cf.Type == 'Sell',
               'Cashflow'] = cf.loc[cf.Type == 'Sell',
                                    'CostInPlatformCcy'] * -1 + cf.loc[
                                        cf.Type == 'Sell', 'RealisedPnL']
        #+ cf.loc[cf.Type=='Sell', 'RealisedPnL'] * -1
        cf.loc[cf.Type == 'Dividend',
               'Cashflow'] = cf.loc[cf.Type == 'Dividend', 'RealisedPnL']
        # get platform and currency
        platforms = list(cf.Platform.unique())
        currencies = [setup.GetPlatformCurrency(x) for x in platforms]
        platform_ccy = {
            platforms[x]: currencies[x]
            for x in range(len(platforms))
        }
        cf['PlatformCcy'] = cf.Platform.map(platform_ccy)
        cf = cf[['Date', 'Type', 'BBGCode', 'PlatformCcy', 'Cashflow']]
        # calculate HKD equivalent (only HKD and SGD platforms are handled)
        SGDHKD = calc_fx.GetFXRate('HKD', 'SGD')
        ToHKD = {'HKD': 1, 'SGD': SGDHKD}
        cf['CashflowInHKD'] = cf.PlatformCcy.map(ToHKD) * cf.Cashflow
        # need to add initial and final cashflows (valuation at beginning,
        # valuation at end)
        # get valuations (beginning, ending)
        if bbgcode is None:
            val = CalcPortfolioHistoricalValuation(
                platform=platform,
                bbgcode=bbgcode,
                start_date=date_range_start_dt)
            val.rename(columns={'ValuationHKD': 'Cashflow'}, inplace=True)
        else:
            val = _CalcValuation(bbgcode=bbgcode,
                                 start_date=date_range_start_dt)
            val.rename(columns={'ValuationHKD': 'Cashflow'}, inplace=True)
        val = val[['Date', 'Cashflow']]
        # valuation as of start date (negated: treated as an initial outflow)
        if period is not None:
            val_start = (val[(val.Date <= np.datetime64(date_range_start_dt))
                             & (val.index == val[val.Date <= np.datetime64(
                                 date_range_start_dt)].index.max())]).copy()
            val_start.loc[:, 'Cashflow'] = val_start.loc[:, 'Cashflow'] * -1
            val_start.rename(columns={'Cashflow': 'CashflowInHKD'},
                             inplace=True)
            cf = cf.append(val_start)
        else:
            # since inception: initial cashflow is zero
            val_start = pd.DataFrame(data={
                'Date': date_range_start_dt,
                'CashflowInHKD': 0
            },
                                     columns=['Date', 'CashflowInHKD'],
                                     index=[0])
        # latest valuation
        val_end = val[val.index == val.index.max()].copy()
        val_end.rename(columns={'Cashflow': 'CashflowInHKD'}, inplace=True)
        # add latest valuation as final cashflow (only if there are still
        # holdings)
        #if (cf.Date.iloc[-1] != val.Date.iloc[0]) and val.Cashflow.iloc[0]!=0:
        if val.Cashflow.iloc[-1] != 0:
            cf = cf.append(val_end, ignore_index=True)
        cf = cf.sort_values(['Date'])
        cf = cf.reset_index(drop=True)
        # annualised return
        annualised_irr = _xirr(values=cf.CashflowInHKD.to_list(),
                               dates=cf.Date.to_list())
        # convert back to period if period is < a year
        no_of_days = (pd.to_datetime(cf.iloc[-1].Date) -
                      pd.to_datetime(cf.iloc[0].Date)).days
        if no_of_days < 365:
            irr = (1 + annualised_irr)**(no_of_days / 365) - 1
        else:
            irr = annualised_irr
        # return the calc results
        dic = {
            'StartDate': date_range_start_dt,
            'EndDate': cf.tail(1).Date.iloc[0],
            'InitialCashflow': val_start.CashflowInHKD.iloc[0],
            'FinalCashflow': val_end.CashflowInHKD.iloc[0],
            'IRR': irr
        }
    return dic
def ProcessLastNAV():
    """Rebuild the LastNAV collection on MongoDB from current live holdings,
    enriched with platform/security currencies and an FX conversion rate."""
    # get all transactions from MongoDB and net the units per platform/security
    tran = setup.GetAllTransactions()
    #tran['NoOfUnits'] = tran.NoOfUnits.astype(np.float32)
    positions = tran.groupby(['Platform',
                              'BBGCode']).aggregate({'NoOfUnits': 'sum'})
    positions = positions.reset_index(drop=False)
    # keep live positions only; drop cash platforms and the Singapore cash fund
    positions = positions[positions.NoOfUnits > 0.001]
    positions = positions[positions.Platform.str[:4] != 'Cash']
    positions = positions[positions.BBGCode != 'CASHFND SP']
    # enrich with platform currency
    unique_platforms = positions.Platform.unique()
    platform_map = pd.DataFrame()
    platform_map['Platform'] = unique_platforms
    platform_map['PlatformCcy'] = [
        setup.GetPlatformCurrency(p) for p in list(unique_platforms)
    ]
    positions = positions.merge(platform_map,
                                how='left',
                                left_on='Platform',
                                right_on='Platform')
    # enrich with security currency
    securities = setup.GetSecurities()
    positions = positions.merge(securities[['BBGCode', 'Currency']],
                                how='left',
                                left_on='BBGCode',
                                right_on='BBGCode')
    positions.rename(columns={'Currency': 'SecurityCcy'}, inplace=True)
    # enrich with last NAV from the spreadsheet feed
    lastnav = pd.read_excel(mdata.LastNavFilePath)
    lastnav.rename(columns={'Ticker_BBG': 'BBGCode'}, inplace=True)
    positions = positions.merge(lastnav[['BBGCode', 'LastNAV', 'LastUpdated']],
                                how='left',
                                left_on='BBGCode',
                                right_on='BBGCode')
    # FX rate from security ccy into platform ccy (1 when they already match)
    for idx in range(len(positions)):
        rec = positions.iloc[idx]
        if rec.PlatformCcy == rec.SecurityCcy:
            positions.loc[idx, 'FXRate'] = 1
        else:
            positions.loc[idx, 'FXRate'] = calc_fx.GetFXRate(
                rec.PlatformCcy, rec.SecurityCcy)
    # format output columns
    positions.rename(columns={'SecurityCcy': 'SecurityCurrency'}, inplace=True)
    positions.drop(columns=['PlatformCcy', 'NoOfUnits'], inplace=True)
    # replace the LastNAV collection on MongoDB with the refreshed rows
    db = setup.ConnectToMongoDB()
    LastNAV = db['LastNAV']
    LastNAV.delete_many({})
    LastNAV.insert_many(positions[[
        'BBGCode', 'LastNAV', 'SecurityCurrency', 'FXRate', 'LastUpdated'
    ]].to_dict('records'))