def getPositions(file):
    """
    [String] file => [List] ([Dictionary] position)

    Read repo positions from a csv file. The numeric fields are converted
    to float and an empty CloseDate is defaulted to '99991231' (i.e. an
    open-ended repo).
    """
    # FIXED: the debug message used to say 'loadRepoPosition()' — a
    # copy-paste leftover that made log traces point at the wrong function.
    logger.debug('getPositions(): {0}'.format(file))

    headers = ( 'RepoName', 'Account', 'LoanAmount', 'AccruedInterest'
              , 'OpenDate', 'CloseDate', 'InterestRate')

    def updatePosition(p):
        # convert the numeric fields and default an empty CloseDate
        return mergeDict(
            p
          , { 'LoanAmount': float(p['LoanAmount'])
            , 'AccruedInterest': float(p['AccruedInterest'])
            , 'InterestRate': float(p['InterestRate'])
            , 'CloseDate': '99991231' if p['CloseDate'] == '' else p['CloseDate']
            }
        )

    with open(file, newline='') as csvfile:
        reader = csv.reader(csvfile, delimiter=',', quotechar='|')
        # build the list while the file is still open, since csv.reader
        # is lazy over the underlying file object
        return [updatePosition(dict(zip(headers, row))) for row in reader]
def enrichPosition(repoData, position):
    """
    [Dictionary] repoData, [Dictionary] position
        => [Iterable] ([Dictionary] enriched position)

    Because a Bloomberg repo position can have one or multiple collaterals,
    we allocate the loan amount and accrued interest pro-rata to each
    collateral.

    NOTE(review): each entry of repoData[RepoName] looks like a tuple of
    (collateral id, quantity, pro-rata ratio, open date yyyy-mm-dd) —
    confirm against the producer of repoData.
    """
    logger.debug('enrichPosition(): {0}'.format(position['RepoName']))

    # [String] date (yyyy-mm-dd) => [String] date (yyyymmdd)
    changeDate = compose(
        lambda L: L[0] + L[1] + L[2]
      , lambda s: s.split('-')
    )

    try:
        return map(
            lambda t: mergeDict(
                position
              , { 'CollateralID': t[0]
                , 'CollateralQuantity': t[1]
                , 'LoanAmount': position['LoanAmount']*t[2]
                , 'AccruedInterest': position['AccruedInterest']*t[2]
                , 'OpenDate': changeDate(t[3])
                }
            )
          , repoData[position['RepoName']]
        )

    # FIXED: was a bare 'except:', which also swallowed SystemExit and
    # KeyboardInterrupt. Realistically this guards the repoData lookup
    # (KeyError when the repo name is unknown).
    except Exception:
        logger.error('enrichPosition(): failed on {0}'.format(position['RepoName']))
        return [mergeDict(
            position
          , {'CollateralID': '', 'CollateralQuantity': 0}
        )]
def getAccumulatedTimeWeightedCapital(bondConnectOnly, sortedCLPositions):
    """
    [Bool] bondConnectOnly
    [Iterable] ([String] period end date, [List] positions of that period)
        => [Iterable] Float (time weighted return at each period end date)
    """
    def bondConnectEntries(entries):
        """
        [Iterable] cash ledger entries => [Iterable] cash ledger entries

        filter and change the entries for bond connect calculation: keep
        only BOCHK_BC entries, and treat 'Transfer' as 'Deposit'.
        """
        for entry in entries:
            if 'BOCHK_BC' in entry['GroupWithinCurrency_OpeningBalDesc']:
                if entry['TranDescription'] == 'Transfer':
                    yield mergeDict(entry, {'TranDescription': 'Deposit'})
                else:
                    yield entry

    # running accumulation: each element becomes
    # (period end date, all positions up to and including that period)
    accumulated = accumulate(
        sortedCLPositions
      , lambda t1, t2: (t2[0], t1[1] + t2[1])
    )

    if bondConnectOnly:
        accumulated = map(
            lambda t: (t[0], list(bondConnectEntries(t[1])))
          , accumulated
        )

    return map(lambda t: getTimeWeightedCapital(t[0], t[1]), accumulated)
def createRepoReconFile(directory, date, positions):
    """
    [String] directory, [String] date,
    [Iterable] positions (repo positions after enrichment)
        => [String] file

    Create a csv file from the repo positions in the directory.
    """
    logger.debug('createRepoReconFile(): {0}'.format(date))

    headers = ( 'RepoName', 'Account', 'CollateralID', 'CollateralQuantity'
              , 'LoanAmount', 'AccruedInterest', 'OpenDate', 'CloseDate'
              , 'InterestRate')

    # yyyymmdd => yyyy-mm-dd
    def changeDate(dt):
        return dt[0:4] + '-' + dt[4:6] + '-' + dt[6:8]

    # one position => one csv row, with both dates reformatted
    def toRow(p):
        updated = mergeDict(
            p
          , { 'OpenDate': changeDate(p['OpenDate'])
            , 'CloseDate': changeDate(p['CloseDate'])
            }
        )
        return dictToValues(headers, updated)

    outputFile = join( directory
                     , 'Repo_PosRecon_WithHeader_{0}.csv'.format(date))
    return writeCsv(outputFile, chain([headers], map(toRow, positions)))
def _addMetaDataToPosition(fields, positions, metaData):
    """
    [Iterable] fields, [Iterable] positions, [Dictionary] metaData
        => [Iterable] positions

    Copy the requested metaData fields onto every position, defaulting a
    missing field to the empty string. The lookup dictionary is built once
    and shared across all positions.
    """
    extra = {field: metaData.get(field, '') for field in fields}
    return map(lambda position: mergeDict(position, extra), positions)
def getJournalTrades(lines):
    """
    [Iterable] lines => [Iterable] ([Dictionary] journal trade)

    Take lines until the first empty one, build raw positions from them,
    then normalize account / ticket numbers to strings and the date fields
    to yyyy-mm-dd strings.
    """
    def toStringIfFloat(value):
        return str(int(value)) if isinstance(value, float) else value

    # Excel ordinal number => yyyy-mm-dd string
    def toDateTimeString(value):
        return fromExcelOrdinal(value).strftime('%Y-%m-%d')

    def updatePosition(p):
        return mergeDict(
            p
          , { 'Account': toStringIfFloat(p['Account'])
            , 'Trade Date': toDateTimeString(p['Trade Date'])
            , 'Settlement Date': toDateTimeString(p['Settlement Date'])
            , 'Ticket Number': toStringIfFloat(p['Ticket Number'])
            }
        )

    nonEmptyLines = takewhile(
        lambda line: len(line) > 0 and line[0] != ''
      , lines
    )
    return map(updatePosition, getRawPositions(nonEmptyLines))
def getRepoRerateFromFile(file):
    """
    [String] file => [Iterable] ([Dictionary] repo rerate)
    """
    logger.debug('getRepoRerateFromFile(): {0}'.format(file))

    def updateRerate(el):
        # el['RateTable'] holds two parallel sequences (keys in the first,
        # values in the second); zip them into a dictionary, then expose
        # transaction_type under the 'TransactionType' key as well.
        withTable = mergeDict(
            el
          , {'RateTable': dict(zip(el['RateTable'][0], el['RateTable'][1]))}
        )
        return mergeDict(
            withTable
          , {'TransactionType': withTable['transaction_type']}
        )

    return map(updateRerate, getRawDataFromXML(file))
def updateAccruedInterest(date, position):
    """
    [String] date, [Dictionary] position => [Dictionary] position

    Return a copy of the position with its 'AccruedInterest' recalculated
    as of the given date.
    """
    logger.debug('updateAccruedInterest(): {0}'.format(position['RepoName']))
    accrued = getAccruedInterest(date, position)
    return mergeDict(position, {'AccruedInterest': accrued})
def addInvestId(position):
    """
    [Dictionary] position => [Dictionary] position

    Add the field 'InvestID' to the position: the text inside the first
    pair of parentheses of 'InvestmentDescription', or the whole
    description when it has no parentheses.
    """
    # FIXED: the pattern was a plain string literal, where '\(' is an
    # invalid escape sequence (SyntaxWarning on Python 3.12+) — use a raw
    # string. Also compare against None with 'is not' per PEP 8.
    m = re.search(r'\((.*)\)', position['InvestmentDescription'])
    return mergeDict(
        position
      , { 'InvestID': m.group(1) if m is not None
            else position['InvestmentDescription']
        }
    )
def consolidate(group):
    """
    [List] group (positions) => [Dictionary] position

    Start from the first position of the group, set its 'UnitCost' from
    the whole group, then overwrite the book fields below with the values
    computed by _updateFields over the group.
    """
    bookFields = ( 'Quantity', 'CostBook', 'MarketValueBook'
                 , 'UnrealizedPriceGainLossBook', 'UnrealizedFXGainLossBook'
                 , 'AccruedAmortBook', 'AccruedInterestBook'
                 )
    withUnitCost = mergeDict(group[0], {'UnitCost': getUnitCost(group)})
    return mergeDict(withUnitCost, _updateFields(bookFields, group))
def getRepoTradeFromFile(file):
    """
    [String] file => [Iterable] ([Dictionary] repo trade)

    Read the raw trades from the XML file and expose each trade's
    transaction_type under the 'TransactionType' key as well.
    """
    logger.debug('getRepoTradeFromFile(): {0}'.format(file))

    def addTransactionType(d):
        return mergeDict(d, {'TransactionType': d['transaction_type']})

    return map(addTransactionType, getRawDataFromXML(file))
def _consolidate_dividend_receivable_group(group):
    """
    [List] ([Dictionary] dvd receivable)
        => [Dictionary] consolidated dvd receivable position

    All entries in the group must agree on ex-date, ex-date quantity and
    local currency; otherwise a ValueError is raised. The result is the
    first entry with 'LocalGrossDividendRecPay' summed over the group.
    """
    consistencyKeys = map(
        lambda p: (p['EXDate'], p['ExDateQuantity'], p['LocalCurrency'])
      , group
    )
    if not allEquals(consistencyKeys):
        raise ValueError(
            '_consolidate_dividend_receivable_group(): inconsistency {0}'.
            format(group[0]['Investment']))

    totalDividend = sum(p['LocalGrossDividendRecPay'] for p in group)
    return mergeDict(group[0], {'LocalGrossDividendRecPay': totalDividend})
def addSedol(sedolMapping, p):
    """
    [Dictionary] sedolMapping, [Dictionary] p => [Dictionary] position

    Return a copy of the position with a 'SEDOL' field looked up by the
    position's 'Code' (empty string when the code has no mapping).
    """
    sedol = sedolMapping.get(p['Code'], '')
    return mergeDict(p, {'SEDOL': sedol})
def updatePortfolioName(p):
    """
    [Dictionary] p => [Dictionary] position

    Return a copy of the position with its 'NameSort' field normalized
    through toString().
    """
    nameSort = toString(p['NameSort'])
    return mergeDict(p, {'NameSort': nameSort})
def getCurrentDirectory():
    """Return the directory containing this module."""
    return dirname(abspath(__file__))


def updateDictionaryWithFunction(functionMap, d):
    """
    [Dictionary] functionMap (key -> function), [Dictionary] d
        => [Dictionary] updated d (with keys in functionMap)

    Create a copy of the input dictionary d, with certain key, value pairs
    updated for those keys in the functionMap. A key missing from d is
    treated as the empty string before its function is applied.
    """
    return mergeDict(
        d
      , {key: functionMap[key](d.get(key, '')) for key in functionMap}
    )


def getExcelMetadata(metadata):
    """
    [Dictionary] raw metadata => [Dictionary] metadata

    Normalize the portfolio id to a string and the period start / end
    dates to date time strings.
    """
    return updateDictionaryWithFunction(
        { 'Portfolio': toStringIfFloat
        , 'PeriodEndDate': toDateTimeString
        , 'PeriodStartDate': toDateTimeString
        }
      , metadata
    )
def updateCashDescription(p):
    """
    [Dictionary] p => [Dictionary] position

    For a cash position (as judged by _isCash on 'ThenByDescription'),
    rebuild 'TaxLotDescription' from the invest id and the portfolio; any
    other position is returned unchanged.
    """
    if not _isCash(p['ThenByDescription']):
        return p

    return mergeDict(
        p
      , {'TaxLotDescription': p['InvestID'] + ' ' + p['Portfolio']}
    )