def process_args(api, args):
    """Display holdings adjustments for a portfolio.

    With --date, fetches the single adjustment effective at that date and
    returns one row per tax-lot. Without a date, lists all adjustments in
    the (optional) start/end date range.
    """
    taxlot_fields = [
        "units",
        "cost.currency",
        "cost.amount",
        "portfolio_cost",
        "price",
        "purchase_date",
        "settlement_date",
    ]

    def make_holding(adj):
        # One row per tax-lot, annotated with the instrument, its
        # identifiers and any sub-holding keys
        lots = lpt.to_df(adj.tax_lots, taxlot_fields)
        lots["instrument_uid"] = adj.instrument_uid
        for k, v in adj.instrument_identifiers.items():
            lots[k] = v
        if adj.sub_holding_keys is not None:
            for k, v in adj.sub_holding_keys.items():
                lots[f"SHK:{k}"] = v.value.label_value
        return lots

    def get_success(result):
        # Single-adjustment case: flatten all adjustments into one frame
        holdings = [make_holding(adj) for adj in result.content.adjustments]
        df = pd.concat(holdings, ignore_index=True, sort=True)
        return lpt.trim_df(df, args.limit)

    def list_success(result):
        # Listing case: one row per adjustment
        return lpt.trim_df(
            lpt.to_df(result, ["effective_at", "unmatched_holding_method"]),
            args.limit,
            sort="effective_at",
        )

    if args.date:
        return api.call.get_holdings_adjustment(
            scope=args.scope,
            code=args.portfolio,
            effective_at=lpt.to_date(args.date),
        ).bind(get_success)
    else:
        # Default the range to "everything" when no bounds are given
        return api.call.list_holdings_adjustments(
            scope=args.scope,
            code=args.portfolio,
            from_effective_at=lpt.to_date(args.start_date)
            if args.start_date
            else lpt.to_date("1900-01-01"),
            to_effective_at=lpt.to_date(args.end_date)
            if args.end_date
            else lpt.to_date(datetime.now()),
        ).bind(list_success)
def process_args(api, args):
    """Search mastered instruments by property key=value pairs.

    Returns a dataframe of matches (name plus one column per identifier
    type seen across the results), or the string "No Matches".
    """

    def success(result):
        flat = [i for r in result.content for i in r.mastered_instruments]
        if len(flat) > 0:
            # Union of every identifier type present across all matches
            identifiers = sorted(
                set.union(*[set(i.identifiers.keys()) for i in flat]))
            df = lpt.to_df(
                flat,
                ["name"] + ["identifiers.KEY:" + i + ".value" for i in identifiers],
            )
            df.columns = ["Name"] + identifiers
            return df
        else:
            return "No Matches"

    # Split on the first '=' only, so property values may themselves
    # contain '=' characters
    request = [
        api.models.InstrumentSearchProperty(s[0], s[1])
        for s in [p.split("=", 1) for p in args.properties]
    ]

    return api.call.instruments_search(
        instrument_search_property=request,
        mastered_only=True,
        mastered_effective_at=lpt.to_date(args.date),
    ).bind(success)
def process_args(api, args):
    """Set holdings for a portfolio from a CSV or JSON input file.

    The input is normalised into one row per tax-lot; rows are grouped by
    instrument (and any sub-holding keys) and one AdjustHoldingRequest is
    built per group.
    """
    taxlot_fields = [
        "units",
        "cost.amount",
        "cost.currency",
        "portfolio_cost",
        "price",
        "purchase_date",
        "settlement_date",
    ]

    # Check if input is json or csv
    if "csv" in args.input:
        df = lpt.read_input(args.input)
    elif "json" in args.input:
        # Convert from json to dataframe
        with open(args.input, "r") as myfile:
            data = json.load(myfile)
        df = json_normalize(data, max_level=1)
        # Literal (non-regex) replacement: the '.' in these prefixes must
        # not act as a regex wildcard
        df.columns = df.columns.str.replace("subHoldingKeys.", SHK, regex=False)
        df.columns = df.columns.str.replace("properties.", P, regex=False)
        columns_to_modify = [
            c for c in df.columns.values
            if c.startswith(SHK) or c.startswith(P)
        ]
        for col in columns_to_modify:
            # Collapse nested property dicts down to their label value
            df[col] = df[col].apply(
                lambda x: x.get("value", {}).get("labelValue", ""))
    else:
        raise Exception(
            "The file provided: {} is not .json or .csv format.".format(
                args.input))

    # Check the schema - ensure every expected tax-lot column exists
    for column in taxlot_fields:
        if column not in df.columns:
            df[column] = None

    keys = ["instrument_uid"
            ] + [c for c in df.columns.values if c.startswith(SHK)]

    # Fill down any blanks in the grouping keys
    df[keys] = df[keys].ffill()

    # Group by the keys and build a request for each group
    return api.call.set_holdings(
        args.scope,
        args.portfolio,
        lpt.to_date(args.date),
        holding_adjustments=[
            api.models.AdjustHoldingRequest(
                instrument_identifiers=lpt.to_instrument_identifiers(
                    i if len(keys) == 1 else i[0]),
                sub_holding_keys=lpt.perpetual_upsert(api.models, hld_df, SHK),
                properties=lpt.perpetual_upsert(api.models, hld_df),
                tax_lots=api.from_df(hld_df[taxlot_fields],
                                     api.models.TargetTaxLotRequest),
            ) for i, hld_df in df.groupby(keys)
        ],
    )
def process_args(api, args):
    """Load prices from a CSV file and upsert them as analytics.

    With --fx, prices are treated as FX rates (dedicated scope, currency
    instruments); otherwise instruments are mapped to LUIDs first.
    """
    prices_df = lpt.read_csv(args.filename)
    eff_date = lpt.to_date(args.date)
    scope = args.scope

    instr_col = prices_df.columns.values[0]  # First column is the instrument
    if args.fx:
        scope = scope + "_FX"
        prices_df[instr_col] = "CCY_" + prices_df[instr_col]
        prices_df["column3"] = None
    else:
        mi.map_instruments(api, prices_df, instr_col)

    # fix the column names
    prices_df.columns = ["instrument", "price", "ccy"]

    def upsert_analytics(result=None):
        analytics = []
        for _, row in prices_df.iterrows():
            analytics.append(
                api.models.InstrumentAnalytic(
                    row["instrument"], row["price"], row["ccy"]))
        return api.call.set_analytics(
            scope, eff_date.year, eff_date.month, eff_date.day, analytics
        ).bind(lambda r: None)

    if args.update:
        return upsert_analytics()
    # Otherwise create the analytic store first, then upsert into it
    return api.call.create_analytic_store(
        api.models.CreateAnalyticStoreRequest(scope, eff_date)
    ).bind(upsert_analytics)
def process_args(api, args):
    """Fetch the commands applied to a portfolio, returned as a dict."""

    def success(result):
        return result.content.to_dict()

    return api.call.get_portfolio_commands(
        scope=args.scope,
        code=args.portfolio,
        effective_at=lpt.to_date(args.date),
    ).bind(success)
def run_query(api, args, date):
    """Run a portfolio aggregation and return (stats, dataframe)."""
    value_keys = (AGG_INSTR, AGG_UID, AGG_TYPE, AGG_PV,
                  AGG_UNITS, AGG_COST, AGG_RATE, AGG_PRC)
    request = api.models.AggregationRequest(
        recipe_id=api.models.ResourceId(
            args.pricing_scope or args.scope, args.recipe),
        effective_at=lpt.to_date(date),
        metrics=[api.models.AggregateSpec(k, "Value") for k in value_keys],
        portfolio_identifier_code=(
            "GroupPortfolio" if args.group else "SinglePortfolio"),
    )

    # Called if get_aggregation() succeeds
    def success(result):
        frame = pd.DataFrame.from_records(result.content.data)[[
            AGG_TYPE,
            AGG_INSTR,
            AGG_UID,
            AGG_UNITS,
            AGG_COST,
            AGG_PRC,
            AGG_RATE,
            AGG_PV,
        ]]
        # Local value = portfolio value divided by the exchange rate
        frame[LVAL] = frame[AGG_PV] / frame[AGG_RATE]
        rename_map = {
            AGG_INSTR: INSTR,
            AGG_UNITS: UNITS,
            AGG_COST: COST,
            AGG_PRC: PRICE,
            AGG_TYPE: TYPE,
            AGG_UID: UID,
            AGG_RATE: RATE,
            AGG_PV: PVAL,
        }
        frame = frame.rename(columns=rename_map)
        return (
            result.stats,
            frame[[UID, TYPE, INSTR, UNITS, COST, PRICE, LVAL, RATE, PVAL]],
        )

    return api.call.get_aggregation(
        args.scope, args.portfolio, aggregation_request=request
    ).bind(success)
def get_props(result: PortfolioProperties) -> Dict[str, Timestamp]:
    """
    Extract the shortlisted portfolio properties as dates.

    Only properties whose key appears in the enclosing ``shortlist`` are
    kept; each entry is re-keyed to its shortlist display name and its
    label value is converted via ``lpt.to_date``.

    :param PortfolioProperties result: The result from the API call to get
        properties for the Portfolio

    :return: Dict[str, Timestamp]: The extended fields and their values
        retrieved from LUSID
    """
    nonlocal shortlist
    return {
        shortlist[pk]: lpt.to_date(pv.value.label_value)
        for pk, pv in result.properties.items() if pk in shortlist
    }
def create():
    """Create the portfolio when --create was requested; otherwise no-op.

    args.create carries (display_name, base_currency, created_date).
    """
    if not args.create:
        return Either.Right(None)
    return api.call.create_portfolio(
        args.scope,
        transaction_portfolio=api.models.CreateTransactionPortfolioRequest(
            code=args.portfolio,
            display_name=args.create[0],
            base_currency=args.create[1],
            created=lpt.to_date(args.create[2]),
            accounting_method=map_method(args.accounting_method),
            sub_holding_keys=args.shk,
        ),
    )
def step2(result=None):
    """Fetch instruments, chaining into step3 when the property list
    exceeds the MAX_PROPS per-request limitation."""
    next_step = success
    if result is not None:
        # result contains the full set of instrument property keys
        prop_keys = list(
            result.apply(
                lambda r: f"Instrument/{r['scope']}/{r['code']}", axis=1))
        args.properties = prop_keys[:MAX_PROPS]  # limitation
        if len(prop_keys) > MAX_PROPS:
            # Remaining keys are fetched in a follow-up step
            next_step = lambda r: step3(r, prop_keys[MAX_PROPS:], None)
    return api.call.get_instruments(
        args.type,
        request_body=args.instrument,
        property_keys=args.properties,
        effective_at=lpt.to_date(args.effective_at),
    ).bind(next_step)
def process_args(api, args):
    """Display the constituents of a reference portfolio.

    NOTE(review): the original contained a leftover debug
    ``print(...); exit()`` at the top of the success handler, which killed
    the process and made the dataframe rendering unreachable; removed.
    """

    def success(constituents):
        df = lpt.to_df(constituents, [])
        return lpt.trim_df(df, args.limit)

    properties = []
    properties.extend(args.properties or [])

    result = api.call.get_reference_portfolio_constituents(
        args.scope,
        args.portfolio,
        effective_at=lpt.to_date(args.date),
        property_keys=properties,
    )
    return result.bind(success)
def set_holdings(portfolio, holdings):
    """Group rows by the holding keys and set the portfolio's holdings,
    one AdjustHoldingRequest per group."""
    adjustments = []
    for group_key, group_df in holdings.groupby(keys):
        # groupby yields a scalar key when there is a single grouping
        # column, otherwise a tuple whose first element is the instrument
        instrument = group_key if len(keys) == 1 else group_key[0]
        adjustments.append(
            api.models.AdjustHoldingRequest(
                instrument_identifiers=lpt.to_instrument_identifiers(
                    instrument),
                sub_holding_keys=lpt.perpetual_upsert(
                    api.models, group_df, "SHK:"),
                properties=lpt.perpetual_upsert(api.models, group_df),
                tax_lots=api.from_df(group_df[taxlot_fields],
                                     api.models.TargetTaxLotRequest),
            ))
    return api.call.set_holdings(
        args.scope,
        portfolio,
        lpt.to_date(args.positions[1]),
        adjust_holding_request=adjustments,
    )
def process_args(api, args):
    """Show the properties attached to a portfolio as (key, value,
    effective_from) rows."""

    def success(result):
        records = []
        for prop in result.content.properties.values():
            # A property holds either a metric (numeric) or a label value
            if prop.value.metric_value is not None:
                value = prop.value.metric_value.value
            else:
                value = prop.value.label_value
            records.append((prop.key, value, prop.effective_from))
        df = pd.DataFrame.from_records(
            records, columns=["key", "value", "effective_from"])
        return lpt.trim_df(df, args.limit, sort="key")

    return api.call.get_portfolio_properties(
        scope=args.scope,
        code=args.portfolio,
        effective_at=lpt.to_date(args.date),
    ).bind(success)
def batch_upsert_quotes(api, scope, df):
    """Upsert one quote per dataframe row into the given scope."""

    def build_request(row):
        # Identify the quote series, then wrap it with its effective date
        series_id = api.models.QuoteSeriesId(
            provider=row["provider"],
            price_source=row["source"],
            instrument_id=row["instrument_uid"],
            instrument_id_type=row["instrument_uid_type"],
            quote_type=row["quote_type"],
            field=row["field"],
        )
        return api.models.UpsertQuoteRequest(
            quote_id=api.models.QuoteId(
                series_id,
                effective_at=lpt.to_date(row["effective_at"]),
            ),
            metric_value=api.models.MetricValue(
                value=row["metric_value"], unit=row["metric_unit"]),
            lineage="InternalSystem",
        )

    quotes = {idx: build_request(row) for idx, row in df.iterrows()}
    return api.call.upsert_quotes(scope=scope, quotes=quotes)
def process_args(api, args):
    """Fetch quotes for a set of instruments (optionally loaded from a
    CSV file) and return them as a trimmed dataframe."""

    def success(result):
        # (source field, display name) pairs, in output order
        col_pairs = [
            ("quote_id.effective_at", "Date"),
            ("quote_id.quote_series_id.instrument_id_type",
             "Instrument Type"),
            ("quote_id.quote_series_id.instrument_id", "Instrument"),
            ("metric_value.value", "Quote"),
            ("metric_value.unit", "Unit"),
            ("quote_id.quote_series_id.quote_type", "Type"),
            ("quote_id.quote_series_id.price_source", "Source"),
            ("quote_id.quote_series_id.provider", "Provider"),
            ("as_at", "AsAt"),
        ]
        df = lpt.to_df(
            list(result.content.values.values()),
            [src for src, _ in col_pairs],
        )
        return lpt.trim_df(df.rename(columns=dict(col_pairs)), args.limit)

    if args.from_file:
        # Extend the instrument list with unique, non-null values from the
        # named column of the provided file
        col_name = args.from_file[1]
        file_df = pd.read_csv(
            args.from_file[0], dtype=str)[[col_name]].drop_duplicates()
        file_df = file_df[file_df[col_name].notnull()]
        args.instrument.extend(file_df[col_name].values)

    quote_ids = {
        instr: api.models.QuoteSeriesId(
            provider=args.provider,
            price_source=args.source,
            instrument_id=instr,
            instrument_id_type=args.identifier,
            quote_type=args.quote_type,
            field=args.field,
        )
        for instr in args.instrument
    }
    return api.call.get_quotes(
        scope=args.scope,
        quote_ids=quote_ids,
        effective_at=lpt.to_date(args.date),
    ).bind(success)
def run_query(api, args, date):
    """Fetch holdings for a portfolio and return (stats, dataframe).

    Column layout comes from the enclosing module's ``columns`` table,
    extended with sub-holding-key and property columns.
    """

    def success(result):
        # Sub-holding-key columns, discovered from the first holding only
        first_holding = result.content.values[:1]
        shk_cols = [
            "SHK:" + key
            for hld in first_holding
            for key in hld.sub_holding_keys.keys()
        ]
        prop_cols = ["P:" + p for p in args.properties]
        df = lpt.to_df(
            result, [c[0] for c in columns] + shk_cols + prop_cols)
        df.columns = (
            [c[1] for c in columns]
            + [k.split("/")[2] for k in shk_cols]
            + [p[8:] for p in args.properties])
        return (result.stats, df)

    return api.call.get_holdings(
        scope=args.scope,
        code=args.portfolio,
        effective_at=lpt.to_date(date),
        by_taxlots=args.taxlots,
        property_keys=["Instrument/default/Name"] + args.properties,
    ).bind(success)
def run_query(api, args, date):
    """Run a valuation for a portfolio (or group) and return
    (stats, dataframe) including any requested property columns."""
    base_keys = (AGG_INSTR, AGG_UID, AGG_TYPE, AGG_PV,
                 AGG_UNITS, AGG_COST, AGG_RATE, AGG_PRC)
    metrics = [api.models.AggregateSpec(k, "Value") for k in base_keys]
    metrics += [api.models.AggregateSpec(v, "Value") for v in args.properties]

    request = api.models.ValuationRequest(
        recipe_id=api.models.ResourceId(
            args.pricing_scope or args.scope, args.recipe),
        valuation_schedule=api.models.ValuationSchedule(
            effective_at=lpt.to_date(date)),
        metrics=metrics,
        portfolio_entity_ids=[
            api.models.PortfolioEntityId(
                scope=args.scope,
                code=args.portfolio,
                portfolio_entity_type=(
                    "GroupPortfolio" if args.group else "SinglePortfolio"),
            )
        ],
    )

    # Called if get_valuation() succeeds
    def success(result):
        frame = pd.DataFrame.from_records(result.content.data)[
            [AGG_TYPE, AGG_INSTR, AGG_UID, AGG_UNITS,
             AGG_COST, AGG_PRC, AGG_RATE, AGG_PV] + args.properties
        ]
        # Local value = portfolio value divided by the exchange rate
        frame[LVAL] = frame[AGG_PV] / frame[AGG_RATE]
        frame = frame.rename(
            columns={
                AGG_INSTR: INSTR,
                AGG_UNITS: UNITS,
                AGG_COST: COST,
                AGG_PRC: PRICE,
                AGG_TYPE: TYPE,
                AGG_UID: UID,
                AGG_RATE: RATE,
                AGG_PV: PVAL,
            })
        return (
            result.stats,
            frame[[UID, TYPE, INSTR, UNITS, COST, PRICE, LVAL, RATE, PVAL]
                  + args.properties],
        )

    return api.call.get_valuation(valuation_request=request).bind(success)
def run_query(
    api,
    args,
    scope_left=None,
    portfolio_left=None,
    date_left=None,
    scope_right=None,
    portfolio_right=None,
    date_right=None,
    instr_props=None,
):
    """Reconcile holdings between two portfolios at two effective dates.

    Returns "No reconciliation breaks" when there are none, otherwise a
    dataframe with one row per break (keyed by instrument uid) including
    cost/unit differences and any sub-holding keys.
    """
    # None default avoids the shared-mutable-default-argument pitfall
    instr_props = instr_props or []

    # Create request for left
    left_id = lusid.models.ResourceId(scope=scope_left, code=portfolio_left)
    left = api.models.PortfolioReconciliationRequest(
        portfolio_id=left_id, effective_at=lpt.to_date(date_left))

    # create request for right
    right_id = lusid.models.ResourceId(scope=scope_right,
                                       code=portfolio_right)
    right = api.models.PortfolioReconciliationRequest(
        portfolio_id=right_id, effective_at=lpt.to_date(date_right))

    # form reconciliation request
    request = api.models.PortfoliosReconciliationRequest(
        left=left,
        right=right,
        instrument_property_keys=instr_props + [AGG_INSTR],
    )

    def success(result):
        if len(result.content.values) == 0:
            return "No reconciliation breaks"

        def parse_breaks(result):
            data = {}
            for item in result.content.values:
                # Look the name property up by key rather than assuming it
                # is the first property returned
                name = next(
                    (p.value.label_value
                     for p in item.instrument_properties
                     if p.key == AGG_INSTR),
                    None,
                )
                row = {
                    "LUID": item.instrument_uid,
                    "Name": name,
                    "diff_cost": item.difference_cost.amount,
                    "diff_cost_ccy": item.difference_cost.currency,
                    "left_cost": item.left_cost.amount,
                    "left_cost_ccy": item.left_cost.currency,
                    "left_units": item.left_units,
                    "right_cost": item.right_cost.amount,
                    "right_cost_ccy": item.right_cost.currency,
                    "right_units": item.right_units,
                }
                shks = {
                    k: v.value.label_value
                    for k, v in item.sub_holding_keys.items()
                }
                row.update(shks)
                data[item.instrument_uid] = row

            df = pd.DataFrame.from_dict(data).T
            df.fillna("N/A", inplace=True)
            return df

        return parse_breaks(result)

    return api.call.reconcile_holdings(
        portfolios_reconciliation_request=request).bind(success)
action="store_true").parse()) api = lse.connect(args) def metric(m): return api.models.PropertyValue(metric_value=models.MetricValue( value=float(m))) def label(l): return api.models.PropertyValue(label_value=str(l)) property_request = { args.property: api.models.ModelProperty( key=args.property, value=metric(args.value) if args.metric else label(args.value), effective_from=lpt.to_date(args.date) if args.date else None, ) } if args.test: print(property_request) else: api.call.upsert_portfolio_properties( scope=args.scope, code=args.portfolio, portfolio_properties=property_request).match( left=lpt.display_error, right=lambda r: print("Done!"))
def process_args(api, args):
    """List transactions for a portfolio.

    Uses the build_transactions endpoint when an output view or cancelled
    transactions are requested; otherwise uses get_transactions.
    """

    def success(txns):
        # (subset flag, source field, display name)
        available_columns = [
            ("C", "transaction_status", "Status"),
            ("B", "transaction_id", "TxnId"),
            ("B", "type", "Type"),
            ("B", "P:Instrument/default/Name", "Instrument"),
            ("B", "instrument_uid", "LUID"),
            ("B", "transaction_date", "TradeDate"),
            ("A", "settlement_date", "SettleDate"),
            ("B", "units", "Units"),
            ("A", "transaction_price.price", "Price"),
            ("A", "transaction_currency", "TradeCcy"),
            ("A", "total_consideration.currency", "SettleCcy"),
            ("A", "total_consideration.amount", "SettleAmt"),
            ("A", "exchange_rate", "ExchRate"),
            ("A", "P:Transaction/default/TradeToPortfolioRate", "PortRate"),
            ("C", "entry_date_time", "EntryDate"),
            ("C", "cancel_date_time", "CancelDate"),
        ]

        # Pick appropriate set of columns based on arguments
        col_subset = "B" if args.brief else "AB"
        if args.cancels:
            col_subset += "C"

        columns = [c for c in available_columns if c[0] in col_subset]
        # Requested properties become extra columns
        columns.extend([("X", "P:" + v, v.replace("Instrument/default/", ""))
                        for v in args.properties or []])

        df = lpt.to_df(txns, [c[1] for c in columns])

        # Rename the column headings
        df.columns = [c[2] for c in columns]
        return lpt.trim_df(df, args.limit)

    properties = ["Instrument/default/Name"]
    properties.extend(args.properties or [])

    if args.type != "input" or args.cancels:
        # Date range is required for the build_transactions endpoint, so
        # default to an effectively-unbounded range
        if args.start_date is None:
            args.start_date = "1900-01-01"
        if args.end_date is None:
            args.end_date = datetime.datetime.today()

        result = api.call.build_transactions(
            scope=args.scope,
            code=args.portfolio,
            transaction_query_parameters=api.models.TransactionQueryParameters(
                start_date=lpt.to_date(args.start_date),
                end_date=lpt.to_date(args.end_date),
                query_mode="TradeDate",
                show_cancelled_transactions=args.cancels,
            ),
            property_keys=properties,
        )
    else:
        result = api.call.get_transactions(
            args.scope,
            args.portfolio,
            from_transaction_date=lpt.to_date(args.start_date),
            to_transaction_date=lpt.to_date(args.end_date),
            property_keys=properties,
        )
    return result.bind(success)