def convert_asset_for_rates_data_set(from_asset: Asset, c_type: RatesConversionType) -> str:
    try:
        bbid = from_asset.get_identifier(AssetIdentifier.BLOOMBERG_ID)
        if bbid is None:
            return from_asset.get_marquee_id()

        if c_type is RatesConversionType.DEFAULT_BENCHMARK_RATE:
            to_asset = CURRENCY_TO_DEFAULT_RATE_BENCHMARK[bbid]
        elif c_type is RatesConversionType.INFLATION_BENCHMARK_RATE:
            to_asset = CURRENCY_TO_INFLATION_RATE_BENCHMARK[bbid]
        else:
            to_asset = CROSS_TO_CROSS_CURRENCY_BASIS[bbid]

        return GsAssetApi.map_identifiers(GsIdType.mdapi, GsIdType.id, [to_asset])[to_asset]
    except KeyError:
        # fall back to the asset's own Marquee id for unsupported currencies/crosses
        logging.info(f'Unsupported currency or cross {bbid}')
        return from_asset.get_marquee_id()
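
# Illustrative sketch (not part of the library API): resolving the default benchmark rate
# dataset id for a currency asset. Assumes an authenticated GsSession and that
# SecurityMaster resolves 'GBP' to a currency asset; both are assumptions for the example.
def _example_convert_gbp_benchmark_rate() -> str:
    from gs_quant.markets.securities import SecurityMaster
    gbp = SecurityMaster.get_asset('GBP', AssetIdentifier.BLOOMBERG_ID)
    return convert_asset_for_rates_data_set(gbp, RatesConversionType.DEFAULT_BENCHMARK_RATE)
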
def vol_term(asset: Asset, strike_reference: SkewReference, relative_strike: Real,
             pricing_date: Optional[GENERIC_DATE] = None, *, source: str = None,
             real_time: bool = False) -> pd.Series:
    """
    Volatility term structure. Uses most recent date available if pricing_date is not provided.

    :param asset: asset object loaded from security master
    :param strike_reference: reference for strike level
    :param relative_strike: strike relative to reference
    :param pricing_date: YYYY-MM-DD or relative days before today e.g. 1d, 1m, 1y
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: volatility term structure
    """
    if real_time:
        raise NotImplementedError('realtime vol_term not implemented')  # TODO

    if strike_reference != SkewReference.NORMALIZED:
        relative_strike /= 100

    start, end = _range_from_pricing_date(asset.exchange, pricing_date)
    with DataContext(start, end):
        _logger.debug('where strikeReference=%s, relativeStrike=%s', strike_reference.value, relative_strike)
        where = FieldFilterMap(strikeReference=strike_reference.value, relativeStrike=relative_strike)
        q = GsDataApi.build_market_data_query([asset.get_marquee_id()], QueryType.IMPLIED_VOLATILITY,
                                              where=where, source=source, real_time=real_time)
        _logger.debug('q %s', q)
        df = _market_data_timed(q)

    if df.empty:
        return pd.Series()

    latest = df.index.max()
    _logger.info('selected pricing date %s', latest)
    df = df.loc[latest]
    cbd = _get_custom_bd(asset.exchange)
    # adding and subtracting one custom business day rolls each expiry onto a valid business day
    df = df.assign(expirationDate=df.index + df['tenor'].map(_to_offset) + cbd - cbd)
    df = df.set_index('expirationDate')
    df.sort_index(inplace=True)
    df = df.loc[DataContext.current.start_date:DataContext.current.end_date]
    return df['impliedVolatility'] if not df.empty else pd.Series()
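
# Illustrative sketch: an at-the-money spot volatility term structure for an index.
# Assumes an authenticated GsSession and that SecurityMaster resolves 'SPX' by ticker;
# the ticker and the 2019 date window are placeholders, not values from this module.
def _example_vol_term() -> pd.Series:
    from gs_quant.markets.securities import SecurityMaster
    spx = SecurityMaster.get_asset('SPX', AssetIdentifier.TICKER)
    with DataContext(datetime.date(2019, 1, 1), datetime.date(2019, 6, 1)):
        return vol_term(spx, SkewReference.SPOT, 100)
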
def average_implied_variance(asset: Asset, tenor: str, strike_reference: EdrDataReference, relative_strike: Real,
                             *, source: str = None, real_time: bool = False) -> Series:
    """
    Historic weighted average implied variance for the underlying assets of an equity index.

    :param asset: asset object loaded from security master
    :param tenor: relative date representation of expiration date e.g. 1m
    :param strike_reference: reference for strike level
    :param relative_strike: strike relative to reference
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: average implied variance curve
    """
    if real_time:
        raise NotImplementedError('realtime average_implied_variance not implemented')

    if strike_reference == EdrDataReference.DELTA_PUT:
        relative_strike = abs(100 - relative_strike)

    relative_strike = relative_strike / 100

    delta_types = (EdrDataReference.DELTA_CALL, EdrDataReference.DELTA_PUT)
    strike_ref = "delta" if strike_reference in delta_types else strike_reference.value

    _logger.debug('where tenor=%s, strikeReference=%s, relativeStrike=%s', tenor, strike_ref, relative_strike)

    mqid = asset.get_marquee_id()
    where = FieldFilterMap(tenor=tenor, strikeReference=strike_ref, relativeStrike=relative_strike)
    q = GsDataApi.build_market_data_query([mqid], QueryType.AVERAGE_IMPLIED_VARIANCE, where=where,
                                          source=source, real_time=real_time)
    _logger.debug('q %s', q)
    df = _market_data_timed(q)
    return Series() if df.empty else df['averageImpliedVariance']
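
# Illustrative sketch: 3-month average implied variance of index constituents at the
# 25-delta put strike (internally mapped to a 0.75 delta-call relative strike).
# Assumes an authenticated GsSession; 'SPX' is a placeholder ticker.
def _example_average_implied_variance() -> Series:
    from gs_quant.markets.securities import SecurityMaster
    spx = SecurityMaster.get_asset('SPX', AssetIdentifier.TICKER)
    return average_implied_variance(spx, '3m', EdrDataReference.DELTA_PUT, 25)
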
def implied_correlation(asset: Asset, tenor: str, strike_reference: EdrDataReference, relative_strike: Real,
                        *, source: str = None, real_time: bool = False) -> Series:
    """
    Correlation of an asset implied by observations of market prices.

    :param asset: asset object loaded from security master
    :param tenor: relative date representation of expiration date e.g. 1m
    :param strike_reference: reference for strike level
    :param relative_strike: strike relative to reference
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: implied correlation curve
    """
    if real_time:
        raise NotImplementedError('realtime implied_correlation not implemented')

    if strike_reference == EdrDataReference.DELTA_PUT:
        relative_strike = abs(100 - relative_strike)

    relative_strike = relative_strike / 100

    delta_types = (EdrDataReference.DELTA_CALL, EdrDataReference.DELTA_PUT)
    strike_ref = "delta" if strike_reference in delta_types else strike_reference.value

    _logger.debug('where tenor=%s, strikeReference=%s, relativeStrike=%s', tenor, strike_ref, relative_strike)

    mqid = asset.get_marquee_id()
    where = FieldFilterMap(tenor=tenor, strikeReference=strike_ref, relativeStrike=relative_strike)
    q = GsDataApi.build_market_data_query([mqid], QueryType.IMPLIED_CORRELATION, where=where,
                                          source=source, real_time=real_time)
    _logger.debug('q %s', q)
    df = _market_data_timed(q)
    return Series() if df.empty else df['impliedCorrelation']
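
# Illustrative sketch: 1-month implied correlation of an index at the 50-delta call
# strike. Assumes an authenticated GsSession; 'NDX' is a placeholder ticker.
def _example_implied_correlation() -> Series:
    from gs_quant.markets.securities import SecurityMaster
    ndx = SecurityMaster.get_asset('NDX', AssetIdentifier.TICKER)
    return implied_correlation(ndx, '1m', EdrDataReference.DELTA_CALL, 50)
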
def vol_smile(asset: Asset, tenor: str, strike_reference: VolSmileReference,
              pricing_date: Optional[GENERIC_DATE] = None, *, source: str = None,
              real_time: bool = False) -> Series:
    """
    Volatility smile of an asset implied by observations of market prices.

    :param asset: asset object loaded from security master
    :param tenor: relative date representation of expiration date e.g. 1m
    :param strike_reference: reference for strike level
    :param pricing_date: YYYY-MM-DD or relative days before today e.g. 1d, 1m, 1y
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: implied volatility smile
    """
    if real_time:
        raise NotImplementedError('realtime vol_smile not implemented')

    mqid = asset.get_marquee_id()

    start, end = _range_from_pricing_date(asset.exchange, pricing_date)
    with DataContext(start, end):
        q = GsDataApi.build_market_data_query(
            [mqid],
            QueryType.IMPLIED_VOLATILITY,
            where=FieldFilterMap(tenor=tenor, strikeReference=strike_reference.value),
            source=source,
            real_time=real_time
        )
        _logger.debug('q %s', q)
        df = _market_data_timed(q)

    if df.empty:
        return Series()

    latest = df.index.max()
    _logger.info('selected pricing date %s', latest)
    df = df.loc[latest]
    vols = df['impliedVolatility'].values
    strikes = df['relativeStrike'].values
    return Series(vols, index=strikes)
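
# Illustrative sketch: the 1-month vol smile of a single stock, indexed by relative
# strike. Assumes an authenticated GsSession, that SecurityMaster resolves the
# Bloomberg id shown, and that VolSmileReference exposes a FORWARD member; all three
# are assumptions for the example, not guarantees from this module.
def _example_vol_smile() -> Series:
    from gs_quant.markets.securities import SecurityMaster
    stock = SecurityMaster.get_asset('AAPL UW', AssetIdentifier.BLOOMBERG_ID)
    return vol_smile(stock, '1m', VolSmileReference.FORWARD)
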
def implied_volatility(asset: Asset, tenor: str, strike_reference: VolReference, relative_strike: Real,
                       *, source: str = None, real_time: bool = False) -> Series:
    """
    Volatility of an asset implied by observations of market prices.

    :param asset: asset object loaded from security master
    :param tenor: relative date representation of expiration date e.g. 1m
    :param strike_reference: reference for strike level
    :param relative_strike: strike relative to reference
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: implied volatility curve
    """
    if asset.asset_class == AssetClass.FX:
        # no ATM support yet
        if relative_strike == 50 and strike_reference in (VolReference.DELTA_CALL, VolReference.DELTA_PUT):
            delta_strike = 'DN'
        else:
            if strike_reference == VolReference.DELTA_CALL:
                delta_strike = f'{relative_strike}DC'
            elif strike_reference == VolReference.DELTA_PUT:
                delta_strike = f'{relative_strike}DP'
            elif strike_reference == VolReference.FORWARD:
                if relative_strike == 100:
                    delta_strike = 'ATMF'
                else:
                    raise MqValueError('Relative strike must be 100 for Forward strike reference')
            elif strike_reference == VolReference.SPOT:
                if relative_strike == 100:
                    delta_strike = 'ATMS'
                else:
                    raise MqValueError('Relative strike must be 100 for Spot strike reference')
            else:
                raise MqValueError('strikeReference: ' + strike_reference.value + ' not supported for FX')

        loc_string = 'NYC'
        _logger.debug('where tenor=%s, deltaStrike=%s, location=%s', tenor, delta_strike, loc_string)
        where = FieldFilterMap(tenor=tenor, deltaStrike=delta_strike, location=loc_string)
        q = GsDataApi.build_market_data_query([asset.get_marquee_id()], QueryType.IMPLIED_VOLATILITY,
                                              where=where, source=source, real_time=real_time)
        _logger.debug('q %s', q)
        df = _market_data_timed(q)

        if df.empty:
            reversed_cross = _reverse_cross(asset.name)
            q = GsDataApi.build_market_data_query([reversed_cross.get_marquee_id()], QueryType.IMPLIED_VOLATILITY,
                                                  where=where, source=source, real_time=real_time)
            _logger.debug('q %s', q)
            df = _market_data_timed(q)
    else:
        if strike_reference == VolReference.DELTA_PUT:
            relative_strike = abs(100 - relative_strike)
        relative_strike = relative_strike if strike_reference == VolReference.NORMALIZED else relative_strike / 100

        ref_string = "delta" if strike_reference in (VolReference.DELTA_CALL,
                                                     VolReference.DELTA_PUT) else strike_reference.value
        _logger.debug('where tenor=%s, strikeReference=%s, relativeStrike=%s', tenor, ref_string, relative_strike)
        where = FieldFilterMap(tenor=tenor, strikeReference=ref_string, relativeStrike=relative_strike)
        q = GsDataApi.build_market_data_query([asset.get_marquee_id()], QueryType.IMPLIED_VOLATILITY,
                                              where=where, source=source, real_time=real_time)
        _logger.debug('q %s', q)
        df = _market_data_timed(q)

    return Series() if df.empty else df['impliedVolatility']
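
# Illustrative sketch: 3-month 25-delta call implied volatility for an FX cross.
# Assumes an authenticated GsSession; the cross identifier is a placeholder.
def _example_implied_volatility() -> Series:
    from gs_quant.markets.securities import SecurityMaster
    eurusd = SecurityMaster.get_asset('EURUSD', AssetIdentifier.BLOOMBERG_ID)
    # for FX these inputs map to deltaStrike='25DC' at the NYC fixing;
    # for an equity asset the same inputs map to strikeReference='delta', relativeStrike=0.25
    return implied_volatility(eurusd, '3m', VolReference.DELTA_CALL, 25)
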
def skew(asset: Asset, tenor: str, strike_reference: SkewReference, distance: Real, *,
         location: str = 'NYC', source: str = None, real_time: bool = False) -> Series:
    """
    Difference in implied volatility of equidistant out-of-the-money put and call options.

    :param asset: asset object loaded from security master
    :param tenor: relative date representation of expiration date e.g. 1m
    :param strike_reference: reference for strike level (for equities)
    :param distance: distance from at-the-money option
    :param location: location at which a price fixing has been taken (for FX assets)
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: skew curve
    """
    if real_time:
        raise MqValueError('real-time skew not supported')

    if strike_reference in (SkewReference.DELTA, None):
        b = 50
    elif strike_reference == SkewReference.NORMALIZED:
        b = 0
    else:
        b = 100

    kwargs = {}
    if strike_reference in (SkewReference.DELTA, None):
        # using delta call strikes so X DP is represented as (100 - X) DC
        q_strikes = [100 - distance, distance, b]
    else:
        q_strikes = [b - distance, b + distance, b]

    if asset.asset_class == AssetClass.FX:
        q_strikes = _to_fx_strikes(q_strikes)
        kwargs['location'] = location
        column = 'deltaStrike'  # should use SkewReference.DELTA for FX
    else:
        assert asset.asset_class == AssetClass.Equity
        if not strike_reference:
            raise MqTypeError('strike reference required for equities')
        if strike_reference != SkewReference.NORMALIZED:
            q_strikes = [x / 100 for x in q_strikes]
        kwargs['strikeReference'] = strike_reference.value
        column = 'relativeStrike'

    kwargs[column] = q_strikes
    _logger.debug('where tenor=%s and %s', tenor, kwargs)
    where = FieldFilterMap(tenor=tenor, **kwargs)
    q = GsDataApi.build_market_data_query([asset.get_marquee_id()], QueryType.IMPLIED_VOLATILITY,
                                          where=where, source=source)
    _logger.debug('q %s', q)
    df = _market_data_timed(q)

    if df.empty:
        return pd.Series()

    curves = {k: v for k, v in df.groupby(column)}
    if len(curves) < 3:
        raise MqValueError('skew not available for given inputs')
    series = [curves[qs]['impliedVolatility'] for qs in q_strikes]
    return (series[0] - series[1]) / series[2]
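
# Illustrative sketch: 1-month 25-delta skew of an equity index, i.e. the 75-delta-call
# vol minus the 25-delta-call vol, normalised by the 50-delta vol, following the strike
# construction above. Assumes an authenticated GsSession; 'SPX' is a placeholder ticker.
def _example_skew() -> Series:
    from gs_quant.markets.securities import SecurityMaster
    spx = SecurityMaster.get_asset('SPX', AssetIdentifier.TICKER)
    return skew(spx, '1m', SkewReference.DELTA, 25)
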
def bucketize_price(asset: Asset, price_method: str, price_component: str, bucket: str = '7x24',
                    granularity: str = 'daily', *, source: str = None, real_time: bool = True) -> pd.Series:
    """
    Bucketized electricity historical clears.

    :param asset: asset object loaded from security master
    :param price_method: price method between LMP and MCP: default value = LMP
    :param price_component: price type among totalPrice, energy, loss and congestion: default value = totalPrice
    :param bucket: bucket type among '7x24', 'peak', 'offpeak', '2x16h' and '7x8': default value = 7x24
    :param granularity: daily or monthly: default value = daily
    :param source: name of function caller: default value = None
    :param real_time: whether to retrieve intraday data instead of EOD: default value = True
    :return: bucketized electricity historical clears
    """
    # create granularity indicator
    if granularity.lower() in ['daily', 'd']:
        granularity = 'D'
    elif granularity.lower() in ['monthly', 'm']:
        granularity = 'M'
    else:
        raise ValueError('Invalid granularity: ' + granularity + '. Expected value: daily or monthly.')

    start_date, end_date = DataContext.current.start_date, DataContext.current.end_date
    where = FieldFilterMap(priceMethod=price_method, priceComponent=price_component)
    with DataContext(start_date, end_date + datetime.timedelta(days=2)):
        q = GsDataApi.build_market_data_query([asset.get_marquee_id()], QueryType.PRICE, where=where,
                                              source=source, real_time=True)
        df = _market_data_timed(q)
        _logger.debug('q %s', q)

    # TODO: get timezone info from Asset
    # default frequency definition
    df = df.tz_convert('US/Eastern')
    peak_start = 7
    peak_end = 23
    weekends = [5, 6]

    bbid = asset.get_identifier(AssetIdentifier.BLOOMBERG_ID)
    if bbid.split(" ")[0] in ['MISO', 'CAISO', 'ERCOT', 'SPP']:
        df = df.tz_convert('US/Central')
        peak_start = 6
        peak_end = 22
    if bbid.split(" ")[0] == 'CAISO':
        df = df.tz_convert('US/Pacific')
        weekends = [6]

    start_time, end_time = pd.to_datetime(start_date), pd.to_datetime(end_date) + datetime.timedelta(hours=23)
    df['month'] = df.index.month
    df['date'] = df.index.date
    df['day'] = df.index.dayofweek
    df['hour'] = df.index.hour
    holidays = NercCalendar().holidays(start=start_date, end=end_date).date

    # checking missing data points
    ref_hour_range = pd.date_range(start_time, end_time, freq='1h', tz='US/Eastern')
    missing_hours = ref_hour_range[~ref_hour_range.isin(df.index)]
    missing_dates = np.unique(missing_hours.date)
    missing_months = np.unique(missing_hours.month)

    # drop dates and months which have missing data
    df = df.loc[(~df['date'].isin(missing_dates))]
    if granularity == 'M':
        df = df.loc[(~df['month'].isin(missing_months))]

    # TODO: get frequency definition from SecDB
    if bucket.lower() == '7x24':
        pass
    # offpeak: 11pm-7am & weekend & holiday
    elif bucket.lower() == 'offpeak':
        df = df.loc[df['date'].isin(holidays) | df['day'].isin(weekends) |
                    (~df['date'].isin(holidays) & ~df['day'].isin(weekends) &
                     ((df['hour'] < peak_start) | (df['hour'] > peak_end - 1)))]
    # peak: 7am to 11pm on weekdays
    elif bucket.lower() == 'peak':
        df = df.loc[(~df['date'].isin(holidays)) & (~df['day'].isin(weekends)) &
                    (df['hour'] > peak_start - 1) & (df['hour'] < peak_end)]
    # 7x8: 11pm to 7am
    elif bucket.lower() == '7x8':
        df = df.loc[(df['hour'] < peak_start) | (df['hour'] > peak_end - 1)]
    # 2x16h: weekends & holidays
    elif bucket.lower() == '2x16h':
        df = df.loc[((df['date'].isin(holidays)) | df['day'].isin(weekends)) &
                    ((df['hour'] > peak_start - 1) & (df['hour'] < peak_end))]
    else:
        raise ValueError('Invalid bucket: ' + bucket + '. Expected value: peak, offpeak, 7x24, 7x8, 2x16h.')

    df = df['price'].resample(granularity).mean()
    df.index = df.index.date
    df = df.loc[start_date:end_date]
    return df
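
# Illustrative sketch: monthly average peak-bucket clears for a power hub over a fixed
# window. Assumes an authenticated GsSession; the Bloomberg-style id 'MISO INDIANA HUB'
# is a placeholder, not an identifier defined in this module.
def _example_bucketize_price() -> pd.Series:
    from gs_quant.markets.securities import SecurityMaster
    hub = SecurityMaster.get_asset('MISO INDIANA HUB', AssetIdentifier.BLOOMBERG_ID)
    with DataContext(datetime.date(2019, 5, 1), datetime.date(2019, 5, 31)):
        return bucketize_price(hub, 'LMP', 'totalPrice', bucket='peak', granularity='monthly')
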