Example #1
def test_auto_scroll_on_pages(mocker):
    response = {
        "requestId": "049de678-1480000",
        "totalPages": 5,
        "data": [
            {
                "date": "2012-01-25",
                "assetId": "MADXKSGX6921CFNF",
                "value": 1
            }
        ]
    }
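    # one data row per page; with totalPages=5, auto-scroll should return 5 rows in total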
    mocker.patch.object(ContextMeta, 'current', return_value=GsSession(Environment.QA))
    mocker.patch.object(ContextMeta.current, '_post', return_value=response)

    query = DataQuery(
        start_date=dt.date(2017, 1, 15),
        end_date=dt.date(2017, 1, 18),
        where=FieldFilterMap(
            currency="GBP"
        )
    )
    results = GsDataApi.get_results("test", response, query)
    assert len(results) == 5
Example #2
    @classmethod
    def get_many_coordinates(
        cls,
        mkt_type: str = None,
        mkt_asset: str = None,
        mkt_class: str = None,
        mkt_point: Tuple[str, ...] = (),
        *,
        limit: int = 100,
        return_type: type = str,
    ) -> Union[Tuple[str, ...], Tuple[MarketDataCoordinate, ...]]:
        where = FieldFilterMap(
            mkt_type=mkt_type.upper() if mkt_type is not None else None,
            mkt_asset=mkt_asset.upper() if mkt_asset is not None else None,
            mkt_class=mkt_class.upper() if mkt_class is not None else None,
        )
        for index, point in enumerate(mkt_point):
            setattr(where, 'mkt_point' + str(index + 1), point.upper())

        query = EntityQuery(where=where, limit=limit)
        results = GsSession.current._post('/data/mdapi/query',
                                          query)['results']

        if return_type is str:
            return tuple(coordinate['name'] for coordinate in results)
        elif return_type is MarketDataCoordinate:
            return tuple(
                MarketDataCoordinate(
                    mkt_type=coordinate['dimensions']['mktType'],
                    mkt_asset=coordinate['dimensions']['mktAsset'],
                    mkt_class=coordinate['dimensions']['mktClass'],
                    mkt_point=tuple(coordinate['dimensions']
                                    ['mktPoint'].values()),
                    mkt_quoting_style=coordinate['dimensions']
                    ['mktQuotingStyle']) for coordinate in results)
        else:
            raise NotImplementedError('Unsupported return type')
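A minimal usage sketch for the classmethod above, assuming it lives on GsDataApi as in gs-quant; the session credentials and the type/asset filter values are placeholders:

from gs_quant.api.gs.data import GsDataApi
from gs_quant.session import Environment, GsSession

# authenticate first; client_id / client_secret are placeholders
GsSession.use(Environment.PROD, client_id='my_client_id', client_secret='my_client_secret')

# with the default return_type=str this yields a tuple of coordinate names
names = GsDataApi.get_many_coordinates(mkt_type='FX Vol', mkt_asset='USD/JPY', limit=10)
print(names)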
Example #3
def cap_floor_vol(asset: Asset,
                  expiration_tenor: str,
                  relative_strike: float,
                  *,
                  source: str = None,
                  real_time: bool = False) -> Series:
    """
    GS end-of-day implied normal volatility for cap and floor vol matrices.

    :param asset: asset object loaded from security master
    :param expiration_tenor: relative date representation of expiration date on the option e.g. 3m
    :param relative_strike: strike level relative to at the money e.g. 10
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: cap and floor implied normal volatility curve
    """
    if real_time:
        raise NotImplementedError('realtime cap_floor_vol not implemented')

    rate_benchmark_mqid = convert_asset_for_rates_data_set(
        asset, RatesConversionType.DEFAULT_BENCHMARK_RATE)

    _logger.debug('where expiry=%s, strike=%s', expiration_tenor,
                  relative_strike)

    q = GsDataApi.build_market_data_query([rate_benchmark_mqid],
                                          QueryType.CAP_FLOOR_VOL,
                                          where=FieldFilterMap(
                                              expiry=expiration_tenor,
                                              strike=relative_strike),
                                          source=source,
                                          real_time=real_time)

    _logger.debug('q %s', q)
    df = _market_data_timed(q)
    return Series() if df.empty else df['capFloorVol']
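A usage sketch for the measure above; the import paths follow the usual gs-quant layout, the asset lookup is a placeholder and an authenticated GsSession is assumed:

import datetime as dt

from gs_quant.data import DataContext
from gs_quant.markets.securities import AssetIdentifier, SecurityMaster

# resolve a rates benchmark asset from the security master (identifier is a placeholder)
benchmark = SecurityMaster.get_asset('USD', AssetIdentifier.BLOOMBERG_ID)

# 3m expiry, 10bp above at-the-money, over a fixed end-of-day window
with DataContext(dt.date(2020, 1, 1), dt.date(2020, 6, 30)):
    vol = cap_floor_vol(benchmark, '3m', 10)
print(vol.tail())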
Example #4
def basis_swap_term_structure(
    asset: Asset,
    spread_benchmark_type: BenchmarkType = None,
    spread_tenor: str = None,
    reference_benchmark_type: BenchmarkType = None,
    reference_tenor: str = None,
    forward_tenor: str = 'Spot',
    pricing_date: Optional[GENERIC_DATE] = None,
    *,
    source: str = None,
    real_time: bool = False,
) -> Series:
    """
    GS end-of-day Floating-Floating interest rate swap (IRS) term structure across major currencies.


    :param asset: asset object loaded from security master
    :param spread_benchmark_type: benchmark type of spread leg on which basis spread is added e.g. LIBOR
    :param spread_tenor: relative date representation of expiration date of spread leg e.g. 1m
    :param reference_benchmark_type: benchmark type of reference leg e.g. LIBOR
    :param reference_tenor: relative date representation of expiration date of reference leg e.g. 1m
    :param forward_tenor: relative date representation of forward starting point eg: '1y'  or 'Spot' for spot
           starting swaps
    :param pricing_date: YYYY-MM-DD or relative date
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: swap rate curve
    """
    if real_time:
        raise NotImplementedError('realtime basis_swap_rate not implemented')

    currency = CurrencyEnum(asset.get_identifier(AssetIdentifier.BLOOMBERG_ID))
    if currency.value not in ['JPY', 'EUR', 'USD', 'GBP']:
        raise NotImplementedError(
            'Data not available for {} basis swap rates'.format(
                currency.value))

    for benchmark_type in [spread_benchmark_type, reference_benchmark_type]:
        if benchmark_type is not None and \
                benchmark_type.value not in CURRENCY_TO_SWAP_RATE_BENCHMARK[currency.value].keys():
            raise MqValueError('{} is not supported for {}'.format(
                benchmark_type, currency.value))

    for floating_rate_tenor in [spread_tenor, reference_tenor]:
        # the tenors may still be None here; defaults are applied below
        if floating_rate_tenor is not None and \
                not re.fullmatch('(\\d+)([bdwmy])', floating_rate_tenor):
            raise MqValueError('invalid floating rate tenor ' +
                               floating_rate_tenor)

    if forward_tenor == '0b' or forward_tenor is None or forward_tenor == 'Spot':
        forward_tenor = '0d'
    elif not re.fullmatch('(\\d+)([bdwmy])', forward_tenor):
        raise MqValueError('invalid forward tenor ' + forward_tenor)

    # default benchmark types
    legs_w_defaults = dict()
    legs_w_defaults['spread'] = _get_swap_leg_defaults(currency,
                                                       spread_benchmark_type,
                                                       spread_tenor)
    legs_w_defaults['reference'] = _get_swap_leg_defaults(
        currency, reference_benchmark_type, reference_tenor)

    csaTerms = currency.value + '-1'
    clearing_house = 'LCH'

    kwargs = dict(
        type='BasisSwap',
        asset_parameters_payer_rate_option=legs_w_defaults['spread']
        ['benchmark_type'],
        asset_parameters_payer_designated_maturity=legs_w_defaults['spread']
        ['floating_rate_tenor'],
        asset_parameters_receiver_rate_option=legs_w_defaults['reference']
        ['benchmark_type'],
        asset_parameters_receiver_designated_maturity=legs_w_defaults[
            'reference']['floating_rate_tenor'],
        asset_parameters_clearing_house=clearing_house,
        asset_parameters_effective_date=forward_tenor,
        asset_parameters_notional_currency=currency.name,
        pricing_location=legs_w_defaults['spread']['pricing_location'])

    assets = GsAssetApi.get_many_assets(**kwargs)
    if len(assets) == 0:
        raise MqValueError(
            'Specified arguments did not match any asset in the dataset')
    else:
        rate_mqids = [asset.id for asset in assets]

    _logger.debug(
        'where spread_benchmark_type=%s, spread_tenor=%s,  reference_benchmark_type=%s, '
        'reference_tenor=%s, forward_tenor=%s, pricing_location=%s ',
        legs_w_defaults['spread']['benchmark_type'],
        legs_w_defaults['spread']['floating_rate_tenor'],
        legs_w_defaults['reference']['benchmark_type'],
        legs_w_defaults['reference']['floating_rate_tenor'], forward_tenor,
        legs_w_defaults['spread']['pricing_location'])

    start, end = _range_from_pricing_date(assets[0].exchange, pricing_date)
    with DataContext(start, end):
        where = FieldFilterMap(csaTerms=csaTerms)
        q = GsDataApi.build_market_data_query(rate_mqids,
                                              QueryType.BASIS_SWAP_RATE,
                                              where=where,
                                              source=source,
                                              real_time=real_time)
        _logger.debug('q %s', q)
        df = _market_data_timed(q)
    if df.empty:
        return pd.Series()
    latest = df.index.max()
    _logger.info('selected pricing date %s', latest)
    df = df.loc[latest]
    business_day = _get_custom_bd(asset.exchange)
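    # adding and then subtracting a custom business day rolls each expiration date onto a business day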
    df = df.assign(expirationDate=df.index +
                   df['terminationTenor'].map(_to_offset) + business_day -
                   business_day)
    df = df.set_index('expirationDate')
    df.sort_index(inplace=True)
    df = df.loc[DataContext.current.start_date:DataContext.current.end_date]
    return df['basisSwapRate'] if not df.empty else pd.Series()
Example #5
def basis_swap_spread(
    asset: Asset,
    swap_tenor: str = '1y',
    spread_benchmark_type: BenchmarkType = None,
    spread_tenor: str = None,
    reference_benchmark_type: BenchmarkType = None,
    reference_tenor: str = None,
    forward_tenor: str = 'Spot',
    *,
    source: str = None,
    real_time: bool = False,
) -> Series:
    """
    GS end-of-day Floating-Floating interest rate swap (IRS) curves across major currencies.


    :param asset: asset object loaded from security master
    :param swap_tenor: relative date representation of expiration date e.g. 1m
    :param spread_benchmark_type: benchmark type of spread leg on which basis spread is added e.g. LIBOR
    :param spread_tenor: relative date representation of expiration date of paying leg e.g. 1m
    :param reference_benchmark_type: benchmark type of reference leg e.g. LIBOR
    :param reference_tenor: relative date representation of expiration date of reference leg e.g. 1m
    :param forward_tenor: relative date representation of forward starting point eg: '1y' or 'Spot' for Spot
            Starting swap
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: swap rate curve
    """
    if real_time:
        raise NotImplementedError('realtime basis_swap_rate not implemented')

    currency = CurrencyEnum(asset.get_identifier(AssetIdentifier.BLOOMBERG_ID))
    if currency.value not in ['JPY', 'EUR', 'USD', 'GBP']:
        raise NotImplementedError(
            'Data not available for {} basis swap rates'.format(
                currency.value))

    for benchmark_type in [spread_benchmark_type, reference_benchmark_type]:
        if benchmark_type is not None and \
                benchmark_type.value not in CURRENCY_TO_SWAP_RATE_BENCHMARK[currency.value].keys():
            raise MqValueError('{} is not supported for {}'.format(
                benchmark_type, currency.value))

    if not re.fullmatch('(\\d+)([bdwmy])', swap_tenor):
        raise MqValueError('invalid swap tenor ' + swap_tenor)

    for floating_rate_tenor in [spread_tenor, reference_tenor]:
        # the tenors may still be None here; defaults are applied below
        if floating_rate_tenor is not None and \
                not re.fullmatch('(\\d+)([bdwmy])', floating_rate_tenor):
            raise MqValueError('invalid floating rate tenor ' +
                               floating_rate_tenor)

    # default benchmark types
    legs_w_defaults = dict()
    legs_w_defaults['spread'] = _get_swap_leg_defaults(currency,
                                                       spread_benchmark_type,
                                                       spread_tenor)
    legs_w_defaults['reference'] = _get_swap_leg_defaults(
        currency, reference_benchmark_type, reference_tenor)

    if forward_tenor == '0b' or forward_tenor is None or forward_tenor == 'Spot':
        forward_tenor = '0d'
    elif not re.fullmatch('(\\d+)([bdwmy])', forward_tenor):
        raise MqValueError('invalid forward tenor ' + forward_tenor)

    csaTerms = currency.value + '-1'
    clearing_house = 'LCH'

    kwargs = dict(
        type='BasisSwap',
        asset_parameters_termination_date=swap_tenor,
        asset_parameters_payer_rate_option=legs_w_defaults['spread']
        ['benchmark_type'],
        asset_parameters_payer_designated_maturity=legs_w_defaults['spread']
        ['floating_rate_tenor'],
        asset_parameters_receiver_rate_option=legs_w_defaults['reference']
        ['benchmark_type'],
        asset_parameters_receiver_designated_maturity=legs_w_defaults[
            'reference']['floating_rate_tenor'],
        asset_parameters_clearing_house=clearing_house,
        asset_parameters_effective_date=forward_tenor,
        asset_parameters_notional_currency=currency.name,
        pricing_location=legs_w_defaults['spread']['pricing_location'])

    rate_mqid = _convert_asset_for_mdapi_swap_rates(**kwargs)

    _logger.debug(
        'where swap_tenor=%s, spread_benchmark_type=%s, spread_tenor=%s,  reference_benchmark_type=%s, '
        'reference_tenor=%s, forward_tenor=%s, pricing_location=%s ',
        swap_tenor, legs_w_defaults['spread']['benchmark_type'],
        legs_w_defaults['spread']['floating_rate_tenor'],
        legs_w_defaults['reference']['benchmark_type'],
        legs_w_defaults['reference']['floating_rate_tenor'], forward_tenor,
        legs_w_defaults['spread']['pricing_location'])

    where = FieldFilterMap(csaTerms=csaTerms)
    q = GsDataApi.build_market_data_query([rate_mqid],
                                          QueryType.BASIS_SWAP_RATE,
                                          where=where,
                                          source=source,
                                          real_time=real_time)
    _logger.debug('q %s', q)
    df = _market_data_timed(q)

    return Series() if df.empty else df['basisSwapRate']
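A usage sketch for the measure above; BenchmarkType is the enum already imported by the module, while the asset lookup and the session are assumed:

import datetime as dt

from gs_quant.data import DataContext
from gs_quant.markets.securities import AssetIdentifier, SecurityMaster

usd = SecurityMaster.get_asset('USD', AssetIdentifier.BLOOMBERG_ID)  # placeholder lookup

# 5y spread of USD LIBOR 3m vs LIBOR 6m, spot starting, over a fixed window
with DataContext(dt.date(2020, 1, 1), dt.date(2020, 3, 31)):
    spread = basis_swap_spread(usd, swap_tenor='5y',
                               spread_benchmark_type=BenchmarkType.LIBOR, spread_tenor='3m',
                               reference_benchmark_type=BenchmarkType.LIBOR, reference_tenor='6m')
print(spread.tail())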
Example #6
def swap_rate_2(asset: Asset,
                swap_tenor: str,
                benchmark_type: BenchmarkType = None,
                floating_rate_tenor: str = None,
                forward_tenor: str = 'Spot',
                *,
                source: str = None,
                real_time: bool = False) -> Series:
    """
    GS end-of-day Fixed-Floating interest rate swap (IRS) curves across major currencies.


    :param asset: asset object loaded from security master
    :param swap_tenor: relative date representation of expiration date e.g. 1m
    :param benchmark_type: benchmark type e.g. LIBOR
    :param floating_rate_tenor: floating index rate
    :param forward_tenor: relative date representation of forward starting point eg: '1y' or 'Spot' for
    spot starting swaps
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: swap rate curve
    """
    if real_time:
        raise NotImplementedError('realtime swap_rate not implemented')
    currency = CurrencyEnum(asset.get_identifier(AssetIdentifier.BLOOMBERG_ID))

    if currency.value not in ['JPY', 'EUR', 'USD', 'GBP', 'CHF', 'SEK']:
        raise NotImplementedError(
            'Data not available for {} swap rates'.format(currency.value))

    if benchmark_type is not None and \
            benchmark_type.value not in CURRENCY_TO_SWAP_RATE_BENCHMARK[currency.value].keys():
        raise MqValueError('{} is not supported for {}'.format(
            benchmark_type, currency.value))

    defaults = _get_swap_leg_defaults(currency, benchmark_type,
                                      floating_rate_tenor)

    if not re.fullmatch('(\\d+)([bdwmy])', swap_tenor):
        raise MqValueError('invalid swap tenor ' + swap_tenor)

    if not re.fullmatch('(\\d+)([bdwmy])', defaults['floating_rate_tenor']):
        raise MqValueError('invalid floating rate tenor ' +
                           defaults['floating_rate_tenor'])

    if forward_tenor is None or forward_tenor == 'Spot':
        forward_tenor = '0b'
    elif not re.fullmatch('(\\d+)([bdwmy])', forward_tenor):
        raise MqValueError('invalid forward tenor ' + forward_tenor)

    clearing_house = 'LCH'
    csaTerms = currency.value + '-1'
    fixed_rate = 'ATM'
    pay_or_receive = 'Receive'
    kwargs = dict(
        type='Swap',
        asset_parameters_termination_date=swap_tenor,
        asset_parameters_floating_rate_option=defaults['benchmark_type'],
        asset_parameters_fixed_rate=fixed_rate,
        asset_parameters_clearing_house=clearing_house,
        asset_parameters_floating_rate_designated_maturity=defaults[
            'floating_rate_tenor'],
        asset_parameters_effective_date=forward_tenor,
        asset_parameters_pay_or_receive=pay_or_receive,
        asset_parameters_notional_currency=currency.name,
        pricing_location=defaults['pricing_location'])

    rate_mqid = _convert_asset_for_mdapi_swap_rates(**kwargs)

    _logger.debug(
        'where swap_tenor=%s, benchmark_type=%s, floating_rate_tenor=%s, forward_tenor=%s, '
        'pricing_location=%s', swap_tenor, defaults['benchmark_type'],
        defaults['floating_rate_tenor'], forward_tenor,
        defaults['pricing_location'])
    where = FieldFilterMap(csaTerms=csaTerms)
    q = GsDataApi.build_market_data_query([rate_mqid],
                                          QueryType.SWAP_RATE,
                                          where=where,
                                          source=source,
                                          real_time=real_time)
    _logger.debug('q %s', q)
    df = _market_data_timed(q)

    return Series() if df.empty else df['swapRate']
Example #7
def swap_rate(asset: Asset,
              tenor: str,
              benchmark_type: BenchmarkType = None,
              floating_index: str = None,
              *,
              source: str = None,
              real_time: bool = False) -> Series:
    """
    GS end-of-day Fixed-Floating interest rate swap (IRS) curves across major currencies.

    :param asset: asset object loaded from security master
    :param tenor: relative date representation of expiration date e.g. 1m
    :param benchmark_type: benchmark type e.g. LIBOR
    :param floating_index: floating index rate
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: swap rate curve
    """
    if real_time:
        raise NotImplementedError('realtime swap_rate not implemented')

    currency = asset.get_identifier(AssetIdentifier.BLOOMBERG_ID)
    currency = Currency(currency)

    # default benchmark types
    if benchmark_type is None:
        if currency == Currency.EUR:
            benchmark_type = BenchmarkType.EURIBOR
        elif currency == Currency.SEK:
            benchmark_type = BenchmarkType.STIBOR
        else:
            benchmark_type = BenchmarkType.LIBOR

    over_nights = [BenchmarkType.OIS]

    # default floating index
    if floating_index is None:
        if benchmark_type in over_nights:
            floating_index = '1d'
        else:
            if currency in [Currency.USD]:
                floating_index = '3m'
            elif currency in [
                    Currency.GBP, Currency.EUR, Currency.CHF, Currency.SEK
            ]:
                floating_index = '6m'

    mdapi_divider = " " if benchmark_type in over_nights else "-"
    mdapi_floating_index = BenchmarkType.OIS.value if benchmark_type is BenchmarkType.OIS else floating_index
    mdapi = currency.value + mdapi_divider + mdapi_floating_index
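    # e.g. 'USD-3m' for term indices or 'USD OIS' for overnight indices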

    rate_mqid = GsAssetApi.map_identifiers(GsIdType.mdapi, GsIdType.id,
                                           [mdapi])[mdapi]

    _logger.debug('where tenor=%s, floatingIndex=%s', tenor, floating_index)

    q = GsDataApi.build_market_data_query([rate_mqid],
                                          QueryType.SWAP_RATE,
                                          where=FieldFilterMap(tenor=tenor),
                                          source=source,
                                          real_time=real_time)

    _logger.debug('q %s', q)
    df = _market_data_timed(q)
    return Series() if df.empty else df['swapRate']
Example #8
def implied_volatility(asset: Asset,
                       tenor: str,
                       strike_reference: VolReference,
                       relative_strike: Real,
                       *,
                       source: str = None,
                       real_time: bool = False) -> Series:
    """
    Volatility of an asset implied by observations of market prices.

    :param asset: asset object loaded from security master
    :param tenor: relative date representation of expiration date e.g. 1m
    :param strike_reference: reference for strike level
    :param relative_strike: strike relative to reference
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: implied volatility curve
    """
    if asset.asset_class == AssetClass.FX:
        # no ATM support yet
        if relative_strike == 50 and strike_reference in (
                VolReference.DELTA_CALL, VolReference.DELTA_PUT):
            delta_strike = 'DN'
        else:
            if strike_reference == VolReference.DELTA_CALL:
                delta_strike = f'{relative_strike}DC'
            elif strike_reference == VolReference.DELTA_PUT:
                delta_strike = f'{relative_strike}DP'
            elif strike_reference == VolReference.FORWARD:
                if relative_strike == 100:
                    delta_strike = 'ATMF'
                else:
                    raise MqValueError(
                        'Relative strike must be 100 for Forward strike reference'
                    )
            elif strike_reference == VolReference.SPOT:
                if relative_strike == 100:
                    delta_strike = 'ATMS'
                else:
                    raise MqValueError(
                        'Relative strike must be 100 for Spot strike reference'
                    )
            else:
                raise MqValueError('strikeReference: ' +
                                   strike_reference.value +
                                   ' not supported for FX')
        loc_string = 'NYC'
        _logger.debug('where tenor=%s, deltaStrike=%s, location=%s', tenor,
                      delta_strike, loc_string)
        where = FieldFilterMap(tenor=tenor,
                               deltaStrike=delta_strike,
                               location=loc_string)
        q = GsDataApi.build_market_data_query([asset.get_marquee_id()],
                                              QueryType.IMPLIED_VOLATILITY,
                                              where=where,
                                              source=source,
                                              real_time=real_time)
        _logger.debug('q %s', q)
        df = _market_data_timed(q)
        if df.empty:
            reversed_cross = _reverse_cross(asset.name)
            q = GsDataApi.build_market_data_query(
                [reversed_cross.get_marquee_id()],
                QueryType.IMPLIED_VOLATILITY,
                where=where,
                source=source,
                real_time=real_time)
            _logger.debug('q %s', q)
            df = _market_data_timed(q)
    else:
        if strike_reference == VolReference.DELTA_PUT:
            relative_strike = abs(100 - relative_strike)
        relative_strike = relative_strike if strike_reference == VolReference.NORMALIZED else relative_strike / 100
        ref_string = "delta" if strike_reference in (
            VolReference.DELTA_CALL,
            VolReference.DELTA_PUT) else strike_reference.value

        _logger.debug('where tenor=%s, strikeReference=%s, relativeStrike=%s',
                      tenor, ref_string, relative_strike)
        where = FieldFilterMap(tenor=tenor,
                               strikeReference=ref_string,
                               relativeStrike=relative_strike)
        q = GsDataApi.build_market_data_query([asset.get_marquee_id()],
                                              QueryType.IMPLIED_VOLATILITY,
                                              where=where,
                                              source=source,
                                              real_time=real_time)
        _logger.debug('q %s', q)
        df = _market_data_timed(q)
    return Series() if df.empty else df['impliedVolatility']
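A usage sketch exercising both branches of the measure above (FX assets take delta strikes, other assets a strike reference); the security-master lookups are placeholders and an authenticated GsSession is assumed:

from gs_quant.markets.securities import AssetIdentifier, SecurityMaster

# FX branch: 3m 25-delta call vol on a cross
eurusd = SecurityMaster.get_asset('EURUSD', AssetIdentifier.BLOOMBERG_ID)
fx_vol = implied_volatility(eurusd, '3m', VolReference.DELTA_CALL, 25)

# equity branch: 1m vol at 90% of spot
spx = SecurityMaster.get_asset('SPX', AssetIdentifier.BLOOMBERG_ID)
eq_vol = implied_volatility(spx, '1m', VolReference.SPOT, 90)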
Example #9
def skew(asset: Asset,
         tenor: str,
         strike_reference: SkewReference,
         distance: Real,
         *,
         location: str = 'NYC',
         source: str = None,
         real_time: bool = False) -> Series:
    """
    Difference in implied volatility of equidistant out-of-the-money put and call options.

    :param asset: asset object loaded from security master
    :param tenor: relative date representation of expiration date e.g. 1m
    :param strike_reference: reference for strike level (for equities)
    :param distance: distance from at-the-money option
    :param location: location at which a price fixing has been taken (for FX assets)
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: skew curve
    """
    if real_time:
        raise MqValueError('real-time skew not supported')

    if strike_reference in (SkewReference.DELTA, None):
        b = 50
    elif strike_reference == SkewReference.NORMALIZED:
        b = 0
    else:
        b = 100

    kwargs = {}
    if strike_reference in (SkewReference.DELTA, None):
        # using delta call strikes so X DP is represented as (100 - X) DC
        q_strikes = [100 - distance, distance, b]
    else:
        q_strikes = [b - distance, b + distance, b]

    if asset.asset_class == AssetClass.FX:
        q_strikes = _to_fx_strikes(q_strikes)
        kwargs['location'] = location
        column = 'deltaStrike'  # should use SkewReference.DELTA for FX
    else:
        assert asset.asset_class == AssetClass.Equity
        if not strike_reference:
            raise MqTypeError('strike reference required for equities')
        if strike_reference != SkewReference.NORMALIZED:
            q_strikes = [x / 100 for x in q_strikes]
        kwargs['strikeReference'] = strike_reference.value
        column = 'relativeStrike'

    kwargs[column] = q_strikes
    _logger.debug('where tenor=%s and %s', tenor, kwargs)
    where = FieldFilterMap(tenor=tenor, **kwargs)
    q = GsDataApi.build_market_data_query([asset.get_marquee_id()],
                                          QueryType.IMPLIED_VOLATILITY,
                                          where=where,
                                          source=source)
    _logger.debug('q %s', q)
    df = _market_data_timed(q)

    if df.empty:
        return pd.Series()

    curves = {k: v for k, v in df.groupby(column)}
    if len(curves) < 3:
        raise MqValueError('skew not available for given inputs')
    series = [curves[qs]['impliedVolatility'] for qs in q_strikes]
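    # skew = (put-side vol - call-side vol) normalised by the at-the-money vol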
    return (series[0] - series[1]) / series[2]
Example #10
def bucketize_price(asset: Asset,
                    price_method: str,
                    price_component: str,
                    bucket: str = '7x24',
                    granularity: str = 'daily',
                    *,
                    source: str = None,
                    real_time: bool = True) -> pd.Series:
    """'
    Bucketized Elec Historical Clears

    :param asset: asset object loaded from security master
    :param price_method: price method between LMP and MCP: Default value = LMP
    :param price_component: price type among totalPrice, energy, loss and congestion: Default value = totalPrice
    :param bucket: bucket type among '7x24', 'peak', 'offpeak', '2x16h' and '7x8': Default value = 7x24
    :param granularity: daily or monthly: default value = daily
    :param source: name of function caller: default source = None
    :param real_time: whether to retrieve intraday data instead of EOD: default value = True
    :return: Bucketized Elec Historical Clears
    """

    # create granularity indicator
    if granularity.lower() in ['daily', 'd']:
        granularity = 'D'
    elif granularity.lower() in ['monthly', 'm']:
        granularity = 'M'
    else:
        raise ValueError('Invalid granularity: ' + granularity +
                         '. Expected Value: daily or monthly.')

    start_date, end_date = DataContext.current.start_date, DataContext.current.end_date
    where = FieldFilterMap(priceMethod=price_method,
                           priceComponent=price_component)

    with DataContext(start_date, end_date + datetime.timedelta(days=2)):
        q = GsDataApi.build_market_data_query([asset.get_marquee_id()],
                                              QueryType.PRICE,
                                              where=where,
                                              source=source,
                                              real_time=True)
        df = _market_data_timed(q)
        _logger.debug('q %s', q)

    # TODO: get timezone info from Asset
    # default frequency definition
    df = df.tz_convert('US/Eastern')
    peak_start = 7
    peak_end = 23
    weekends = [5, 6]
    bbid = Asset.get_identifier(asset, AssetIdentifier.BLOOMBERG_ID)
    if bbid.split(" ")[0] in ['MISO', 'CAISO', 'ERCOT', 'SPP']:
        df = df.tz_convert('US/Central')
        peak_start = 6
        peak_end = 22
    if bbid.split(" ")[0] == 'CAISO':
        df = df.tz_convert('US/Pacific')
        weekends = [6]

    start_time, end_time = pd.to_datetime(
        start_date), pd.to_datetime(end_date) + datetime.timedelta(hours=23)
    df['month'] = df.index.month
    df['date'] = df.index.date
    df['day'] = df.index.dayofweek
    df['hour'] = df.index.hour
    holidays = NercCalendar().holidays(start=start_date, end=end_date).date

    # checking missing data points
    ref_hour_range = pd.date_range(start_time,
                                   end_time,
                                   freq='1h',
                                   tz='US/Eastern')
    missing_hours = ref_hour_range[~ref_hour_range.isin(df.index)]
    missing_dates = np.unique(missing_hours.date)
    missing_months = np.unique(missing_hours.month)

    # drop dates and months which have missing data
    df = df.loc[(~df['date'].isin(missing_dates))]
    if granularity == 'M':
        df = df.loc[(~df['month'].isin(missing_months))]

    # TODO: get frequency definition from SecDB
    if bucket.lower() == '7x24':
        pass
    # offpeak: 11pm-7am & weekend & holiday
    elif bucket.lower() == 'offpeak':
        df = df.loc[df['date'].isin(holidays) | df['day'].isin(weekends) |
                    (~df['date'].isin(holidays) & ~df['day'].isin(weekends) &
                     ((df['hour'] < peak_start)
                      | (df['hour'] > peak_end - 1)))]
    # peak: 7am to 11pm on weekdays
    elif bucket.lower() == 'peak':
        df = df.loc[(~df['date'].isin(holidays)) & (~df['day'].isin(weekends))
                    & (df['hour'] > peak_start - 1)
                    & (df['hour'] < peak_end)]
    # 7x8: 11pm to 7am
    elif bucket.lower() == '7x8':
        df = df.loc[(df['hour'] < peak_start) | (df['hour'] > peak_end - 1)]
    # 2x16h: weekends & holidays
    elif bucket.lower() == '2x16h':
        df = df.loc[((df['date'].isin(holidays)) | df['day'].isin(weekends))
                    & ((df['hour'] > peak_start - 1)
                       & (df['hour'] < peak_end))]
    else:
        raise ValueError('Invalid bucket: ' + bucket +
                         '. Expected Value: peak, offpeak, 7x24, 7x8, 2x16h.')

    df = df['price'].resample(granularity).mean()
    df.index = df.index.date
    df = df.loc[start_date:end_date]
    return df
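A usage sketch for the bucketing measure above; the hub identifier is a placeholder and intraday entitlements are needed because the query runs with real_time=True:

import datetime

from gs_quant.data import DataContext
from gs_quant.markets.securities import AssetIdentifier, SecurityMaster

hub = SecurityMaster.get_asset('MISO INDIANA HUB', AssetIdentifier.BLOOMBERG_ID)  # placeholder lookup

# daily average peak-hour LMP total price for January 2021
with DataContext(datetime.date(2021, 1, 1), datetime.date(2021, 1, 31)):
    peak = bucketize_price(hub, 'LMP', 'totalPrice', bucket='peak', granularity='daily')
print(peak)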
Example #11
def swap_term_structure(asset: Asset,
                        benchmark_type: BenchmarkType = None,
                        floating_rate_tenor: str = None,
                        forward_tenor: Optional[GENERIC_DATE] = None,
                        pricing_date: Optional[GENERIC_DATE] = None,
                        *,
                        source: str = None,
                        real_time: bool = False) -> Series:
    """
    GS end-of-day Fixed-Floating interest rate swap (IRS) term structure across major currencies.

    :param asset: asset object loaded from security master
    :param benchmark_type: benchmark type e.g. LIBOR
    :param floating_rate_tenor: floating index rate
    :param forward_tenor: absolute / relative date representation of forward starting point eg: '1y' or 'Spot' for
    spot starting swaps, 'imm1' or 'frb1'
    :param pricing_date: YYYY-MM-DD or relative date
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: swap rate term structure
    """
    if real_time:
        raise NotImplementedError('realtime swap_rate not implemented')

    currency = asset.get_identifier(AssetIdentifier.BLOOMBERG_ID)
    currency = CurrencyEnum(currency)
    if currency.value not in ['JPY', 'EUR', 'USD', 'GBP', 'CHF', 'SEK']:
        raise NotImplementedError(
            'Data not available for {} swap rates'.format(currency.value))
    clearing_house = 'LCH'

    _check_benchmark_type(currency, benchmark_type)

    forward_tenor = check_forward_tenor(forward_tenor)

    defaults = _get_swap_leg_defaults(currency, benchmark_type,
                                      floating_rate_tenor)

    if not re.fullmatch('(\\d+)([bdwmy])', defaults['floating_rate_tenor']):
        raise MqValueError('invalid floating rate tenor ' +
                           defaults['floating_rate_tenor'] + ' for index: ' +
                           defaults['benchmark_type'])

    calendar = defaults['pricing_location'].value
    if pricing_date is not None and pricing_date in list(
            GsCalendar.get(calendar).holidays):
        raise MqValueError(
            'Specified pricing date is a holiday in {} calendar'.format(
                calendar))

    csaTerms = currency.value + '-1'
    fixed_rate = 'ATM'
    kwargs = dict(
        type='Swap',
        asset_parameters_floating_rate_option=defaults['benchmark_type'],
        asset_parameters_fixed_rate=fixed_rate,
        asset_parameters_clearing_house=clearing_house,
        asset_parameters_floating_rate_designated_maturity=defaults[
            'floating_rate_tenor'],
        asset_parameters_effective_date=forward_tenor,
        asset_parameters_notional_currency=currency.name,
        pricing_location=defaults['pricing_location'].value)

    assets = GsAssetApi.get_many_assets(**kwargs)
    if len(assets) == 0:
        raise MqValueError(
            'Specified arguments did not match any asset in the dataset')
    else:
        rate_mqids = [asset.id for asset in assets]

    _logger.debug('assets returned %s', ', '.join(rate_mqids))

    _logger.debug(
        'where benchmark_type=%s, floating_rate_tenor=%s, forward_tenor=%s, '
        'pricing_location=%s', defaults['benchmark_type'],
        defaults['floating_rate_tenor'], forward_tenor,
        defaults['pricing_location'].value)

    start, end = _range_from_pricing_date(calendar, pricing_date)
    with DataContext(start, end):
        where = FieldFilterMap(csaTerms=csaTerms)
        q = GsDataApi.build_market_data_query(rate_mqids,
                                              QueryType.SWAP_RATE,
                                              where=where,
                                              source=source,
                                              real_time=real_time)
        _logger.debug('q %s', q)
        df = _market_data_timed(q)

    if df.empty:
        return pd.Series()
    latest = df.index.max()
    _logger.info('selected pricing date %s', latest)
    df = df.loc[latest]
    business_day = _get_custom_bd(calendar)
    df = df.assign(expirationDate=df.index +
                   df['terminationTenor'].map(_to_offset) + business_day -
                   business_day)
    df = df.set_index('expirationDate')
    df.sort_index(inplace=True)
    df = df.loc[DataContext.current.start_date:DataContext.current.end_date]
    return df['swapRate'] if not df.empty else pd.Series()
Example #12
def basis_swap_term_structure(asset: Asset, spread_benchmark_type: BenchmarkType = None, spread_tenor: str = None,
                              reference_benchmark_type: BenchmarkType = None, reference_tenor: str = None,
                              forward_tenor: Optional[GENERIC_DATE] = None,
                              clearing_house: _ClearingHouse = None,
                              pricing_date: Optional[GENERIC_DATE] = None,
                              *, source: str = None, real_time: bool = False, ) -> Series:
    """
    GS end-of-day Floating-Floating interest rate swap (IRS) term structure across major currencies.


    :param asset: asset object loaded from security master
    :param spread_benchmark_type: benchmark type of spread leg on which basis spread is added e.g. LIBOR
    :param spread_tenor: relative date representation of expiration date of spread leg e.g. 1m
    :param reference_benchmark_type: benchmark type of reference leg e.g. LIBOR
    :param reference_tenor: relative date representation of expiration date of reference leg e.g. 1m
    :param forward_tenor: absolute / relative date representation of forward starting point eg: '1y' or 'Spot' for
    spot starting swaps, 'imm1' or 'frb1'
    :param clearing_house: Example - "LCH", "EUREX", "JSCC", "CME"
    :param pricing_date: YYYY-MM-DD or relative date
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: swap rate curve
    """
    if real_time:
        raise NotImplementedError('realtime basis_swap_rate not implemented')

    currency = CurrencyEnum(asset.get_identifier(AssetIdentifier.BLOOMBERG_ID))
    if currency.value not in ['JPY', 'EUR', 'USD', 'GBP']:
        raise NotImplementedError('Data not available for {} basis swap rates'.format(currency.value))

    clearing_house = _check_clearing_house(clearing_house)

    for benchmark_type in [spread_benchmark_type, reference_benchmark_type]:
        _check_benchmark_type(currency, benchmark_type)

    # default benchmark types
    legs_w_defaults = dict()
    legs_w_defaults['spread'] = _get_swap_leg_defaults(currency, spread_benchmark_type, spread_tenor)
    legs_w_defaults['reference'] = _get_swap_leg_defaults(currency, reference_benchmark_type, reference_tenor)

    for key, leg in legs_w_defaults.items():
        if not re.fullmatch('(\\d+)([bdwmy])', leg['floating_rate_tenor']):
            raise MqValueError('invalid floating rate tenor ' + leg['floating_rate_tenor'] + ' index: ' +
                               leg['benchmark_type'])

    forward_tenor = check_forward_tenor(forward_tenor)

    calendar = legs_w_defaults['spread']['pricing_location'].value
    if pricing_date is not None and pricing_date in list(GsCalendar.get(calendar).holidays):
        raise MqValueError('Specified pricing date is a holiday in {} calendar'.format(calendar))

    csaTerms = currency.value + '-1'

    kwargs = dict(type='BasisSwap', asset_parameters_payer_rate_option=legs_w_defaults['spread']['benchmark_type'],
                  asset_parameters_payer_designated_maturity=legs_w_defaults['spread']['floating_rate_tenor'],
                  asset_parameters_receiver_rate_option=legs_w_defaults['reference']['benchmark_type'],
                  asset_parameters_receiver_designated_maturity=legs_w_defaults['reference']['floating_rate_tenor'],
                  asset_parameters_clearing_house=clearing_house.value, asset_parameters_effective_date=forward_tenor,
                  asset_parameters_notional_currency=currency.name,
                  pricing_location=legs_w_defaults['spread']['pricing_location'].value)

    assets = GsAssetApi.get_many_assets(**kwargs)
    if len(assets) == 0:
        raise MqValueError('Specified arguments did not match any asset in the dataset')
    else:
        rate_mqids = [asset.id for asset in assets]

    _logger.debug('assets returned %s', ', '.join(rate_mqids))

    _logger.debug('where spread_benchmark_type=%s, spread_tenor=%s,  reference_benchmark_type=%s, '
                  'reference_tenor=%s, forward_tenor=%s, pricing_location=%s ',
                  legs_w_defaults['spread']['benchmark_type'], legs_w_defaults['spread']['floating_rate_tenor'],
                  legs_w_defaults['reference']['benchmark_type'], legs_w_defaults['reference']['floating_rate_tenor'],
                  forward_tenor, legs_w_defaults['spread']['pricing_location'].value)

    start, end = _range_from_pricing_date(calendar, pricing_date)
    with DataContext(start, end):
        where = FieldFilterMap(csaTerms=csaTerms)
        q = GsDataApi.build_market_data_query(rate_mqids, QueryType.BASIS_SWAP_RATE, where=where,
                                              source=source, real_time=real_time)
        _logger.debug('q %s', q)
        df = _market_data_timed(q)

    if df.empty:
        series = ExtendedSeries()
    else:
        latest = df.index.max()
        _logger.info('selected pricing date %s', latest)
        df = df.loc[latest]
        business_day = _get_custom_bd(calendar)
        df = df.assign(expirationDate=df.index + df['terminationTenor'].map(_to_offset) + business_day - business_day)
        df = df.set_index('expirationDate')
        df.sort_index(inplace=True)
        df = df.loc[DataContext.current.start_date: DataContext.current.end_date]
        series = ExtendedSeries() if df.empty else ExtendedSeries(df['basisSwapRate'])
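    # carry the source dataset ids, if the DataFrame exposes them, onto the returned series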
    series.dataset_ids = getattr(df, 'dataset_ids', ())
    return series
Example #13
def basis_swap_spread(asset: Asset, swap_tenor: str = '1y',
                      spread_benchmark_type: BenchmarkType = None, spread_tenor: str = None,
                      reference_benchmark_type: BenchmarkType = None, reference_tenor: str = None,
                      forward_tenor: Optional[GENERIC_DATE] = None, clearing_house: _ClearingHouse = None, *,
                      source: str = None, real_time: bool = False, ) -> Series:
    """
    GS end-of-day Floating-Floating interest rate swap (IRS) curves across major currencies.


    :param asset: asset object loaded from security master
    :param swap_tenor: relative date representation of expiration date e.g. 1m
    :param spread_benchmark_type: benchmark type of spread leg on which basis spread is added e.g. LIBOR
    :param spread_tenor: relative date representation of expiration date of paying leg e.g. 1m
    :param reference_benchmark_type: benchmark type of reference leg e.g. LIBOR
    :param reference_tenor: relative date representation of expiration date of reference leg e.g. 1m
    :param forward_tenor: absolute / relative date representation of forward starting point eg: '1y' or 'Spot' for
    spot starting swaps, 'imm1' or 'frb1'
    :param clearing_house: Example - "LCH", "EUREX", "JSCC", "CME"
    :param source: name of function caller
    :param real_time: whether to retrieve intraday data instead of EOD
    :return: swap rate curve
    """
    if real_time:
        raise NotImplementedError('realtime basis_swap_rate not implemented')

    currency = CurrencyEnum(asset.get_identifier(AssetIdentifier.BLOOMBERG_ID))
    if currency.value not in ['JPY', 'EUR', 'USD', 'GBP']:
        raise NotImplementedError('Data not available for {} basis swap rates'.format(currency.value))

    clearing_house = _check_clearing_house(clearing_house)

    for benchmark_type in [spread_benchmark_type, reference_benchmark_type]:
        _check_benchmark_type(currency, benchmark_type)

    if not (re.fullmatch('(\\d+)([bdwmy])', swap_tenor) or re.fullmatch('(frb[1-9])', swap_tenor)):
        raise MqValueError('invalid swap tenor ' + swap_tenor)

    # default benchmark types
    legs_w_defaults = dict()
    legs_w_defaults['spread'] = _get_swap_leg_defaults(currency, spread_benchmark_type, spread_tenor)
    legs_w_defaults['reference'] = _get_swap_leg_defaults(currency, reference_benchmark_type, reference_tenor)

    for key, leg in legs_w_defaults.items():
        if not re.fullmatch('(\\d+)([bdwmy])', leg['floating_rate_tenor']):
            raise MqValueError('invalid floating rate tenor ' + leg['floating_rate_tenor'] + ' index: ' +
                               leg['benchmark_type'])

    forward_tenor = check_forward_tenor(forward_tenor)
    csaTerms = currency.value + '-1'
    kwargs = dict(type='BasisSwap', asset_parameters_termination_date=swap_tenor,
                  asset_parameters_payer_rate_option=legs_w_defaults['spread']['benchmark_type'],
                  asset_parameters_payer_designated_maturity=legs_w_defaults['spread']['floating_rate_tenor'],
                  asset_parameters_receiver_rate_option=legs_w_defaults['reference']['benchmark_type'],
                  asset_parameters_receiver_designated_maturity=legs_w_defaults['reference']['floating_rate_tenor'],
                  asset_parameters_clearing_house=clearing_house.value, asset_parameters_effective_date=forward_tenor,
                  asset_parameters_notional_currency=currency.name,
                  pricing_location=legs_w_defaults['spread']['pricing_location'].value)

    rate_mqid = _convert_asset_for_mdapi_swap_rates(**kwargs)

    _logger.debug('where asset=%s, swap_tenor=%s, spread_benchmark_type=%s, spread_tenor=%s, '
                  'reference_benchmark_type=%s, reference_tenor=%s, forward_tenor=%s, pricing_location=%s ',
                  rate_mqid, swap_tenor, legs_w_defaults['spread']['benchmark_type'],
                  legs_w_defaults['spread']['floating_rate_tenor'],
                  legs_w_defaults['reference']['benchmark_type'], legs_w_defaults['reference']['floating_rate_tenor'],
                  forward_tenor, legs_w_defaults['spread']['pricing_location'].value)

    where = FieldFilterMap(csaTerms=csaTerms)
    q = GsDataApi.build_market_data_query([rate_mqid], QueryType.BASIS_SWAP_RATE, where=where, source=source,
                                          real_time=real_time)
    _logger.debug('q %s', q)
    df = _market_data_timed(q)

    series = ExtendedSeries() if df.empty else ExtendedSeries(df['basisSwapRate'])
    series.dataset_ids = getattr(df, 'dataset_ids', ())
    return series
Example #14
    @classmethod
    def get_factor_exposure(cls, report_id: str, factor: str, factor_category: str, start_date: dt.date = None,
                            end_date: dt.date = None):
        where = FieldFilterMap(report_id=report_id, factor=factor, factor_category=factor_category)
        query = DataQuery(where=where, start_date=start_date, end_date=end_date)
        return GsDataApi.query_data(query=query, dataset_id=PFR_DATASET)
Example #15
    @classmethod
    def get_measure(cls, report_id: str, field: str, start_date: dt.date = None, end_date: dt.date = None):
        fields = [field]
        where = FieldFilterMap(report_id=report_id)
        query = DataQuery(where=where, fields=fields, start_date=start_date, end_date=end_date)
        return GsDataApi.query_data(query=query, dataset_id=PPA_DATASET)
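A standalone sketch equivalent to the report helpers above; the dataset id, report id and field name are placeholders, and the import paths follow the usual gs-quant layout:

import datetime as dt

from gs_quant.api.gs.data import GsDataApi
from gs_quant.target.common import FieldFilterMap
from gs_quant.target.data import DataQuery

# pull one field of a (hypothetical) report dataset, filtered by report id
where = FieldFilterMap(report_id='RP123ABC')
query = DataQuery(where=where, fields=['pnl'],
                  start_date=dt.date(2021, 1, 1), end_date=dt.date(2021, 3, 31))
data = GsDataApi.query_data(query=query, dataset_id='REPORT_DATASET_ID')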