Example #1
def test_equals():
    dimensions_a = {
        DataDimension.TENOR: '1m',
        DataDimension.STRIKE_REFERENCE: 'spot',
        DataDimension.RELATIVE_STRIKE: .8
    }

    # Same dimensions but different order
    dimensions_b = {
        DataDimension.RELATIVE_STRIKE: .8,
        DataDimension.STRIKE_REFERENCE: 'spot',
        DataDimension.TENOR: '1m'
    }

    coord_a = DataCoordinate('EDRVOL_PERCENT_STOCK_STANDARD',
                             DataMeasure.IMPLIED_VOLATILITY,
                             dimensions_a)

    coord_b = DataCoordinate('EDRVOL_PERCENT_STOCK_STANDARD',
                             DataMeasure.IMPLIED_VOLATILITY,
                             dimensions_b)

    assert coord_a == coord_b
    assert hash(coord_a) == hash(coord_b)


def test_equals_measure_str():
    coord_a = DataCoordinate(dataset_id='EDRVOL_PERCENT_STOCK_STANDARD',
                             measure=DataMeasure.IMPLIED_VOLATILITY)

    coord_b = DataCoordinate(dataset_id='EDRVOL_PERCENT_STOCK_STANDARD',
                             measure='impliedVolatility')
    assert coord_a == coord_b
Example #3
def test_rdate_datagrid(mocker):
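    # Point the default GsSession at a QA test session.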
    mocker.patch.object(GsSession.__class__, 'default_value',
                        return_value=GsSession.get(Environment.QA, 'client_id', 'secret'))
    name = 'Testing'
    SPX = get_test_entity('MA4B66MW5E27U8P32SB')
    close = DataCoordinate(
        measure=DataMeasure.CLOSE_PRICE,
        frequency=DataFrequency.DAILY,
    )

    last_trade_price = DataCoordinate(
        measure=DataMeasure.TRADE_PRICE,
        frequency=DataFrequency.REAL_TIME,
    )
    rows = [
        DataRow(SPX),
    ]
    columns = [
        DataColumn(name="1d Chg (RT)",
                   processor=ChangeProcessor(AppendProcessor(close, last_trade_price,
                                                             start=RelativeDate("-1d",
                                                                                base_date=date(2021, 1, 22)))))
    ]

    datagrid = DataGrid(name=name, rows=rows, columns=columns)
    start_date = datagrid.columns[0].processor.children['a'].start
    assert start_date.base_date == RelativeDate('-1d', base_date=date(2021, 1, 22)).base_date
    assert start_date.rule == RelativeDate('-1d').rule

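    # After initialize() and poll(), the grid's data query should start at the
    # base date (2021-01-22) shifted by the '-1d' rule, i.e. 2021-01-21.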
    datagrid.initialize()
    datagrid.poll()
    assert str(datagrid._data_queries[0].query.start) == '2021-01-21'

    as_dict = datagrid.as_dict()
    start = as_dict['parameters']['columns'][0]['parameters']['a']['parameters']['start']
    assert start['type'] == 'relativeDate'
    assert start['value'] == {'rule': '-1d', 'baseDate': '2021-01-22'}

    # Check that base_date is not persisted when not passed in.
    columns = [
        DataColumn(name="1d Chg (RT)",
                   processor=ChangeProcessor(AppendProcessor(close, last_trade_price,
                                                             start=RelativeDate("-1d"))))
    ]
    datagrid = DataGrid(name=name, rows=rows, columns=columns)
    as_dict = datagrid.as_dict()
    start = as_dict['parameters']['columns'][0]['parameters']['a']['parameters']['start']
    assert start['type'] == 'relativeDate'
    assert start['value'] == {'rule': '-1d'}
Example #4
def test_immutability():
    dimensions = {
        DataDimension.TENOR: '1m',
        DataDimension.STRIKE_REFERENCE: 'Delta',
        DataDimension.RELATIVE_STRIKE: 50
    }

    coord1 = DataCoordinate(Dataset.GS.EDRVOL_PERCENT_STANDARD, DataMeasure.IMPLIED_VOLATILITY, dimensions)
    coord2 = DataCoordinate(Dataset.GS.EDRVOL_PERCENT_STANDARD, DataMeasure.IMPLIED_VOLATILITY, dimensions)

    assert id(coord1) != id(coord2)
    assert coord1 == coord2

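    # DataCoordinate is immutable: reassigning its attributes should raise AttributeError.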
    with pytest.raises(AttributeError):
        coord1.dataset_id = 'test'

    with pytest.raises(AttributeError):
        coord1.dimensions = {}

    with pytest.raises(AttributeError):
        coord1.measure = 'test'

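    # Mutating the source dict after construction should not change coordinates already built from it.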
    dimensions[DataDimension.TENOR] = '2m'

    coord3 = DataCoordinate(Dataset.GS.EDRVOL_PERCENT_STANDARD, DataMeasure.IMPLIED_VOLATILITY, dimensions)

    assert id(coord1) != id(coord2)
    assert coord1 != coord3
Example #5
    def get_data_coordinate(self,
                            measure: Union[DataMeasure, str],
                            dimensions: Optional[DataDimensions] = None,
                            frequency: DataFrequency = DataFrequency.DAILY,
                            availability=None) -> DataCoordinate:
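        # Look up which dataset provides this measure at the requested frequency,
        # then build a DataCoordinate that includes the entity's id as a dimension.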
        id_ = self.get_marquee_id()
        dimensions = dimensions or {}
        dimensions[self.data_dimension] = id_
        measure = measure if isinstance(measure, str) else measure.value
        available: Dict = GsDataApi.get_data_providers(id_, availability).get(measure, {})

        if frequency == DataFrequency.DAILY:
            daily_dataset_id = available.get(DataFrequency.DAILY)
            return DataCoordinate(dataset_id=daily_dataset_id, measure=measure, dimensions=dimensions,
                                  frequency=frequency)
        if frequency == DataFrequency.REAL_TIME:
            rt_dataset_id = available.get(DataFrequency.REAL_TIME)
            return DataCoordinate(dataset_id=rt_dataset_id, measure=measure, dimensions=dimensions,
                                  frequency=frequency)
Example #6
    def from_dict(cls, obj, reference_list):
        parsed_dimensions = {}
        data_dimension_map = DataDimension._value2member_map_
        # Map known dimension keys onto the DataDimension enum; keep unknown keys as plain strings.
        for key, value in obj.get('dimensions', {}).items():
            if key in data_dimension_map:
                parsed_dimensions[DataDimension(key)] = value
            else:
                parsed_dimensions[key] = value
        return DimensionsOverride(column_names=obj.get('columnNames', []),
                                  dimensions=parsed_dimensions,
                                  coordinate=DataCoordinate.from_dict(obj.get('coordinate', {})))
Example #7
    def from_dict(cls, obj: Dict, reference_list: List):
        processor_name: str = obj.get(PROCESSOR_NAME)
        # Dynamically import the processor class for instantiation.
        processor = getattr(
            __import__('gs_quant.analytics.processors', fromlist=['']),
            processor_name, None)

        parameters = obj.get(PARAMETERS, {})

        local_reference_list = []
        arguments = {}

        for parameter, parameters_dict in parameters.items():
            # Loop through all the parameters and turn them into objects based on their dictionary values.
            # More complex objects such as DataCoordinate and processors are handled recursively.
            parameter_type: str = parameters_dict.get(TYPE)
            if parameter_type == DATA_COORDINATE:
                # Handle the DataCoordinate parameters
                arguments[parameter] = DataCoordinate.from_dict(
                    parameters_dict)
            elif parameter_type == PROCESSOR:
                # Handle the BaseProcessor parameters
                arguments[parameter] = BaseProcessor.from_dict(
                    parameters_dict, reference_list)
            elif parameter_type == ENTITY:
                # Handle the entity parameter and record it in the reference list to be resolved later
                local_reference_list.append({
                    TYPE: PROCESSOR,
                    ENTITY_ID: parameters_dict.get(ENTITY_ID),
                    ENTITY_TYPE: parameters_dict.get(ENTITY_TYPE),
                    PARAMETER: parameter
                })

                arguments[parameter] = None
            elif parameter_type in (DATE, DATETIME, RELATIVE_DATE):
                # Handle date/datetime parameters
                if parameter_type == DATE:
                    arguments[parameter] = datetime.strptime(
                        parameters_dict.get(VALUE), '%Y-%m-%d').date()
                elif parameter_type == RELATIVE_DATE:
                    val = parameters_dict.get(VALUE)
                    base_date = val.get('baseDate')
                    base_date = datetime.strptime(
                        base_date, '%Y-%m-%d').date() if base_date else None
                    arguments[parameter] = RelativeDate(rule=val['rule'],
                                                        base_date=base_date)
                else:
                    arguments[parameter] = datetime.strptime(
                        parameters_dict.get(VALUE)[0:-1],
                        '%Y-%m-%dT%H:%M:%S.%f')
            else:
                # Handle all other objects, which should be mapped in PARSABLE_OBJECT_MAP
                if parameter_type in PARSABLE_OBJECT_MAP:
                    parameter_obj = PARSABLE_OBJECT_MAP[parameter_type]
                    if isinstance(parameter_obj, (Enum, EnumMeta)):
                        arguments[parameter] = parameter_obj(
                            parameters_dict.get(VALUE, {}))
                    else:
                        arguments[parameter] = parameter_obj.from_dict(
                            parameters_dict.get(VALUE, {}))
                else:
                    # Handle built-in types that are stored natively
                    arguments[parameter] = parameters_dict.get(VALUE)

        # Instantiate the processor with all collected arguments
        processor = processor(**arguments)

        # Add all the references to entities to the list which will be resolved later
        for reference in local_reference_list:
            reference[REFERENCE] = processor

        reference_list.extend(local_reference_list)
        return processor