import math

from gs_quant.api.gs.risk_models import GsFactorRiskModelApi


def batch_and_upload_partial_data(model_id: str, data: dict):
    """ Takes in total risk model data for one day and batches requests according to
    asset data size, printing the response from each resulting post call """
    date = data.get('date')
    target_universe_size = len(data.get('assetData').get('universe'))
    factor_data = {
        'date': date,
        'factorData': data.get('factorData'),
        'covarianceMatrix': data.get('covarianceMatrix')
    }
    print('Uploading factor data')
    print(
        GsFactorRiskModelApi.upload_risk_model_data(model_id,
                                                    factor_data,
                                                    partial_upload=True))
    split_num = max(math.ceil(target_universe_size / 20000), 1)
    split_idx = math.ceil(target_universe_size / split_num)
    for i in range(split_num):
        end_idx = (i + 1) * split_idx if split_num != i + 1 else target_universe_size
        asset_data_subset = {
            'universe': data.get('assetData').get('universe')[i * split_idx:end_idx],
            'specificRisk': data.get('assetData').get('specificRisk')[i * split_idx:end_idx],
            'factorExposure': data.get('assetData').get('factorExposure')[i * split_idx:end_idx]
        }
        optional_asset_inputs = ['totalRisk', 'historicalBeta']
        for optional_input in optional_asset_inputs:
            if data.get('assetData').get(optional_input):
                asset_data_subset[optional_input] = data.get('assetData').get(
                    optional_input)[i * split_idx:end_idx]

        asset_data_request = {'date': date, 'assetData': asset_data_subset}
        print(
            GsFactorRiskModelApi.upload_risk_model_data(
                model_id,
                asset_data_request,
                partial_upload=True,
                target_universe_size=target_universe_size))

    if 'issuerSpecificCovariance' in data or 'factorPortfolios' in data:
        optional_data = {}
        for optional_input in ['issuerSpecificCovariance', 'factorPortfolios']:
            if data.get(optional_input):
                optional_data[optional_input] = data.get(optional_input)
        print(f'{list(optional_data.keys())} being uploaded for {date}...')
        optional_data['date'] = date
        print(
            GsFactorRiskModelApi.upload_risk_model_data(
                model_id,
                optional_data,
                partial_upload=True,
                target_universe_size=target_universe_size))
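
A minimal usage sketch for the function above; the model id is a placeholder and the payload shape follows the fields the function reads (compare the test payload in Example 5):

# Hypothetical usage: 'MODEL_ID' is a placeholder, and the payload mirrors
# the keys batch_and_upload_partial_data reads above
sample_data = {
    'date': '2021-01-13',
    'factorData': [{'factorId': '1', 'factorName': 'USD',
                    'factorCategory': 'Currency', 'factorCategoryId': 'CUR'}],
    'covarianceMatrix': [[0.089]],
    'assetData': {
        'universe': ['2407966', '2046251', 'USD'],
        'specificRisk': [12.09, 45.12, 3.09],
        'factorExposure': [{'1': 0.23}, {'1': 0.4}, {'1': 0.1}]
    }
}
batch_and_upload_partial_data('MODEL_ID', sample_data)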
Example 2
def batch_and_upload_partial_data(model_id: str, data: dict) -> list:
    """ Takes in total risk model data for one day and batches requests according to
    asset data size, returns a list of messages from resulting post calls"""
    posting_result_messages = []
    target_universe_size = len(data.get('assetData').get('universe'))
    factor_data = RiskModelData(data.get('date'),
                                factor_data=data.get('factorData'),
                                covariance_matrix=data.get('covarianceMatrix'))
    posting_result_messages.append(
        GsFactorRiskModelApi.upload_risk_model_data(model_id,
                                                    factor_data,
                                                    partial_upload=True))
    split_num = max(int(target_universe_size / 15000), 1)
    split_idx = int(target_universe_size / split_num)
    for i in range(split_num):
        end_idx = (i + 1) * split_idx if split_num != i + 1 else target_universe_size
        asset_data_subset = {
            'universe': data.get('assetData').get('universe')[i * split_idx:end_idx],
            'specificRisk': data.get('assetData').get('specificRisk')[i * split_idx:end_idx],
            'factorExposure': data.get('assetData').get('factorExposure')[i * split_idx:end_idx]
        }
        optional_asset_inputs = ['totalRisk', 'historicalBeta']
        for optional_input in optional_asset_inputs:
            if data.get('assetData').get(optional_input):
                asset_data_subset[optional_input] = data.get('assetData').get(
                    optional_input)[i * split_idx:end_idx]

        asset_data_request = RiskModelData(data.get('date'),
                                           asset_data=asset_data_subset)
        posting_result_messages.append(
            GsFactorRiskModelApi.upload_risk_model_data(
                model_id,
                asset_data_request,
                partial_upload=True,
                target_universe_size=target_universe_size))
    optional_inputs = ['issuerSpecificCovariance', 'factorPortfolios']
    optional_data = {'date': data.get('date')}
    for optional_input in optional_inputs:
        if data.get(optional_input):
            optional_data[optional_input] = data.get(optional_input)
    # only post the optional payload if at least one optional input is present
    if len(optional_data) > 1:
        posting_result_messages.append(
            GsFactorRiskModelApi.upload_risk_model_data(
                model_id,
                optional_data,
                partial_upload=True,
                target_universe_size=target_universe_size))
    return posting_result_messages
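
Because this variant returns the POST responses instead of printing them, callers can inspect the result of each batch; a sketch, reusing the hypothetical sample_data payload from the sketch above:

# Hypothetical usage: collect and inspect the per-batch responses
messages = batch_and_upload_partial_data('MODEL_ID', sample_data)
for message in messages:
    print(message)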
Example 3
def batch_and_upload_partial_data(model_id: str, data: dict, max_asset_size: int):
    """ Takes in total risk model data for one day and batches requests according to
    asset data size, printing the response from each resulting post call """
    date = data.get('date')
    if data.get('factorData'):
        factor_data = {
            'date': date,
            'factorData': data.get('factorData'),
            'covarianceMatrix': data.get('covarianceMatrix')
        }
        print('Uploading factor data')
        print(
            GsFactorRiskModelApi.upload_risk_model_data(model_id,
                                                        factor_data,
                                                        partial_upload=True))

    if data.get('assetData'):
        asset_data_list, target_size = _batch_input_data(
            {'assetData': data.get('assetData')}, max_asset_size)
        for asset_data_batch in asset_data_list:
            print(
                GsFactorRiskModelApi.upload_risk_model_data(
                    model_id, {
                        'assetData': asset_data_batch,
                        'date': date
                    },
                    partial_upload=True,
                    target_universe_size=target_size))

    if 'issuerSpecificCovariance' in data or 'factorPortfolios' in data:
        for optional_input_key in ['issuerSpecificCovariance', 'factorPortfolios']:
            if data.get(optional_input_key):
                optional_data = data.get(optional_input_key)
                optional_data_list, target_size = _batch_input_data(
                    {optional_input_key: optional_data}, max_asset_size)
                print(f'{optional_input_key} being uploaded for {date}...')
                for optional_data_batch in optional_data_list:
                    print(
                        GsFactorRiskModelApi.upload_risk_model_data(
                            model_id, {
                                optional_input_key: optional_data_batch,
                                'date': date
                            },
                            partial_upload=True,
                            target_universe_size=target_size))
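
_batch_input_data is a private helper that this example calls but does not show. Below is a minimal sketch of the behavior the call sites imply, covering only the assetData path (the covariance and portfolio payloads would need field-aware slicing that is omitted here); the actual gs_quant helper may differ:

import math


def _batch_input_data(input_data: dict, max_asset_size: int):
    """ Hypothetical reconstruction for the assetData path only: slice each
        per-asset list into chunks of at most max_asset_size entries and
        return the batches along with the full universe size """
    asset_data = input_data['assetData']
    target_size = len(asset_data['universe'])
    split_num = max(math.ceil(target_size / max_asset_size), 1)
    split_idx = math.ceil(target_size / split_num)
    batches = []
    for i in range(split_num):
        start, end = i * split_idx, (i + 1) * split_idx
        # slice every per-asset list in parallel; pass scalars through unchanged
        batches.append({key: value[start:end] if isinstance(value, list) else value
                        for key, value in asset_data.items()})
    return batches, target_size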
Example 4
    def upload_partial_data(self, data: RiskModelData, target_universe_size: float = None):
        """ Upload partial risk model data to existing risk model in Marquee
            :param data: partial risk model data for uploading on given date
            :param target_universe_size: the size of the complete universe on date

            The model's factorData and covarianceMatrix must be uploaded first on the given date. If data repeats
                across partial uploads, the newer posted data will replace the existing data for that date """
        print(GsFactorRiskModelApi.upload_risk_model_data(
            self.id,
            data,
            partial_upload=True,
            target_universe_size=target_universe_size)
        )
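
A usage sketch for this method, assuming `model` is an existing risk model object exposing it; the payload is shown as a plain dict for brevity, though the signature types it as RiskModelData:

# Hypothetical usage: per the docstring, factor data must be uploaded first
# for the date; asset data can follow in later partial uploads
partial_data = {
    'date': '2021-01-13',
    'factorData': [{'factorId': '1', 'factorName': 'USD',
                    'factorCategory': 'Currency', 'factorCategoryId': 'CUR'}],
    'covarianceMatrix': [[0.089]]
}
model.upload_partial_data(partial_data, target_universe_size=3)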
Example 5
from gs_quant.api.gs.risk_models import GsFactorRiskModelApi
from gs_quant.session import Environment, GsSession


def test_upload_risk_model_data(mocker):
    risk_model_data = {
        'date': '2020-02-05',
        'assetData': {
            'universe': ['2407966', '2046251', 'USD'],
            'specificRisk': [12.09, 45.12, 3.09],
            'factorExposure': [{'1': 0.23, '2': 0.023}],
            'historicalBeta': [0.12, 0.45, 1.2]
        },
        'factorData': [{
            'factorId': '1',
            'factorName': 'USD',
            'factorCategory': 'Currency',
            'factorCategoryId': 'CUR'
        }],
        'covarianceMatrix': [[0.089, 0.0123, 0.345]],
        'issuerSpecificCovariance': {
            'universeId1': ['2407966'],
            'universeId2': ['2046251'],
            'covariance': [0.03754]
        },
        'factorPortfolios': {
            'universe': ['2407966', '2046251'],
            'portfolio': [{'factorId': 2, 'weights': [0.25, 0.75]}]
        }
    }

    # mock GsSession
    mocker.patch.object(GsSession.__class__,
                        'default_value',
                        return_value=GsSession.get(Environment.QA, 'client_id',
                                                   'secret'))
    mocker.patch.object(GsSession.current,
                        '_post',
                        return_value='Successfully uploaded')

    # run test
    response = GsFactorRiskModelApi.upload_risk_model_data(
        model_id='id', model_data=risk_model_data)
    GsSession.current._post.assert_called_with(
        '/risk/models/data/{id}'.format(id='id'), risk_model_data)
    assert response == 'Successfully uploaded'
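
The `mocker` fixture used here comes from the pytest-mock plugin, so that plugin must be installed; `pytest -k test_upload_risk_model_data` would select and run this test.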
Example 6
    def upload_data(self, data: Union[RiskModelData, Dict]):
        """ Upload risk model data to existing risk model
            :param data: complete risk model data for uploading on given date
                includes: date, factorData, assetData, covarianceMatrix with optional inputs:
                issuerSpecificCovariance and factorPortfolios

            If the upload universe is over 20000 assets, the data will be batched and uploaded in chunks of 20000 assets """

        data = data.to_json() if type(data) == RiskModelData else data
        target_universe_size = len(data.get('assetData').get('universe'))
        if target_universe_size > 20000:
            print('Batching uploads due to universe size')
            batch_and_upload_partial_data(self.id, data)
        else:
            print(GsFactorRiskModelApi.upload_risk_model_data(self.id, data))
Example 7
    def upload_data(self,
                    data: Union[RiskModelData, Dict],
                    max_asset_batch_size: int = 20000):
        """ Upload risk model data to existing risk model in Marquee

        :param data: complete risk model data for uploading on given date
            includes: date, factorData, assetData, covarianceMatrix with optional inputs:
            issuerSpecificCovariance and factorPortfolios
        :param max_asset_batch_size: size of payload to batch with. Defaults to 20000 assets

        If the upload universe is larger than max_asset_batch_size, the data will be batched and uploaded in
        chunks of max_asset_batch_size assets
        """

        data = risk_model_data_to_json(data) if type(data) == RiskModelData else data
        target_universe_size = get_universe_size(data)
        if target_universe_size > max_asset_batch_size:
            print('Batching uploads due to universe size')
            batch_and_upload_partial_data(self.id, data, max_asset_batch_size)
        else:
            print(GsFactorRiskModelApi.upload_risk_model_data(self.id, data))
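
A usage sketch, assuming `model` is an instance of the class defining this method and reusing the risk_model_data payload from the test in Example 5; the smaller batch size is arbitrary:

# Hypothetical usage: a 3-asset universe is under the batch size,
# so this uploads in a single call
model.upload_data(risk_model_data, max_asset_batch_size=10000)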