Code example #1
def add_block(entity_scope: str, entity_code: str, bs: InMemoryBlockStore, sd, ed, ad) -> None:
    """
    Adds a block to a block store

    :param str entity_scope: The scope of the entity to add the block for
    :param str entity_code: The code of the entity to add the block for
    :param InMemoryBlockStore bs: The block store to add the block to
    :param sd: The effectiveAt start date
    :param ed: The effectiveAt end date
    :param ad: The asAt date

    :return: None
    """

    # Create a block using the provided dates
    b = PerformanceDataSet(from_date=sd, to_date=ed, asat=ad)

    # src is the module-level performance data source (assigned elsewhere)
    global src

    # For each date and group (DataFrame)
    for d, g in src.get_perf_data(
            entity_scope=entity_scope,
            entity_code=entity_code,
            from_date=b.from_date,
            to_date=b.to_date,
            asat=b.asat
    ).groupby('date'):

        # Populate the block with each PerformanceDataPoint in chronological order
        b.add_values(date=d, data_source=g.apply(lambda r: (r['key'], r['mv'], r['net']), axis=1))

    # Add the populated block to the block store
    bs.add_block(entity_scope=entity_scope, entity_code=entity_code, block=b)
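
A minimal usage sketch, assuming the module-level src data source has already been assigned and that InMemoryBlockStore is the store from code example #7; the entity names and dates are illustrative only:

# Hypothetical wiring: `src` must exist at module level before the call,
# because add_block reads it via `global src`.
store = InMemoryBlockStore()
add_block(entity_scope='Growth',
          entity_code='global-fund',
          bs=store,
          sd='2018-03-05',
          ed='2018-03-19',
          ad='2020-03-19')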
Code example #2
def test_lazy_loading():
    # Keep track of how many times the loader is called.
    counter = 0

    def dummy_loader():
        nonlocal counter
        counter += 1
        return []

    pds = PerformanceDataSet('2018-03-05',
                             '2018-03-19',
                             '2018-03-19',
                             loader=dummy_loader)

    # Before we get the data points, counter should be 0
    assert counter == 0
    r = pds.get_data_points()

    # After we get the data points, counter should be 1
    assert counter == 1

    r2 = pds.get_data_points()

    # If we get the points again, the counter is still 1
    assert counter == 1

    # and it is the same object we got earlier
    assert r is r2
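
The test pins down the contract rather than the implementation. Below is a minimal sketch of a get_data_points that would satisfy it, assuming the loader is stored on the instance and its result is memoised on first access; the attribute names self.loader and self._data_points are illustrative, not necessarily the real ones:

    def get_data_points(self):
        # Invoke the loader at most once and cache its result. The check is
        # against None (not truthiness), so an empty list like the one the
        # dummy loader returns is still cached correctly.
        if self._data_points is None:
            self._data_points = self.loader()
        # Later calls return the very same cached object, as the test asserts
        return self._data_points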
Code example #3
    def __init__(self, scope, portfolio):
        super().__init__()
        self.scope = scope
        self.portfolio = portfolio
        self.path = os.path.join(
            PerformanceConfiguration.item('LocalStorePath', 'blocks'), scope,
            portfolio)

        # Load existing blocks (if any)

        # Create a loader function for a block. The extra level of wrapping
        # binds idx by value; defining the closure directly in the loop below
        # would capture the loop variable and make every loader read the last block.
        def wrap(idx):
            def loader():
                return pd.read_pickle(f'{self.path}.block-{idx+1}')

            return loader

        try:
            df = pd.read_csv(f'{self.path}.idx',
                             parse_dates=['from_date', 'to_date', 'asat'])
        except FileNotFoundError:
            return  # File doesn't exist. Not a problem at this stage

        for i, r in df.iterrows():
            block = PerformanceDataSet(r['from_date'],
                                       r['to_date'],
                                       r['asat'],
                                       loader=wrap(i))
            super().add_block(self.scope, self.portfolio, block)
Code example #4
    def add_block(self,
                  entity_scope: str,
                  entity_code: str,
                  block: PerformanceDataSet,
                  performance_scope: str = None) -> PerformanceDataSet:
        """
        This adds a block to the BlockStore for the specified entity.

        :param str entity_scope: The scope of the entity to add the block for.
        :param str entity_code: The code of the entity to add the block for. Together with the entity_scope this uniquely
        identifies the entity.
        :param PerformanceDataSet block: The block to add to the BlockStore
        :param str performance_scope: The scope of the BlockStore to use. This is the scope in LUSID to use when adding
        the block to the Structured Result Store.

        :return: PerformanceDataSet block: The block that was added to the BlockStore, with the asAt time of
        the operation set on it
        """
        if performance_scope is None:
            performance_scope = "PerformanceBlockStore"

        serialised_block = serialise(block)
        code = self._create_result_id(entity_scope, entity_code,
                                      block.from_date, block.to_date)
        effective_at = block.to_date

        # Build the API from the factory; build already returns the API instance,
        # so wrapping the result in StructuredResultDataApi again is unnecessary
        structured_results_api = self.api_factory.build(StructuredResultDataApi)

        response = structured_results_api.upsert_structured_result_data(
            scope=performance_scope,
            request_body={
                code:
                UpsertStructuredResultDataRequest(
                    id=StructuredResultDataId(source=self.source,
                                              code=code,
                                              effective_at=effective_at,
                                              result_type=self.result_type),
                    data=StructuredResultData(document_format="Json",
                                              version=block.version,
                                              name="PerformanceDataSet",
                                              document=serialised_block))
            })

        as_at_time = list(response.values.values())[0]

        entity_id = self._create_id_from_scope_code(entity_scope, entity_code)
        self.blocks[entity_id].append((code, as_at_time, performance_scope))

        if block.asat is None:
            block.asat = as_at_time

        return block
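
Two design points are worth calling out, both read directly from the code above: the document code comes from _create_result_id over the entity scope/code and the block's date window, so re-upserting a block for the same window overwrites the previous document rather than duplicating it; and effective_at is set to block.to_date, so a block becomes queryable as of the end of the period it covers. The asAt time returned by LUSID is written back onto the block only when the block arrived without one.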
Code example #5
def test_local_block(fs):
    # NOTE: `fs` is the pyfakefs fake file-system fixture, so no real files are touched
    # Set global config file paths
    block_path = os.path.join('folder', 'sub-folder')
    PerformanceConfiguration.set_global_config(LocalStorePath=block_path)

    # Create a block store
    bs = LocalBlockStore('SCOPE', 'NAME')
    bs.add_block('SCOPE', 'NAME',
                 PerformanceDataSet('2018-03-05', '2018-03-19', '2020-03-19'))
    bs.add_block('SCOPE', 'NAME',
                 PerformanceDataSet('2018-03-20', '2018-05-31', '2020-03-19'))

    # Make sure the folder has been created
    assert os.listdir(block_path) == ['SCOPE']

    # Make sure files are created
    contents = os.listdir(os.path.join(block_path, 'SCOPE'))

    assert 'NAME.idx' in contents
    assert 'NAME.block-1' in contents
    assert 'NAME.block-2' in contents
Code example #6
def create_test_case(performance_scope: str, entity_scope: str,
                     entity_code: str, start_date, end_date,
                     upserted_pdps: List[Dict], persisted_pdps: List[Dict]):
    """
    Creates a test case for testing the upsert of returns

    :param str performance_scope: The scope of the block store to use to store performance returns
    :param str entity_scope: The scope of the entity to store returns against
    :param str entity_code: The code of the entity to store returns against
    :param start_date: The start date of the returns
    :param end_date: The end date of the returns
    :param List[Dict] upserted_pdps: The keyword arguments to create the PerformanceDataPoints to upsert
    :param List[Dict] persisted_pdps: The keyword arguments for the PerformanceDataPoints expected to be persisted
    in the BlockStore

    :return: List: The created test case as a list of test parameters
    """
    return [
        performance_scope, entity_scope, entity_code, {
            "set1":
            PerformanceDataSetRequest(data_points=[
                PerformanceDataPointRequest(**pdp) for pdp in upserted_pdps
            ])
        },
        UpsertReturnsResponse(successes={
            "set1":
            PerformanceDataSetResponse(
                from_date=start_date,
                to_date=end_date,
                previous=PerformanceDataPointResponse(**upserted_pdps[-1]),
                data_points=[
                    PerformanceDataPointResponse(**pdp)
                    for pdp in upserted_pdps
                ])
        },
                              failures={}),
        [
            PerformanceDataSet(
                from_date=start_date,
                to_date=end_date,
                data_points=[
                    PerformanceDataPoint(**pdp) for pdp in persisted_pdps
                ],
                previous=PerformanceDataPoint(**persisted_pdps[-1]))
        ]
    ]
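
A sketch of how such a case might be consumed, assuming pytest, the InMemoryBlockStore from code example #7 and the upsert_portfolio_returns function from code example #8; the scopes, codes, dates and the PerformanceDataPointRequest fields (date, ror) are assumptions inferred from the attribute access elsewhere in these examples:

import pytest

test_cases = [
    create_test_case(performance_scope='PerformanceBlockStore',
                     entity_scope='Growth',
                     entity_code='global-fund',
                     start_date='2018-03-05',
                     end_date='2018-03-06',
                     upserted_pdps=[{'date': '2018-03-05', 'ror': 0.01},
                                    {'date': '2018-03-06', 'ror': 0.02}],
                     persisted_pdps=[{'date': '2018-03-05', 'ror': 0.01},
                                     {'date': '2018-03-06', 'ror': 0.02}])
]


@pytest.mark.parametrize(
    'performance_scope, entity_scope, entity_code, request_body, '
    'expected_response, expected_blocks', test_cases)
def test_upsert_returns(performance_scope, entity_scope, entity_code,
                        request_body, expected_response, expected_blocks):
    # Hypothetical test body: upsert against an in-memory store. A fuller
    # test would also compare expected_blocks against the stored blocks and
    # compare responses field by field, normalising the asAt timestamps.
    block_store = InMemoryBlockStore()
    response = upsert_portfolio_returns(performance_scope, entity_scope,
                                        entity_code, request_body,
                                        block_store)
    assert response.successes.keys() == expected_response.successes.keys()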
Code example #7
    def add_block(self,
                  entity_scope: str,
                  entity_code: str,
                  block: PerformanceDataSet,
                  performance_scope: str = None) -> PerformanceDataSet:
        """
        This adds a block to the BlockStore for the specified entity.

        :param str entity_scope: The scope of the entity to add the block for.
        :param str entity_code: The code of the entity to add the block for. Together with the entity_scope this uniquely
        identifies the entity.
        :param PerformanceDataSet block: The block to add to the BlockStore
        :param str performance_scope: The scope to use in the BlockStore. This has no effect in the in-memory
        implementation.

        :return: PerformanceDataSet: The block that was added, with an asAt time stamped on it if it had none
        """
        entity_id = self._create_id_from_scope_code(entity_scope, entity_code)
        self.blocks[entity_id].append(block)
        if block.asat is None:
            # If the block has no asAt time, add one
            block.asat = datetime.now(pytz.UTC)
        return block
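
A minimal usage sketch for the in-memory store, using only behaviour visible above (the entity names are illustrative):

store = InMemoryBlockStore()
block = store.add_block('Growth', 'global-fund',
                        PerformanceDataSet('2018-03-05', '2018-03-19'))
# The store stamps the current UTC time onto blocks that arrive without an asAt
assert block.asat is not None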
Code example #8
def upsert_portfolio_returns(performance_scope: str, portfolio_scope: str,
                             portfolio_code: str,
                             request_body: Dict[str,
                                                PerformanceDataSetRequest],
                             block_store: IBlockStore):
    """
    Upsert returns into a block store for a given portfolio.

    :param str performance_scope: The scope of the BlockStore to use to store the performance returns
    :param str portfolio_scope: The scope of the Portfolio the returns are associated with
    :param str portfolio_code: The code of the Portfolio the returns are associated with, together with the scope
    this uniquely identifies the Portfolio
    :param Dict[str, PerformanceDataSetRequest] request_body: The body of the request containing the PerformanceDataSets
    to persist as blocks
    :param IBlockStore block_store: The block store to use to persist the blocks

    :return: UpsertReturnsResponse: The response to the Upsert request
    """

    # 2) Validation (not implemented in this snippet):
    # - no duplicate dates among the PerformanceDataPoints inside a PerformanceDataSet
    # - no duplicate PerformanceDataSets (based on the unique code built from from_date, to_date, entity_scope, entity_code)

    # 3) Cast to PerformanceDataSets
    pds = {}

    # For each PerformanceDataSet
    for correlation_id, performance_data_set_request in request_body.items():

        start_date = performance_data_set_request.start_date
        end_date = performance_data_set_request.end_date

        # Sort all the PerformanceDataPoint into chronological order
        performance_data_point_chronological = sorted(
            performance_data_set_request.data_points, key=lambda x: x.date)

        # Ensure that the start_date and end_date are available
        if start_date is None:
            start_date = performance_data_point_chronological[0].date

        if end_date is None:
            end_date = performance_data_point_chronological[-1].date

        # Create a new PerformanceDataSet
        pd = PerformanceDataSet(from_date=start_date, to_date=end_date)

        # Add the returns to the PerformanceDataSet as a series of PerformanceDataPoints
        for pdp in performance_data_point_chronological:
            pd.add_returns(date=pdp.date,
                           weight=getattr(pdp, "weight", 0),
                           ror=pdp.ror)

        pds[correlation_id] = pd

    # 4) Persist each PerformanceDataSet in the BlockStore
    pds = {
        correlation_id:
        block_store.add_block(entity_scope=portfolio_scope,
                              entity_code=portfolio_code,
                              block=pd,
                              performance_scope=performance_scope)
        for correlation_id, pd in pds.items()
    }

    # 5) Cast to Response objects
    pd_responses = {}
    # For each PerformanceDataSet
    for correlation_id, performance_data_set in pds.items():

        # Create the response object
        pd = PerformanceDataSetResponse(
            from_date=performance_data_set.from_date,
            to_date=performance_data_set.to_date,
            asat=performance_data_set.asat,
            data_points=[
                PerformanceDataPointResponse(**performance_data_point.__dict__)
                for performance_data_point in performance_data_set.data_points
            ],
            previous=PerformanceDataPointResponse(
                **performance_data_set.latest_data_point.__dict__))

        pd_responses[correlation_id] = pd

    return UpsertReturnsResponse(pd_responses, {})
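
A hedged end-to-end sketch tying this together with the InMemoryBlockStore from code example #7; the portfolio identifiers are illustrative, and the PerformanceDataPointRequest fields are assumed from the attribute access above:

store = InMemoryBlockStore()
response = upsert_portfolio_returns(
    performance_scope='PerformanceBlockStore',
    portfolio_scope='Growth',
    portfolio_code='global-fund',
    request_body={
        'set1':
        PerformanceDataSetRequest(data_points=[
            PerformanceDataPointRequest(date='2018-03-05', ror=0.01),
            PerformanceDataPointRequest(date='2018-03-06', ror=0.02)
        ])
    },
    block_store=store)

# One block was persisted and echoed back per correlation id; since the
# request carried no explicit dates, from_date is derived from the earliest
# data point
assert 'set1' in response.successes
assert response.successes['set1'].from_date == '2018-03-05'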