Пример #1
0
def process_args(api, args):
    """Dispatch a parsed command-line request to the performance API.

    :param api: LUSID api handle used to construct the PerformanceApi.
    :param args: parsed arguments; ``args.op`` selects the operation
                 ('qry', 'post' or 'periods') and the remaining attributes
                 carry that operation's parameters.

    :return: the performance report for 'qry', the known periods for
             'periods', otherwise None.
    """
    PerformanceConfiguration.set_global_config(args.config)
    perf_api = PerformanceApi(api)

    operation = args.op

    if operation == 'qry':
        return perf_api.performance_report(
            args.scope, args.portfolio, args.from_date, args.to_date,
            args.locked, args.fields)

    if operation == 'post':
        # Locking has no result to report back
        perf_api.lock_period(args.scope,
                             args.portfolio,
                             args.date,
                             asat=args.post_asat)
        return None

    if operation == 'periods':
        return perf_api.get_periods(args.scope, args.portfolio)

    return None  # unrecognised op: same implicit None as the original chain
    def __init__(self, scope, portfolio):
        super().__init__()
        self.scope = scope
        self.portfolio = portfolio
        self.path = os.path.join(
            PerformanceConfiguration.item('LocalStorePath', 'blocks'), scope,
            portfolio)

        # Load existing blocks (if any)

        # Create a loader function for a block
        def wrap(idx):
            def loader():
                return pd.read_pickle(f'{self.path}.block-{idx+1}')

            return loader

        try:
            df = pd.read_csv(f'{self.path}.idx',
                             parse_dates=['from_date', 'to_date', 'asat'])
        except:
            return  # File doesn't exist. Not a problem at this stage

        for i, r in df.iterrows():
            block = PerformanceDataSet(r['from_date'],
                                       r['to_date'],
                                       r['asat'],
                                       loader=wrap(i))
            super().add_block(self.scope, self.portfolio, block)
Пример #3
0
def test_get_flows():
    """External flows for JLH/FUND1 over 2019-07-05..15 match the recording."""
    config = PerformanceConfiguration(ext_flow_types={'APPRCY', 'EXPRCY'})
    with api_cacher.CachingApi("flows") as api:
        v = flows.get_flows(api, 'JLH', 'FUND1', config, '2019-07-05',
                            '2019-07-15', now())

        expected = {
            '2019-07-08': 306132.26,
            '2019-07-11': 261681.14,
            '2019-07-12': -1224380.23,
        }
        assert len(v) == len(expected)
        for day, amount in expected.items():
            assert v[as_date(day)] == amount
def test_config():
    """Instance settings shadow globals; unknown keys fall back to defaults."""
    PerformanceConfiguration.set_global_config(gs="a", gn=2)
    cfg = PerformanceConfiguration(ls="a2", ln=5)

    # get() resolves through both the local and the global layer
    for key, expected in (('gs', "a"), ('gn', 2), ('ls', "a2"), ('ln', 5)):
        assert cfg.get(key) == expected

    # Unknown key returns the supplied default
    assert cfg.get('na', -1) == -1

    # Attribute access resolves through the same layers
    assert cfg.ln == 5
    assert cfg.gn == 2

    # Class-level lookups only see the global layer (not instance 'ls')
    assert PerformanceConfiguration.global_config.get('gs', 5) == "a"
    assert PerformanceConfiguration.item('ls', "n/a") == "n/a"
Пример #5
0
def test_local_block(fs):
    """Adding blocks to a LocalBlockStore writes an index plus one file per block.

    NOTE: runs against the fake file-system provided by the ``fs`` fixture.
    """
    # Point the global config at a path inside the fake file-system
    block_path = os.path.join('folder', 'sub-folder')
    PerformanceConfiguration.set_global_config(LocalStorePath=block_path)

    # Create a block store and populate it with two consecutive blocks
    bs = LocalBlockStore('SCOPE', 'NAME')
    for from_date, to_date in (('2018-03-05', '2018-03-19'),
                               ('2018-03-20', '2018-05-31')):
        bs.add_block('SCOPE', 'NAME',
                     PerformanceDataSet(from_date, to_date, '2020-03-19'))

    # The scope folder was created under the configured root
    assert os.listdir(block_path) == ['SCOPE']

    # The index file and one file per block were written
    contents = os.listdir(os.path.join(block_path, 'SCOPE'))
    for name in ('NAME.idx', 'NAME.block-1', 'NAME.block-2'):
        assert name in contents
def test_config_from_file(fs):
    """Global settings loaded from a JSON file merge with keyword globals."""

    # Create dummy config in the fake file-system
    with open("config.json", "w") as fp:
        json.dump({"gn": 123, "gs2": "hello", "ln": -1}, fp)

    PerformanceConfiguration.set_global_config(gs="a",
                                               gn=2,
                                               path="config.json")
    cfg = PerformanceConfiguration(ls="a2", ln=5)

    # Keyword global and file-loaded global are both visible via get()
    assert cfg.get('gs') == "a"
    assert cfg.get('gs2') == "hello"

    # Instance settings shadow the file's 'ln' on the instance...
    assert cfg.ln == 5
    assert cfg.ls == "a2"

    # ...while the class-level item() still reports the file's value
    assert PerformanceConfiguration.item('ln') == -1
import pytest
from performance_sources.lusid_src import LusidSource
from config.config import PerformanceConfiguration
from misc import *
from os import path
from fields import *
from block_stores.block_store_in_memory import InMemoryBlockStore
from perf import Performance

from tests.utilities import api_cacher

# Shared configuration for the tests in this module: declares 'APPRCY' and
# 'EXPRCY' as the external flow types.
config = PerformanceConfiguration(ext_flow_types={'APPRCY', 'EXPRCY'})


@pytest.mark.skip(
    "Setup of JLH Fund 1 or similar needs to be replicable before this test can be run"
)
def test_get_perf_data(recording):
    """Fetch JLH/FUND1 performance data and, in recording mode, pickle it
    as the expected-output fixture.

    When pytest is invoked with ``--recording=test_get_perf_data`` the
    resulting DataFrame is saved to ``expected/lusid_src_get_perf_data.pk``.
    NOTE(review): no comparison against the expectation is visible here —
    the non-recording branch may be truncated in this view.
    """

    with api_cacher.CachingApi(filename='lusid_src') as api:
        l_src = LusidSource(api, config)
        df = l_src.get_perf_data('JLH', 'FUND1', '2019-07-10', '2019-07-12',
                                 now())

        # To record tests, use pytest --recording=test_get_perf_data
        filename = path.join('expected', 'lusid_src_get_perf_data.pk')

        if 'test_get_perf_data' in recording:
            # Record the result and save as the expectation
            df.to_pickle(filename, protocol=0)
Пример #8
0
def get_ext_fields(api_factory: ApiClientFactory, entity_type: str,
                   entity_scope: str, entity_code: str,
                   effective_date: Timestamp, asat: Timestamp,
                   fields: List[str],
                   config: PerformanceConfiguration) -> Dict[str, Timestamp]:
    """
    The responsibility of this function is to get extended fields for performance reporting from LUSID

    :param ApiClientFactory api_factory: The api factory to use to connect to LUSID
    :param str entity_type: Whether the entity is a portfolio or a composite
    :param str entity_scope: The scope of the entity to fetch properties for
    :param str entity_code: The code of the entity to fetch properties for
    :param Timestamp effective_date: The effectiveAt to fetch properties for
    :param Timestamp asat: The asAt date to fetch properties for
    :param List[str] fields: The fields from which to extract available extended fields
    :param PerformanceConfiguration config: The configuration containing the available extended fields

    :return: Dict[str, Timestamp]: The extended fields and their values retrieved from LUSID

    :raises KeyError: If entity_type (case-insensitive) is not 'portfolio' or 'composite'
    """
    # Mapping of report field name -> LUSID property key, from configuration
    ext_fields = config.get("fields", {})

    # Which API class, and which call on it, serves each entity type
    api_mapping = {"portfolio": PortfoliosApi, "composite": PortfolioGroupsApi}

    api_call_mapping = {
        "portfolio": "get_portfolio_properties",
        "composite": "get_group_properties"
    }

    entity_type = entity_type.lower()

    if entity_type not in api_mapping:
        raise KeyError(
            f"The entity type of {entity_type} is unsupported. The supported types are {str(list(api_mapping.keys()))}"
        )

    # Shortlist of requested extension fields, reverse-mapped so the LUSID
    # property key looks up the report field name. Membership is tested
    # directly against the dict (the original rebuilt a set per element).
    shortlist = {ext_fields[f]: f for f in fields if f in ext_fields}

    # Nothing requested that we know how to fetch: skip the API call entirely
    if not shortlist:
        return {}

    def get_props(result: PortfolioProperties) -> Dict[str, Timestamp]:
        """
        Extract the shortlisted properties from the API result, converting
        each label value to a date.

        :param PortfolioProperties result: The result from the API call to get properties for the entity

        :return: Dict[str, Timestamp]: The extended fields and their values retrieved from LUSID
        """
        # shortlist is only read here, so no `nonlocal` declaration is needed
        return {
            shortlist[pk]: lpt.to_date(pv.value.label_value)
            for pk, pv in result.properties.items() if pk in shortlist
        }

    # Build the appropriate API client and invoke the matching properties call
    response = getattr(api_factory.build(api_mapping[entity_type]),
                       api_call_mapping[entity_type])(
                           scope=entity_scope,
                           code=entity_code,
                           effective_at=effective_date,
                           as_at=asat)

    return get_props(response)