def load_instruments(args):
    file_type = "instruments"

    # create ApiFactory
    factory = ApiClientFactory(api_secrets_filename=args["secrets_file"])

    # get data
    if args["delimiter"]:
        logging.info(f"delimiter specified as {repr(args['delimiter'])}")
    logging.debug("Getting data")
    instruments = load_data_to_df_and_detect_delimiter(args)

    # get mappings
    mappings = load_json_file(args["mapping"])

    if "property_columns" in mappings[file_type].keys() and not args["scope"]:
        err = (
            r"properties must be upserted to a specified scope, but no scope was provided. "
            r"Please state what scope to upsert properties to using '-s'.")
        logging.error(err)
        raise ValueError(err)

    validate_mapping_file_structure(mappings, instruments.columns, file_type)
    if "cash_flag" in mappings.keys():
        instruments, mappings = identify_cash_items(instruments, mappings,
                                                    file_type, True)

    if args["dryrun"]:
        logging.info(
            "--dryrun specified as True, exiting before upsert call is made")
        return 0

    instruments_response = load_from_data_frame(
        api_factory=factory,
        data_frame=instruments,
        scope=args["scope"],
        mapping_required=mappings[file_type]["required"],
        mapping_optional=mappings[file_type].get("optional", {}),
        file_type=file_type,
        identifier_mapping=mappings[file_type]["identifier_mapping"],
        batch_size=args["batch_size"],
        property_columns=mappings[file_type].get("property_columns", []),
    )

    succ, errors, failed = cocoon_printer.format_instruments_response(
        instruments_response)
    logging.info(f"number of successful upserts: {len(succ)}")
    logging.info(f"number of failed upserts    : {len(failed)}")
    logging.info(f"number of errors            : {len(errors)}")

    if args["display_response_head"]:
        logging.info(succ.head(40))
        logging.info(errors.head(40))
        logging.info(failed.head(40))

    return instruments_response
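

# The dicts below are a hedged sketch of the inputs these loaders assume, based
# only on the keys read in the functions on this page; the names and values are
# illustrative, and the key carrying the input file path (consumed by
# load_data_to_df_and_detect_delimiter) is not shown in this snippet.
example_args = {
    "secrets_file": "secrets.json",
    "mapping": "instruments_mapping.json",
    "delimiter": ",",
    "scope": "my-scope",           # required whenever property_columns are mapped
    "dryrun": False,
    "batch_size": 2000,
    "display_response_head": True,
}

# Shape of the "instruments" section of the mapping file, expressed as the
# equivalent Python dict; the column names on the right-hand side are hypothetical.
example_instruments_mapping = {
    "instruments": {
        "identifier_mapping": {"ClientInternal": "client_internal"},
        "required": {"name": "instrument_name"},
        "optional": {},            # omitted -> the loader falls back to {}
        "property_columns": [],    # omitted -> the loader falls back to []
    }
}
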
def load_quotes(args):
    file_type = "quotes"

    # create ApiFactory
    factory = ApiClientFactory(api_secrets_filename=args["secrets_file"])

    # get data
    if args["delimiter"]:
        logging.info(f"delimiter specified as {repr(args['delimiter'])}")
    logging.debug("Getting data")
    quotes = load_data_to_df_and_detect_delimiter(args)

    # get mappings
    mappings = load_json_file(args["mapping"])

    # check properties exist
    if "property_columns" in mappings[file_type].keys() and not args["scope"]:
        err = (
            r"properties must be upserted to a specified scope, but no scope was provided. "
            r"Please state what scope to upsert properties to using '-s'.")
        logging.error(err)
        raise ValueError(err)

    quotes, mappings = identify_cash_items(quotes, mappings, file_type, True)

    validate_mapping_file_structure(mappings, quotes.columns, file_type)

    if "quote_scalar" in mappings[file_type].keys():
        quotes, mappings = scale_quote_of_type(quotes, mappings)

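    # Note: unlike the other loaders, the quotes --dryrun path returns the parsed
    # (and possibly scaled) DataFrame rather than 0, so it can be inspected.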
    if args["dryrun"]:
        return quotes

    quotes_response = load_from_data_frame(
        api_factory=factory,
        data_frame=quotes,
        scope=args["scope"],
        properties_scope=args.get("property_scope", args["scope"]),
        identifier_mapping={},
        mapping_required=mappings[file_type]["required"],
        mapping_optional=mappings[file_type].get("optional", {}),
        file_type=file_type,
        batch_size=args["batch_size"],
        property_columns=mappings[file_type].get("property_columns", []),
    )
    succ, errors, failed = cocoon_printer.format_quotes_response(
        quotes_response)
    logging.info(f"number of successful upserts: {len(succ)}")
    logging.info(f"number of failed upserts    : {len(failed)}")
    logging.info(f"number of errors            : {len(errors)}")

    if args["display_response_head"]:
        logging.info(succ.head(40))
        logging.info(errors.head(40))
        logging.info(failed.head(40))

    return quotes_response
Example #3
    def test_load_instrument_lookthrough_portfolio(self):
        code = create_scope_id()
        scope = "test-lookthrough"

        data_frame = pd.DataFrame({
            "instrument_name": [
                "Portfolio",
            ],
            "client_internal": [code],
            "lookthrough_code": [code],
        })

        mapping = {
            "identifier_mapping": {
                "ClientInternal": "client_internal",
            },
            "required": {
                "name": "instrument_name"
            },
            "optional": {
                "look_through_portfolio_id.scope": f"${scope}",
                "look_through_portfolio_id.code": "lookthrough_code",
            },
        }

        # create portfolio
        port_response = self.api_factory.build(
            lusid.api.TransactionPortfoliosApi).create_portfolio(
                scope=scope,
                create_transaction_portfolio_request=lusid.models.
                CreateTransactionPortfolioRequest(display_name=code,
                                                  description=code,
                                                  code=code,
                                                  base_currency="USD"),
            )

        # Upsert lookthrough instrument of portfolio
        instr_response = cocoon.load_from_data_frame(
            api_factory=self.api_factory,
            scope=scope,
            data_frame=data_frame,
            mapping_required=mapping["required"],
            mapping_optional=mapping["optional"],
            file_type="instruments",
            identifier_mapping=mapping["identifier_mapping"],
            property_columns=[],
        )

        self.assertEqual(len(instr_response["instruments"]["success"]), 1)
        self.assertEqual(len(instr_response["instruments"]["errors"]), 0)
        self.assertEqual(
            instr_response["instruments"]["success"]
            [0].values[f"ClientInternal: {code}"].lookthrough_portfolio.code,
            code,
        )
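

# In cocoon mapping values, a leading "$" marks a literal default value rather
# than a DataFrame column name, as in "$AUD", "$2010-10-09T08:00:00Z" and
# f"${scope}" in the examples on this page. A minimal sketch (names hypothetical):
lookthrough_scope = "test-lookthrough"
example_optional_mapping = {
    # literal value: every row gets the same scope string
    "look_through_portfolio_id.scope": f"${lookthrough_scope}",
    # column lookup: the value is taken from this DataFrame column per row
    "look_through_portfolio_id.code": "lookthrough_code",
}
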
def load_transactions(args):
    file_type = "transactions"

    factory = ApiClientFactory(api_secrets_filename=args["secrets_file"])

    if args["delimiter"]:
        logging.info(f"delimiter specified as {repr(args['delimiter'])}")
    logging.debug("Getting data")
    transactions = load_data_to_df_and_detect_delimiter(args)

    mappings = load_json_file(args["mapping"])

    if "cash_flag" in mappings.keys():
        transactions, mappings = identify_cash_items(transactions, mappings, file_type)

    validate_mapping_file_structure(mappings, transactions.columns, file_type)

    if args["dryrun"]:
        logging.info("--dryrun specified as True, exiting before upsert call is made")
        return 0

    transactions_response = load_from_data_frame(
        api_factory=factory,
        data_frame=transactions,
        scope=args["scope"],
        identifier_mapping=mappings[file_type]["identifier_mapping"],
        mapping_required=mappings[file_type]["required"],
        mapping_optional=mappings[file_type].get("optional", {}),
        file_type=file_type,
        batch_size=args["batch_size"],
        property_columns=mappings[file_type].get("property_columns", []),
    )

    # print_response(transactions_response, file_type)
    succ, errors = cocoon_printer.format_transactions_response(transactions_response)

    logging.info(f"number of successful upserts: {len(succ)}")
    logging.info(f"number of errors            : {len(errors)}")

    if args["display_response_head"]:
        logging.info(succ.head(40))
        logging.info(errors.head(40))

    return transactions_response
def load_holdings(args):
    file_type = "holdings"

    factory = ApiClientFactory(api_secrets_filename=args["secrets_file"])

    if args["delimiter"]:
        logging.info(f"delimiter specified as {repr(args['delimiter'])}")
    logging.debug("Getting data")
    holdings = load_data_to_df_and_detect_delimiter(args)

    mappings = load_json_file(args["mapping"])
    if "cash_flag" in mappings.keys():
        holdings, mappings = identify_cash_items(holdings, mappings, file_type)

    validate_mapping_file_structure(mappings, holdings.columns, file_type)

    if args["dryrun"]:
        logging.info(
            "--dryrun specified as True, exiting before upsert call is made")
        return 0

    holdings_response = load_from_data_frame(
        api_factory=factory,
        data_frame=holdings,
        scope=args["scope"],
        properties_scope=args.get("property_scope", args["scope"]),
        identifier_mapping=mappings[file_type]["identifier_mapping"],
        mapping_required=mappings[file_type]["required"],
        mapping_optional=mappings[file_type].get("optional", {}),
        file_type=file_type,
        batch_size=args["batch_size"],
        property_columns=mappings[file_type].get("property_columns", []),
        sub_holding_keys=mappings[file_type].get("sub_holding_keys", []),
    )

    succ, errors = cocoon_printer.format_holdings_response(holdings_response)

    logging.info(f"number of successful upserts: {len(succ)}")
    logging.info(f"number of errors            : {len(errors)}")

    if args["display_response_head"]:
        logging.info(succ.head(40))
        logging.info(errors.head(40))

    return holdings_response
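

# Hedged sketch of a "holdings" mapping section covering the keys load_holdings()
# reads above. The column names mirror the holdings example further down this
# page; the empty sub_holding_keys entry is only there to mark where it sits.
example_holdings_mapping = {
    "holdings": {
        "identifier_mapping": {
            "ClientInternal": "SEDOL Security Identifier",
            "Currency": "is_cash_with_currency",
        },
        "required": {
            "code": "FundCode",
            "effective_at": "Effective Date",
            "tax_lots.units": "Quantity",
        },
        "optional": {"tax_lots.cost.amount": "Local Market Value"},
        "property_columns": [],
        "sub_holding_keys": [],    # optional; the loader defaults to []
    }
}
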
    def test_load_instrument_lookthrough(self, _, df, mapping):

        if "default scope" in _:
            self.skipTest("Default parameter using '$' is not supported")

        scope = "test-lookthrough-loading-lusidtools"
        df = pd.read_csv(df)

        # replace lookthrough scope
        df = df.replace({"replace_scope": scope})

        # populate portfolio ids with random codes
        codes = {
            row["client_internal"]: create_scope_id(use_uuid=True) if
            "Portfolio" in row["instrument_name"] else row["client_internal"]
            for index, row in df.iterrows()
        }
        df = df.replace(codes)

        # create a portfolio for each unique "Portfolio" instrument
        for index, row in df.drop_duplicates("client_internal").iterrows():
            if "Portfolio" not in row["instrument_name"]:
                continue
            self.api_factory.build(
                lusid.api.TransactionPortfoliosApi).create_portfolio(
                    scope=scope,
                    create_transaction_portfolio_request=lusid.models.
                    CreateTransactionPortfolioRequest(
                        display_name=row["client_internal"],
                        description=row["client_internal"],
                        code=row["client_internal"],
                        base_currency="USD",
                    ),
                )

        # Upsert lookthrough instrument of portfolio
        instr_response = cocoon.load_from_data_frame(
            api_factory=self.api_factory,
            scope=scope,
            data_frame=df,
            mapping_required=mapping["required"],
            mapping_optional=mapping["optional"],
            file_type="instruments",
            identifier_mapping=mapping["identifier_mapping"],
            property_columns=[],
        )

        # check successes, errors and instrument lookthrough codes
        self.assertEqual(
            len(instr_response["instruments"]["success"][0].values.values()),
            len(df))
        self.assertEqual(len(instr_response["instruments"]["errors"]), 0)

        # check lookthrough code on response
        for index, row in df.iterrows():
            if "id" in row["client_internal"]:
                continue
            self.assertEqual(
                instr_response["instruments"]["success"][0]
                .values[f"ClientInternal: {row['client_internal']}"]
                .lookthrough_portfolio.code,
                row["lookthrough_code"],
            )

        # tear down this test
        for code in codes.values():
            if "id" not in code:
                self.api_factory.build(lusid.api.PortfoliosApi).delete_portfolio(
                    scope=scope, code=code)
        for CI in df["client_internal"]:
            self.api_factory.build(lusid.api.InstrumentsApi).delete_instrument(
                "ClientInternal", CI)
Example #7
    @classmethod
    def setUpClass(cls) -> None:
        secrets_file = Path(__file__).parent.parent.parent.joinpath(
            "secrets.json")
        cls.api_factory = lusid.utilities.ApiClientFactory(
            api_secrets_filename=secrets_file)
        cls.logger = logger.LusidLogger("debug")

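        # `scope` below refers to a module-level constant defined outside this snippet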
        cls.scope = scope

        portfolio_holdings = pd.read_csv(
            Path(__file__).parent.joinpath("./data/holdings-example.csv"))
        portfolio_groups = pd.read_csv(
            Path(__file__).parent.joinpath("./data/portfolio-groups.csv"))

        # Create portfolios
        response = cocoon.load_from_data_frame(
            api_factory=cls.api_factory,
            scope=cls.scope,
            data_frame=portfolio_holdings,
            mapping_required={
                "code": "FundCode",
                "display_name": "FundCode",
                "created": "$2010-10-09T08:00:00Z",
                "base_currency": "$AUD",
            },
            mapping_optional={},
            file_type="portfolios",
            property_columns=[],
        )
        assert len(response["portfolios"]["errors"]) == 0

        # Add holdings
        response = cocoon.load_from_data_frame(
            api_factory=cls.api_factory,
            scope=cls.scope,
            data_frame=portfolio_holdings,
            mapping_required={
                "code": "FundCode",
                "effective_at": "Effective Date",
                "tax_lots.units": "Quantity",
            },
            mapping_optional={
                "tax_lots.cost.amount": "Local Market Value",
                "tax_lots.cost.currency": "Local Currency Code",
                "tax_lots.portfolio_cost": None,
                "tax_lots.price": None,
                "tax_lots.purchase_date": None,
                "tax_lots.settlement_date": None,
            },
            file_type="holdings",
            identifier_mapping={
                "ClientInternal": "SEDOL Security Identifier",
                "Currency": "is_cash_with_currency",
            },
            property_columns=[],
            holdings_adjustment_only=True,
        )
        assert len(response["holdings"]["errors"]) == 0

        # Create groups
        response = cocoon.load_from_data_frame(
            api_factory=cls.api_factory,
            scope=cls.scope,
            data_frame=portfolio_groups,
            mapping_required={
                "code": "PortGroupCode",
                "display_name": "PortGroupDisplayName",
                "created": "$2010-10-09T08:00:00Z",
            },
            mapping_optional={
                "values.scope": f"${cls.scope}",
                "values.code": "FundCode",
            },
            file_type="portfolio_group",
            property_columns=[],
        )
        assert len(response["portfolio_groups"]["errors"]) == 0

        # Create group with sub-groups
        response = cls.api_factory.build(
            lusid.api.PortfolioGroupsApi).create_portfolio_group(
                scope=cls.scope,
                create_portfolio_group_request=lusid.models.
                CreatePortfolioGroupRequest(
                    code="SubGroups",
                    display_name="SubGroups",
                    created="2010-10-09T08:00:00Z",
                    values=[
                        lusid.models.ResourceId(scope=cls.scope,
                                                code="Portfolio-Y")
                    ],
                    sub_groups=[
                        lusid.models.ResourceId(scope=cls.scope, code="ABC12")
                    ],
                ),
            )

        assert isinstance(response, lusid.models.PortfolioGroup)