# NOTE: these imports are assumed for this excerpt; the exact module paths may
# differ between versions of the lusid SDK and lusidtools.
import logging

from lusid.utilities import ApiClientFactory
from lusidtools.cocoon import cocoon_printer
from lusidtools.cocoon.cocoon import load_from_data_frame
from lusidtools.cocoon.utilities import (
    identify_cash_items,
    load_data_to_df_and_detect_delimiter,
    load_json_file,
    scale_quote_of_type,
    validate_mapping_file_structure,
)


def load_transactions(args):
    file_type = "transactions"

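    # create ApiFactory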
    factory = ApiClientFactory(api_secrets_filename=args["secrets_file"])

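    # get data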
    if args["delimiter"]:
        logging.info(f"delimiter specified as {repr(args['delimiter'])}")
    logging.debug("Getting data")
    transactions = load_data_to_df_and_detect_delimiter(args)

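    # get mappings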
    mappings = load_json_file(args["mapping"])

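    # identify cash items if the mapping file defines a cash_flag section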
    if "cash_flag" in mappings.keys():
        transactions, mappings = identify_cash_items(
            transactions, mappings, file_type
        )

    validate_mapping_file_structure(mappings, transactions.columns, file_type)

    if args["dryrun"]:
        logging.info("--dryrun specified as True, exiting before upsert call is made")
        return 0

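    # upsert the transactions from the data frame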
    transactions_response = load_from_data_frame(
        api_factory=factory,
        data_frame=transactions,
        scope=args["scope"],
        identifier_mapping=mappings[file_type]["identifier_mapping"],
        mapping_required=mappings[file_type]["required"],
        mapping_optional=mappings[file_type].get("optional", {}),
        file_type=file_type,
        batch_size=args["batch_size"],
        property_columns=mappings[file_type].get("property_columns", []),
    )

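    # summarise successes and errors from the upsert response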
    succ, errors = cocoon_printer.format_transactions_response(transactions_response)

    logging.info(f"number of successful upserts: {len(succ)}")
    logging.info(f"number of errors            : {len(errors)}")

    if args["display_response_head"]:
        logging.info(succ.head(40))
        logging.info(errors.head(40))

    return transactions_response


def load_instruments(args):
    file_type = "instruments"

    # create ApiFactory
    factory = ApiClientFactory(api_secrets_filename=args["secrets_file"])

    # get data
    if args["delimiter"]:
        logging.info(f"delimiter specified as {repr(args['delimiter'])}")
    logging.debug("Getting data")
    instruments = load_data_to_df_and_detect_delimiter(args)

    # get mappings
    mappings = load_json_file(args["mapping"])

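    # check properties exist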
    if "property_columns" in mappings[file_type].keys() and not args["scope"]:
        err = (
            "properties must be upserted to a specified scope, but no scope was provided. "
            "Please state what scope to upsert properties to using '-s'."
        )
        logging.error(err)
        raise ValueError(err)

    validate_mapping_file_structure(mappings, instruments.columns, file_type)
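
    # identify cash items if the mapping file defines a cash_flag section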
    if "cash_flag" in mappings.keys():
        instruments, mappings = identify_cash_items(
            instruments, mappings, file_type, True
        )

    if args["dryrun"]:
        logging.info(
            "--dryrun specified as True, exiting before upsert call is made")
        return 0

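    # upsert the instruments from the data frame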
    instruments_response = load_from_data_frame(
        api_factory=factory,
        data_frame=instruments,
        scope=args["scope"],
        identifier_mapping=mappings[file_type]["identifier_mapping"],
        mapping_required=mappings[file_type]["required"],
        mapping_optional=mappings[file_type].get("optional", {}),
        file_type=file_type,
        batch_size=args["batch_size"],
        property_columns=mappings[file_type].get("property_columns", []),
    )

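    # summarise successes, errors and failures from the upsert response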
    succ, errors, failed = cocoon_printer.format_instruments_response(
        instruments_response)
    logging.info(f"number of successful upserts: {len(succ)}")
    logging.info(f"number of failed upserts    : {len(failed)}")
    logging.info(f"number of errors            : {len(errors)}")

    if args["display_response_head"]:
        logging.info(succ.head(40))
        logging.info(errors.head(40))
        logging.info(failed.head(40))

    return instruments_response


def load_quotes(args):
    file_type = "quotes"

    # create ApiFactory
    factory = ApiClientFactory(api_secrets_filename=args["secrets_file"])

    # get data
    if args["delimiter"]:
        logging.info(f"delimiter specified as {repr(args['delimiter'])}")
    logging.debug("Getting data")
    quotes = load_data_to_df_and_detect_delimiter(args)

    # get mappings
    mappings = load_json_file(args["mapping"])

    # check properties exist
    if "property_columns" in mappings[file_type].keys() and not args["scope"]:
        err = (
            "properties must be upserted to a specified scope, but no scope was provided. "
            "Please state what scope to upsert properties to using '-s'."
        )
        logging.error(err)
        raise ValueError(err)

    # identify cash items only when the mapping file defines a cash_flag section
    if "cash_flag" in mappings.keys():
        quotes, mappings = identify_cash_items(quotes, mappings, file_type, True)

    validate_mapping_file_structure(mappings, quotes.columns, file_type)

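    # apply any quote_scalar scaling defined in the mapping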
    if "quote_scalar" in mappings[file_type].keys():
        quotes, mappings = scale_quote_of_type(quotes, mappings)

    if args["dryrun"]:
        logging.info("--dryrun specified as True, exiting before upsert call is made")
        return quotes

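    # upsert the quotes from the data frame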
    quotes_response = load_from_data_frame(
        api_factory=factory,
        data_frame=quotes,
        scope=args["scope"],
        properties_scope=args.get("property_scope", args["scope"]),
        identifier_mapping={},
        mapping_required=mappings[file_type]["required"],
        mapping_optional=mappings[file_type].get("optional", {}),
        file_type=file_type,
        batch_size=args["batch_size"],
        property_columns=mappings[file_type].get("property_columns", []),
    )
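
    # summarise successes, errors and failures from the upsert response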
    succ, errors, failed = cocoon_printer.format_quotes_response(
        quotes_response)
    logging.info(f"number of successful upserts: {len(succ)}")
    logging.info(f"number of failed upserts    : {len(failed)}")
    logging.info(f"number of errors            : {len(errors)}")

    if args["display_response_head"]:
        logging.info(succ.head(40))
        logging.info(errors.head(40))
        logging.info(failed.head(40))

    return quotes_response


def load_holdings(args):
    file_type = "holdings"

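    # create ApiFactory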
    factory = ApiClientFactory(api_secrets_filename=args["secrets_file"])

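    # get data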
    if args["delimiter"]:
        logging.info(f"delimiter specified as {repr(args['delimiter'])}")
    logging.debug("Getting data")
    holdings = load_data_to_df_and_detect_delimiter(args)

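    # get mappings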
    mappings = load_json_file(args["mapping"])
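
    # identify cash items if the mapping file defines a cash_flag section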
    if "cash_flag" in mappings.keys():
        holdings, mappings = identify_cash_items(holdings, mappings, file_type)

    validate_mapping_file_structure(mappings, holdings.columns, file_type)

    if args["dryrun"]:
        logging.info(
            "--dryrun specified as True, exiting before upsert call is made")
        return 0

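    # upsert the holdings from the data frame, including any sub-holding keys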
    holdings_response = load_from_data_frame(
        api_factory=factory,
        data_frame=holdings,
        scope=args["scope"],
        properties_scope=args.get("property_scope", args["scope"]),
        identifier_mapping=mappings[file_type]["identifier_mapping"],
        mapping_required=mappings[file_type]["required"],
        mapping_optional=mappings[file_type].get("optional", {}),
        file_type=file_type,
        batch_size=args["batch_size"],
        property_columns=mappings[file_type].get("property_columns", []),
        sub_holding_keys=mappings[file_type].get("sub_holding_keys", []),
    )

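    # summarise successes and errors from the upsert response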
    succ, errors = cocoon_printer.format_holdings_response(holdings_response)

    logging.info(f"number of successful upserts: {len(succ)}")
    logging.info(f"number of errors            : {len(errors)}")

    if args["display_response_head"]:
        logging.info(succ.head(40))
        logging.info(errors.head(40))

    return holdings_response