Example #1
0
def process_args(api, args):
    """Load a price/rate file and upsert instrument analytics for one date.

    Reads the CSV named by ``args.filename``, maps the first column to
    instrument identifiers (or currency identifiers when ``args.fx`` is set),
    then either updates an existing analytic store (``args.update``) or
    creates the store first and upserts into it.
    """
    frame = lpt.read_csv(args.filename)
    as_of = lpt.to_date(args.date)
    scope = args.scope

    instr_col = frame.columns.values[0]  # first column holds the instrument

    if args.fx:
        # FX rates live in their own scope and use currency identifiers
        scope += "_FX"
        frame[instr_col] = "CCY_" + frame[instr_col]
        frame["column3"] = None
    else:
        mi.map_instruments(api, frame, instr_col)

    # normalise the column names for the analytic upsert below
    frame.columns = ["instrument", "price", "ccy"]

    def upsert_analytics(result=None):
        # Build one analytic per row and push them all in a single call.
        analytics = []
        for _, row in frame.iterrows():
            analytics.append(
                api.models.InstrumentAnalytic(
                    row["instrument"], row["price"], row["ccy"]
                )
            )
        return api.call.set_analytics(
            scope, as_of.year, as_of.month, as_of.day, analytics
        ).bind(lambda r: None)

    if args.update:
        # Store already exists: go straight to the upsert.
        return upsert_analytics()

    # Otherwise create the analytic store first, then upsert into it.
    request = api.models.CreateAnalyticStoreRequest(scope, as_of)
    return api.call.create_analytic_store(request).bind(upsert_analytics)
Example #2
0
def process_args(api, args):
    """Collect unique values of one column across txn CSV files and map them.

    Optionally expands ``args.input`` with every ``*-txn-*.csv`` file found in
    ``args.folder``, concatenates the de-duplicated target column from each
    file, and hands the resulting FROM/TO frame to ``map_instruments``.
    """
    if args.folder:
        # Pull any transaction CSVs out of the folder as extra inputs.
        extras = [
            os.path.join(args.folder, name)
            for name in os.listdir(args.folder)
            if "-txn-" in name and name.endswith(".csv")
        ]
        args.input.extend(extras)

    # One single-column, de-duplicated frame per input file.
    pieces = [
        lpt.read_csv(path)[[args.column]].drop_duplicates() for path in args.input
    ]
    combined = pd.concat(pieces, ignore_index=True, sort=True)
    df = combined.drop_duplicates().reset_index(drop=True)

    df.columns = ["FROM"]
    df["TO"] = df["FROM"]  # identity mapping; map_instruments rewrites TO

    return map_instruments(api, df, "TO")
Example #3
0
def include_mappings(path):
    """Merge FROM→TO overrides from a CSV file into the module mapping table.

    A falsy *path* (e.g. ``None`` or ``""``) is a no-op.
    """
    if not path:
        return
    overrides = lpt.read_csv(path)
    # Index by FROM so the TO column becomes a FROM→TO dict for the update.
    mapping_table.update(overrides.set_index("FROM")["TO"].to_dict())