Example #1
def make_historical_balances_and_prices_table(
        game_id: int,
        user_id: int,
        start_time: float = None,
        end_time: float = None) -> pd.DataFrame:
    """start_time and end_time control the window that the function will construct a merged series of running balances
    and prices. If left as None they will default to the game start and the current time.

    the end_time argument determines the reference date for calculating the "bookends" of the running balances series.
    if this is passed as its default value, None, it will use the present time as the bookend reference. being able to
    control this value explicitly lets us "freeze time" when running DAG tests.
    """
    start_time, end_time = get_time_defaults(game_id, start_time, end_time)
    balances_df = get_user_balance_history(game_id, user_id, start_time,
                                           end_time)
    df = add_bookends(balances_df, end_time=end_time)
    df = df.groupby("symbol").apply(resample_values)
    df = df.reset_index().rename(columns={"level_1": "timestamp"})
    df = append_price_data_to_balance_histories(df)  # price appends + resampling happen here
    df = df.sort_values(["symbol", "timestamp"])
    df["value"] = df["balance"] * df["price"]
    df = filter_for_trade_time(df)
    df[["balance", "price", "value"]] = df[["balance", "price",
                                            "value"]].astype(float)
    apply_validation(df, balances_and_prices_table_schema, strict=True)
    return df.reset_index(drop=True).sort_values(["timestamp", "symbol"])
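The per-symbol resampling above goes through the project's resample_values helper, so the intermediate index handling is easy to miss. A minimal, self-contained sketch of the same groupby/apply resampling pattern on toy data (the hourly grid and the values here are illustrative assumptions, not the project's actual interval) might look like this:

import pandas as pd

balances = pd.DataFrame({
    "symbol": ["AAPL", "AAPL", "TSLA"],
    "timestamp": pd.to_datetime(["2021-01-04 09:30", "2021-01-04 11:10", "2021-01-04 10:00"]),
    "balance": [10, 12, 5],
})

def resample_hourly(group: pd.DataFrame) -> pd.DataFrame:
    # forward-fill each symbol's running balance onto an hourly grid
    return group.set_index("timestamp")[["balance"]].resample("1H").last().ffill()

# apply returns a (symbol, timestamp) MultiIndex; in the project the inner level
# comes back unnamed, which is why the code above renames "level_1" to "timestamp"
resampled = balances.groupby("symbol").apply(resample_hourly).reset_index()
print(resampled)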
Example #2
def serialize_and_pack_order_performance_table(df: pd.DataFrame, game_id: int, user_id: int):
    if df.empty:
        no_fulfilled_orders_table(game_id, user_id)
        return
    apply_validation(df, order_performance_schema, strict=True)
    agg_rules = {"symbol": "first", "quantity": "first", "clear_price": "first", "timestamp": "first",
                 "fifo_balance": "last", "basis": "first", "realized_pl": "sum", "unrealized_pl": "last",
                 "total_pct_sold": "last", "event_type": "first"}
    tab = df.groupby("order_label", as_index=False).agg(agg_rules)
    recent_prices = get_most_recent_prices(tab["symbol"].unique())
    recent_prices.rename(columns={"price": "Market price", "timestamp": "as of"}, inplace=True)
    tab = tab.merge(recent_prices, how="left")
    tab.sort_values(["order_label", "timestamp"], inplace=True)
    label_colors = assign_colors(tab["order_label"].to_list())
    tab["unrealized_pl"] = tab["fifo_balance"] * tab["Market price"] - (1 - tab["total_pct_sold"]) * tab["basis"]
    del tab["total_pct_sold"]
    tab["color"] = tab["order_label"].apply(lambda x: label_colors.get(x))
    # tack on sold orders
    sold_columns = ["symbol", "timestamp", "quantity", "clear_price", "basis", "event_type"]
    sold_df = df.loc[df["event_type"] == "sell", sold_columns]
    sold_df["basis"] = -1 * sold_df["basis"]
    tab = pd.concat([tab, sold_df], axis=0)
    tab.sort_values("timestamp", inplace=True)
    tab["realized_pl_percent"] = tab["realized_pl"] / tab["basis"]
    tab["unrealized_pl_percent"] = tab["unrealized_pl"] / tab["basis"]
    tab.rename(columns=FULFILLED_ORDER_MAPPINGS, inplace=True)
    tab.fillna(NA_NUMERIC_VAL, inplace=True)
    fulfilled_order_table = dict(data=tab.to_dict(orient="records"), headers=list(FULFILLED_ORDER_MAPPINGS.values()))
    s3_cache.set(f"{game_id}/{user_id}/{FULFILLED_ORDER_PREFIX}", json.dumps(fulfilled_order_table))
Example #3
def serialize_and_pack_order_performance_chart(df: pd.DataFrame, game_id: int, user_id: int):
    if df.empty:
        chart_json = make_null_chart("Waiting for orders...")
    else:
        apply_validation(df, order_performance_schema, strict=True)
        plot_df = add_bookends(df, group_var="order_label", condition_var="fifo_balance")
        plot_df["cum_pl"] = plot_df.groupby("order_label")["realized_pl"].cumsum()
        plot_df["timestamp"] = plot_df["timestamp"].apply(lambda x: posix_to_datetime(x))
        plot_df.set_index("timestamp", inplace=True)
        plot_df = plot_df.groupby("order_label", as_index=False).resample(f"{RESAMPLING_INTERVAL}T").last().ffill()
        plot_df = plot_df.reset_index(level=1)
        plot_df = filter_for_trade_time(plot_df)
        plot_df = append_price_data_to_balance_histories(plot_df)
        plot_df.sort_values(["order_label", "timestamp"], inplace=True)
        plot_df = add_time_labels(plot_df)
        plot_df = plot_df.groupby(["order_label", "t_index"], as_index=False).agg("last")
        plot_df["label"] = plot_df["timestamp"].apply(lambda x: datetime_to_posix(x)).astype(float)
        plot_df.sort_values("timestamp", inplace=True)
        plot_df["total_pl"] = plot_df["cum_pl"] + plot_df["fifo_balance"] * plot_df["price"] - (
                1 - plot_df["total_pct_sold"]) * plot_df["basis"]
        plot_df["return"] = 100 * plot_df["total_pl"] / plot_df["basis"]
        label_colors = assign_colors(plot_df["order_label"].unique())
        plot_df["color"] = plot_df["order_label"].apply(lambda x: label_colors.get(x))
        chart_json = make_chart_json(plot_df, "order_label", "return", "label", colors=plot_df["color"].unique())
    s3_cache.set(f"{game_id}/{user_id}/{ORDER_PERF_CHART_PREFIX}", json.dumps(chart_json))
Example #4
def make_order_labels(order_df: pd.DataFrame) -> pd.DataFrame:
    apply_validation(order_df, order_details_schema)
    order_df["order_label"] = pd.DatetimeIndex(pd.to_datetime(order_df['timestamp_fulfilled'], unit='s')).tz_localize(
        'UTC').tz_convert(TIMEZONE)
    order_df['order_label'] = order_df['order_label'].dt.strftime(DATE_LABEL_FORMAT)
    order_df["order_label"] = order_df["symbol"] + "/" + order_df["quantity"].astype(str) + " @ " + order_df[
        "clear_price_fulfilled"].map(USD_FORMAT.format) + "/" + order_df["order_label"]

    # check for cases where different orders have the same labels. in these cases add an [n]
    mask = order_df[["order_label", "buy_or_sell"]].duplicated(keep=False)
    if mask.any():

        def _index_duplicate_labels(dup_subset):
            new_labels = []
            dup_subset = dup_subset.reset_index(drop=True)
            for i, row in dup_subset.iterrows():
                label_elements = row["order_label"].split("/")
                label_elements[1] += f" [{i + 1}]"
                new_labels.append("/".join(label_elements))
            dup_subset["order_label"] = new_labels
            return dup_subset

        dup_order_df = order_df[mask]
        dup_order_df = dup_order_df.groupby("order_label").apply(_index_duplicate_labels).reset_index(drop=True)
        order_df = pd.concat([dup_order_df, order_df[~mask]])

    return order_df
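The duplicate-handling branch is easiest to see in isolation. With toy labels (the values below are made up), duplicated(keep=False) flags every order that shares a label and side, and the second label element picks up an " [n]" suffix so otherwise identical orders remain distinguishable:

import pandas as pd

labels = pd.DataFrame({
    "order_label": ["AMZN/5 @ $10.00/Jan 4", "AMZN/5 @ $10.00/Jan 4", "TSLA/2 @ $700.00/Jan 4"],
    "buy_or_sell": ["buy", "buy", "buy"],
})

# flag every row that shares a (label, side) combination with another row
mask = labels[["order_label", "buy_or_sell"]].duplicated(keep=False)

def _index_duplicates(group: pd.DataFrame) -> pd.DataFrame:
    group = group.reset_index(drop=True)
    new_labels = []
    for i, label in enumerate(group["order_label"]):
        parts = label.split("/")
        parts[1] += f" [{i + 1}]"
        new_labels.append("/".join(parts))
    group["order_label"] = new_labels
    return group

deduped = labels[mask].groupby("order_label").apply(_index_duplicates).reset_index(drop=True)
print(deduped["order_label"].tolist())
# ['AMZN/5 @ $10.00 [1]/Jan 4', 'AMZN/5 @ $10.00 [2]/Jan 4']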
Example #5
def make_the_field_charts(game_id: int, start_time: float = None, end_time: float = None):
    """This function wraps a loop that produces the balances chart for each user and the field chart for the game. This
    will run every time a user places and order, and periodically as prices are collected
    """
    user_ids = get_active_game_user_ids(game_id)
    portfolios = []
    portfolio_table_keys = list(portfolio_comps_schema.keys())
    for user_id in user_ids:
        df = make_user_balances_chart_data(game_id, user_id, start_time, end_time)
        serialize_and_pack_balances_chart(df, game_id, user_id)
        portfolio = aggregate_portfolio_value(df)
        portfolio["username"] = get_usernames([user_id])[0]
        apply_validation(portfolio, portfolio_comps_schema)
        portfolios.append(portfolio[portfolio_table_keys])

    # add index data
    if check_single_player_mode(game_id):
        for index in TRACKED_INDEXES:
            df = get_index_portfolio_value_data(game_id, index, start_time, end_time)
            df["timestamp"] = df["timestamp"].apply(lambda x: posix_to_datetime(x))
            df = add_time_labels(df)
            df = df.groupby("t_index", as_index=False).agg(
                {"username": "******", "label": "last", "value": "last", "timestamp": "last"})
            apply_validation(df, portfolio_comps_schema)
            portfolios.append(df[portfolio_table_keys])

    portfolios_df = pd.concat(portfolios)
    relabelled_df = relabel_aggregated_portfolios(portfolios_df)
    relabelled_df.sort_values("timestamp", inplace=True)
    serialize_and_pack_portfolio_comps_chart(relabelled_df, game_id)
Example #6
def serialize_and_pack_balances_chart(df: pd.DataFrame, game_id: int, user_id: int):
    chart_json = make_null_chart("Cash")
    if df.shape[0] > 1:  # a dataframe with a single row means that this user just got started and is only holding cash
        df.sort_values("timestamp", inplace=True)
        apply_validation(df, balances_chart_schema)
        chart_json = make_chart_json(df, "symbol", "value")
    s3_cache.set(f"{game_id}/{user_id}/{BALANCES_CHART_PREFIX}", json.dumps(chart_json))