Example #1
def update_items() -> None:
    """Check current inventory for items not included in master table."""
    driver = start_driver()
    ark_inventory = io.reader("cleaned",
                              "ark_inventory",
                              "parquet",
                              self_schema=True)
    user_items = io.reader(folder="", name="user_items", ftype="json")

    items_character = _character_most_items(ark_inventory)
    update_items = list(set(items_character) - set(user_items))

    with tqdm(total=len(update_items), desc="Items for update") as pbar:
        for item_id in update_items:
            user_items[item_id] = _get_item_facts(driver, item_id)
            user_items[item_id]["ahm"] = items_character[item_id]
            user_items[item_id]["active"] = True
            user_items[item_id]["ignore"] = False
            user_items[item_id]["Sell"] = False
            user_items[item_id]["Buy"] = False
            user_items[item_id]["make_pass"] = True
            pbar.update(1)

    io.writer(user_items, folder="", name="user_items", ftype="json")

    driver.close()
Example #2
def analyse_rolling_buyout() -> None:
    """Builds rolling average of user's auction purchases using beancounter data."""
    bean_purchases = io.reader("cleaned", "bean_purchases", "parquet")

    bean_buys = bean_purchases["item"].isin(utils.user_item_filter("Buy"))
    bean_purchases = bean_purchases[bean_buys].sort_values(
        ["item", "timestamp"])

    cols = ["item", "buyout_per"]
    purchase_each = utils.enumerate_quantities(bean_purchases,
                                               cols=cols,
                                               qty_col="qty")

    # Needed to ensure that groupby will work for a single item
    purchase_each.loc[purchase_each.index.max() + 1] = ("dummy", 0)

    SPAN = cfg.analysis["ROLLING_BUYOUT_SPAN"]
    ewm = (purchase_each.groupby("item").apply(
        lambda x: x["buyout_per"].ewm(span=SPAN).mean()).reset_index())
    # Get the latest item value
    latest_item = ewm.groupby("item")["level_1"].max()
    bean_rolling_buyout = ewm.loc[latest_item].set_index("item")[[
        "buyout_per"
    ]]

    bean_rolling_buyout = bean_rolling_buyout.drop("dummy").astype(int)
    bean_rolling_buyout.columns = ["bean_rolling_buyout"]
    io.writer(bean_rolling_buyout, "intermediate", "bean_rolling_buyout",
              "parquet")
Example #3
def _get_item_facts(driver: webdriver, item_id: int) -> Dict[str, Any]:
    """Given an item_id get info from BB and icon."""
    # Get Booty Bay basic data
    if Path(cfg.data_path, "item_info", f"{item_id}.json").exists():
        result = io.reader("item_info", str(item_id), "json")
    else:
        result = get_bb_item_page(driver, item_id)
        io.writer(result, folder="item_info", name=str(item_id), ftype="json")
        if not result:
            logger.debug(f"No item info for {item_id}")
            # continue

    data = utils.get_bb_fields(result, "stats")
    history = utils.get_bb_fields(result, "history")

    item_info = {k: v for k, v in data.items() if k in cfg.item_info_fields}
    item_info["true_auctionable"] = (bool("vendor_price" not in item_info)
                                     and bool(item_info["auctionable"])
                                     and not bool(item_info["vendornpccount"])
                                     and bool(item_info["price"])
                                     and bool(history))

    # Get icon
    if not Path(cfg.data_path, "item_icons",
                f"{item_info['icon']}.jpg").exists():
        url = cfg.icons_path + item_info["icon"] + ".jpg"
        r = requests.get(url)
        io.writer(r.content, "item_icons", item_info["icon"], "jpg")

    return item_info
Example #4
def produce_activity_tracking() -> None:
    """Produce chart of item prices, sold and bought for."""
    bean_results = io.reader("cleaned", "bean_results", "parquet")
    bean_results["date"] = bean_results["timestamp"].dt.date.astype(
        "datetime64")
    bean_sales = bean_results.groupby(["item", "date"])["buyout_per"].mean()
    bean_sales.name = "sell_price"

    bean_purchases = io.reader("cleaned", "bean_purchases", "parquet")
    bean_purchases["date"] = bean_purchases["timestamp"].dt.date.astype(
        "datetime64")
    bean_buys = bean_purchases.groupby(["item", "date"])["buyout_per"].mean()
    bean_buys.name = "buy_price"

    bb_history = io.reader("cleaned", "bb_history", "parquet")
    bb_history = bb_history[bb_history["date"] >= bean_results["date"].min()]
    bb_history = bb_history.set_index(["item", "date"])

    activity = bb_history.join(bean_buys).join(bean_sales)
    cols = ["silveravg", "buy_price", "sell_price"]

    user_items = io.reader("", "user_items", "json")
    item_names = {
        item_id: v.get("name_enus")
        for item_id, v in user_items.items()
    }

    for item_id, _ in user_items.items():
        item = item_names[item_id]
        if item in activity.index:
            plt.figure()
            activity.loc[item][cols].plot(title=f"Historic activity {item}")
            io.writer(plt, "plots", f"{item}_activity", "png")
            plt.close()
Example #5
def produce_item_reporting() -> None:
    """Collate item information and prepare feasibility chart."""
    item_table = io.reader("intermediate", "item_table", "parquet")
    buy_policy = io.reader("outputs", "buy_policy",
                           "parquet").set_index("item")
    sell_policy = io.reader("outputs", "sell_policy",
                            "parquet").set_index("item")
    make_policy = io.reader("outputs", "make_policy", "parquet")

    item_info = (item_table.join(buy_policy[[
        x for x in buy_policy if x not in item_table
    ]]).join(sell_policy[[
        x for x in sell_policy if x not in item_table
    ]]).join(make_policy[[x for x in make_policy if x not in item_table]]))

    item_info = item_info[sorted(item_info.columns)]

    # item_reporting = {
    #     item: pd.DataFrame(item_info.loc[item]).to_html() for item in item_info.index
    # }
    io.writer(item_info, "reporting", "item_info", "parquet")

    listing_profits = io.reader("reporting", "listing_profits", "parquet")

    MAX_LISTINGS = cfg.analysis["MAX_LISTINGS_PROBABILITY"]
    for item in listing_profits.columns:
        plt.figure()
        listing_profits[item].plot(title=f"List profit {item}")
        pd.Series([sell_policy.loc[item, "profit_feasible"] * MAX_LISTINGS
                   ]).plot()
        io.writer(plt, "plots", f"{item}_feasible", "png")
        plt.close()
Example #6
def draw_profit_charts() -> None:
    """Create charts of alltime and individual item profits."""
    profits = io.reader("reporting", "profits", "parquet")

    alltime_profit = (
        profits.reset_index().groupby("date")["total_profit"].sum().cumsum() /
        10000)

    tot = int(alltime_profit.iloc[-1])
    daily = int(tot / alltime_profit.shape[0])

    plt.figure()
    alltime_profit.plot(
        title=f"Total profit over all items ({tot} gold, {daily} per day)")
    io.writer(plt, "plots", "_alltime_profit", "png")
    plt.close()

    user_items = io.reader("", "user_items", "json")
    item_names = {
        item_id: v.get("name_enus")
        for item_id, v in user_items.items()
    }

    for item_id, _ in user_items.items():
        item = item_names[item_id]
        if item in profits.index:
            plt.figure()
            (profits.loc[item, "total_profit"].cumsum() /
             10000).plot(title=f"Profit {item}")
            io.writer(plt, "plots", f"{item}_profit", "png")
            plt.close()
Example #7
def analyse_listings() -> None:
    """Convert live listings into single items."""
    auc_listings = io.reader("cleaned", "auc_listings", "parquet")
    user_items = io.reader("", "user_items", "json")

    auc_listings = auc_listings[auc_listings["item_id"].isin(user_items)]

    predicted_prices = io.reader("intermediate", "predicted_prices", "parquet")

    ranges = pd.merge(
        auc_listings,
        predicted_prices,
        how="left",
        left_on="item",
        right_index=True,
        validate="m:1",
    )
    ranges["price_z"] = (ranges["price_per"] -
                         ranges["bbpred_price"]) / ranges["bbpred_std"]

    cols = ["item", "price_per", "price_z"]
    listing_each = utils.enumerate_quantities(ranges, cols=cols)
    listing_each.columns = ["item", "list_price_per", "list_price_z"]

    io.writer(listing_each, "intermediate", "listing_each", "parquet")
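The `price_z` column is simply a standard score of each listed price against the Booty Bay prediction; a tiny sketch with made-up numbers (same column names as above):

import pandas as pd

ranges = pd.DataFrame({"price_per": [95, 130],
                       "bbpred_price": [100, 100],
                       "bbpred_std": [10, 10]})
ranges["price_z"] = (ranges["price_per"] -
                     ranges["bbpred_price"]) / ranges["bbpred_std"]
print(ranges["price_z"].tolist())  # [-0.5, 3.0]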
Example #8
def predict_item_prices() -> None:
    """Analyse exponential average mean and std of items given 14 day, 2 hour history."""
    bb_fortnight = io.reader("cleaned", "bb_fortnight", "parquet")
    user_items = io.reader("", "user_items", "json")

    predicted_prices = _predict_item_prices(bb_fortnight, user_items)
    io.writer(predicted_prices, "intermediate", "predicted_prices", "parquet")
Example #9
def clean_auctioneer_data() -> None:
    """Cleans Auctioneer json data into tablular format."""
    aucscan_data = io.reader("raw", "aucscan_data", "json")

    auc_listings_raw = pd.DataFrame(aucscan_data)
    auc_listings = _process_auctioneer_data(auc_listings_raw)

    # Save the latest scan to the cleaned layer
    io.writer(auc_listings, "cleaned", "auc_listings", "parquet")
Example #10
def get_beancounter_data() -> None:
    """Reads WoW Addon Beancounter lua and saves to local json."""
    """Reads Ark Inventory json and parses into tabular format."""
    beancounter_data: dict = {}
    for account_name in cfg.wow.get("accounts", {}):
        path = utils.make_lua_path(account_name, "BeanCounter")
        bean = io.reader(name=path, ftype="lua")
        beancounter_data = utils.source_merge(beancounter_data, bean).copy()
    io.writer(beancounter_data, "raw", "beancounter_data", "json")
Example #11
def create_item_inventory() -> None:
    """Convert Arkinventory tabular data into dataframe of counts for user items."""
    ark_inventory = io.reader("cleaned", "ark_inventory", "parquet")

    item_skeleton = io.reader("cleaned", "item_skeleton", "parquet")
    user_ahm = item_skeleton.set_index("item_id")["user_ahm"]

    ark_inventory["ahm"] = ark_inventory["item_id"].replace(user_ahm)
    ark_inventory["role"] = (
        ark_inventory["character"] == ark_inventory["ahm"]).replace({
            True:
            "ahm",
            False:
            "char"
        })
    item_inventory = ark_inventory

    role_types = ["ahm", "char"]
    assert item_inventory["role"].isin(role_types).all()

    location_rename = {
        "Inventory": "bag",
        "Bank": "bank",
        "Auctions": "auc",
        "Mailbox": "mail",
    }
    item_inventory["loc_short"] = item_inventory["location"].replace(
        location_rename)
    item_inventory["inv"] = ("inv_" + item_inventory["role"] + "_" +
                             item_inventory["loc_short"])

    item_inventory = item_inventory.groupby(["inv", "item"
                                             ]).sum()["count"].unstack().T

    # Ensure the full role x location grid of columns exists
    for role in role_types:
        for loc in location_rename.values():
            col = f"inv_{role}_{loc}"
            if col not in item_inventory.columns:
                item_inventory[col] = 0

    item_inventory = item_inventory.fillna(0).astype(int)

    # Analyse aggregate; ordering important here
    item_inventory["inv_total_all"] = item_inventory.sum(axis=1)

    cols = [x for x in item_inventory.columns if "ahm" in x]
    item_inventory["inv_total_ahm"] = item_inventory[cols].sum(axis=1)

    io.writer(item_inventory, "intermediate", "item_inventory", "parquet")
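The groupby/unstack step is where the inventory becomes one row per item and one column per role/location slot; a small sketch with invented counts (same column naming convention as above):

import pandas as pd

rows = [("inv_ahm_bag", "Potion", 4),
        ("inv_char_bank", "Potion", 6),
        ("inv_ahm_bag", "Herb", 20)]
item_inventory = pd.DataFrame(rows, columns=["inv", "item", "count"])

# One column per role/location slot, one row per item
pivot = item_inventory.groupby(["inv", "item"]).sum()["count"].unstack().T
print(pivot.fillna(0).astype(int))  # Herb: 20/0, Potion: 4/6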
Example #12
def merge_item_table() -> None:
    """Combine item information into single master table."""
    item_skeleton = io.reader("cleaned", "item_skeleton", "parquet")
    mat_prices = io.reader("intermediate", "mat_prices", "parquet")
    #     item_facts = io.reader("cleaned", "item_facts", "parquet")
    item_inventory = io.reader("intermediate", "item_inventory", "parquet")
    predicted_prices = io.reader("intermediate", "predicted_prices", "parquet")
    replenish = io.reader("intermediate", "replenish", "parquet")

    item_table = (
        item_skeleton.join(mat_prices).join(predicted_prices).join(
            item_inventory)
        #         .join(item_facts)
        .join(replenish)).fillna(0)

    io.writer(item_table, "intermediate", "item_table", "parquet")
Example #13
def get_auctioneer_data() -> None:
    """Reads WoW Addon Auctioneer lua and parses text file into json."""
    ahm = utils.get_ahm()
    path = utils.make_lua_path(ahm["account"], "Auc-ScanData")
    ropes = io.reader(name=path, ftype="lua", custom="Auc-ScanData")

    listings = []
    for rope in ropes:
        if len(rope) < 10:
            continue
        listings_part = rope.split("},{")
        listings_part[0] = listings_part[0].split("{{")[1]
        listings_part[-1] = listings_part[-1].split("},}")[0]
        listings.extend(listings_part)
    aucscan_data = [x.split("|")[-1].split(",") for x in listings]

    io.writer(aucscan_data, "raw", "aucscan_data", "json")
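The string surgery on each rope is terse; a toy illustration of how one serialized scan line gets cut into listings (the rope below is an invented stand-in for the Auc-ScanData format):

rope = 'return {{"1","2","3"},{"4","5","6"},}'
listings_part = rope.split("},{")
listings_part[0] = listings_part[0].split("{{")[1]
listings_part[-1] = listings_part[-1].split("},}")[0]
print(listings_part)  # ['"1","2","3"', '"4","5","6"']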
Example #14
def analyse_buy_policy(MAX_BUY_STD: int = 2) -> None:
    """Create buy policy."""
    logger.debug(f"max buy std {MAX_BUY_STD}")

    item_table = io.reader("intermediate", "item_table", "parquet")

    buy_policy = item_table[item_table["user_Buy"] == True]
    subset_cols = [
        "bbpred_price",
        "bbpred_std",
        "inv_total_all",
        "replenish_qty",
        "user_std_holding",
        "replenish_z",
    ]
    buy_policy = buy_policy[subset_cols]

    listing_each = io.reader("intermediate", "listing_each", "parquet")

    listing_each = listing_each.sort_values("list_price_per")

    rank_list = listing_each.join(buy_policy, on="item").dropna()

    rank_list["sell_rank"] = rank_list.groupby("item")["list_price_per"].rank(
        method="max"
    )

    rank_list = rank_list.drop_duplicates()
    rank_list["updated_rank"] = rank_list["replenish_qty"] - rank_list["sell_rank"]
    rank_list["updated_replenish_z"] = (
        rank_list["updated_rank"] / rank_list["user_std_holding"]
    )

    rank_list["updated_replenish_z"] = rank_list["updated_replenish_z"].clip(
        upper=MAX_BUY_STD
    )

    rank_list = rank_list[rank_list["updated_replenish_z"] > rank_list["list_price_z"]]
    io.writer(rank_list, "reporting", "buy_rank", "parquet")

    buy_policy["buy_price_cap"] = rank_list.groupby("item")["list_price_per"].max()
    buy_policy["buy_price_cap"] = buy_policy["buy_price_cap"].fillna(1).astype(int)

    buy_policy.index.name = "item"
    buy_policy = buy_policy.reset_index()
    io.writer(buy_policy, "outputs", "buy_policy", "parquet")
Example #15
def write_buy_policy() -> None:
    """Writes the buy policy to all accounts."""
    buy_policy = io.reader("outputs", "buy_policy", "parquet")

    cols = ["item", "buy_price_cap"]
    new_snatch = encode_buy_campaign(buy_policy[cols])

    # Read each account's client lua and replace the snatch list with the new campaign

    for account in cfg.wow.get("accounts", []):
        path = utils.make_lua_path(account_name=account, datasource="Auc-Advanced")
        data = io.reader(name=path, ftype="lua")
        snatch = data["AucAdvancedData"]["UtilSearchUiData"]["Current"]
        snatch["snatch.itemsList"] = {}
        snatch = snatch["snatch.itemsList"]
        data["AucAdvancedData"]["UtilSearchUiData"]["Current"][
            "snatch.itemsList"
        ] = new_snatch
        io.writer(data, name=path, ftype="lua")
Example #16
def get_bb_data() -> None:
    """Reads Booty Bay web API data using selenium and blizzard login."""
    driver = start_driver()
    # Get item_ids for user specified items of interest
    user_items = io.reader("", "user_items", "json")
    auctionable_items = [
        item_id for item_id, v in user_items.items() if v["true_auctionable"]
    ]

    # Get bb data from API
    bb_data: Dict[str, Dict[Any, Any]] = defaultdict(dict)

    with tqdm(total=len(auctionable_items), desc="Booty Items") as pbar:
        for item_id in auctionable_items:
            bb_data[item_id] = get_bb_item_page(driver, item_id)
            pbar.update(1)

    driver.close()
    io.writer(bb_data, "raw", "bb_data", "json")
Example #17
def get_arkinventory_data() -> None:
    """Reads WoW Addon Ark Inventory lua data and saves local copy as json."""
    acc_inv: dict = {}
    for account_name in cfg.wow.get("accounts", {}):
        path = utils.make_lua_path(account_name, "ArkInventory")
        data = io.reader(name=path, ftype="lua")
        player_data = data["ARKINVDB"]["global"]["player"]["data"]

        # Ensure character data does belong to account
        character_match = []
        for server, characters in cfg.wow["accounts"][account_name][
                "servers"].items():
            for character in characters["characters"]:
                character_match.append(f"{character} - {server}")

        for character in player_data.keys():
            if character in character_match:
                acc_inv[character] = player_data[character]

    io.writer(acc_inv, "raw", "arkinventory_data", "json")
Example #18
def calculate_inventory_valuation() -> None:
    """Get total inventory value based on current market price."""
    item_inventory = io.reader("intermediate", "item_inventory", "parquet")
    predicted_prices = io.reader("intermediate", "predicted_prices", "parquet")

    user_items = io.reader("", "user_items", "json")
    item_ids = {
        v.get("name_enus"): item_id
        for item_id, v in user_items.items()
    }

    item_trade = item_inventory.loc[item_inventory.index.isin(item_ids)]

    bbpred_price = predicted_prices["bbpred_price"]
    bbpred_price.name = "item"

    inventory_valuation = item_trade.multiply(bbpred_price, axis=0)
    inventory_valuation = inventory_valuation.fillna(0).astype(int)

    io.writer(inventory_valuation, "reporting", "inventory_valuation",
              "parquet")
Example #19
def clean_beancounter_data() -> None:
    """Reads Beancounter json and parses into tabular format."""
    data = io.reader("raw", "beancounter_data", "json")
    item_ids = cfg.get_item_ids_fixed()

    # Parses all listings into flat python list
    parsed = []
    for server, server_data in data["BeanCounterDB"].items():
        for character, auction_data in server_data.items():
            for auction_type, item_listings in auction_data.items():
                for item_id, listings in item_listings.items():
                    for _, listing in listings.items():
                        for auction in listing:
                            parsed.append([auction_type] + [int(item_id)] +
                                          [server] + [item_ids[int(item_id)]] +
                                          [character] + auction.split(";"))

    # Setup as pandas dataframe, remove irrelevant columns
    df = pd.DataFrame(parsed)

    bean_purchases = _clean_beancounter_purchases(df)
    io.writer(bean_purchases, "cleaned", "bean_purchases", "parquet")

    failed = _clean_beancounter_failed(df)
    success = _clean_beancounter_success(df)

    bean_results = pd.concat([success, failed])
    bean_results["success"] = bean_results["auction_type"].replace({
        "completedAuctions":
        1,
        "failedAuctions":
        0
    })
    io.writer(
        bean_results,
        "cleaned",
        "bean_results",
        "parquet",
        self_schema=True,
    )
Example #20
def clean_bb_data() -> None:
    """Parses all Booty Bay item json into tabular formats."""
    """Parses all Booty Bay item json into tabular formats."""
    item_data = io.reader("raw", "bb_data", "json")
    user_items = io.reader("", "user_items", "json")

    bb_fortnight: List = []
    bb_history: List = []
    bb_alltime: List = []

    for item_id, data in item_data.items():
        item_name = user_items[item_id].get("name_enus")

        bb_fortnight_data = pd.DataFrame(utils.get_bb_fields(data, "history"))
        bb_fortnight_data["snapshot"] = pd.to_datetime(
            bb_fortnight_data["snapshot"], unit="s")
        bb_fortnight_data["item"] = item_name
        bb_fortnight.append(bb_fortnight_data)

        bb_history_data = pd.DataFrame(data["daily"])
        bb_history_data["item"] = item_name
        bb_history.append(bb_history_data)

        bb_alltime_data = pd.DataFrame(utils.get_bb_fields(data, "monthly"))
        bb_alltime_data["item"] = item_name
        bb_alltime.append(bb_alltime_data)

    bb_fortnight_df = pd.concat(bb_fortnight)
    bb_fortnight_df["snapshot"] = pd.to_datetime(bb_fortnight_df["snapshot"])

    bb_history_df = pd.concat(bb_history)
    for col in bb_history_df.columns:
        if col != "date" and col != "item":
            bb_history_df[col] = bb_history_df[col].astype(int)
    bb_history_df["date"] = pd.to_datetime(bb_history_df["date"])

    bb_alltime_df = pd.concat(bb_alltime)
    bb_alltime_df["date"] = pd.to_datetime(bb_alltime_df["date"])

    io.writer(
        bb_fortnight_df,
        "cleaned",
        "bb_fortnight",
        "parquet",
        self_schema=True,
    )
    io.writer(
        bb_history_df,
        "cleaned",
        "bb_history",
        "parquet",
        self_schema=True,
    )
    io.writer(
        bb_alltime_df,
        "cleaned",
        "bb_alltime",
        "parquet",
        self_schema=True,
    )
Example #21
def write_make_policy() -> None:
    """Writes the make policy to all accounts."""
    make_policy = io.reader("outputs", "make_policy", "parquet")
    new_craft_queue, item_groups = encode_make_policy(make_policy)

    ahm = utils.get_ahm()
    path = utils.make_lua_path(
        account_name=ahm["account"], datasource="TradeSkillMaster"
    )
    content = io.reader(name=path, ftype="lua", custom="rb")

    craft_mark = (
        f'f@Alliance - {cfg.wow["booty_server"]["server_name"]}@internalData@crafts'
    )
    start, end = utils.find_tsm_marker(content, f'["{craft_mark}"]'.encode("ascii"))

    crafting_dict = lua.decode("{" + content[start:end].decode("ascii") + "}")
    for _, item_data in crafting_dict[craft_mark].items():
        item_name = item_data.get("name", "_no_name")
        queued = new_craft_queue.get(item_name, 0)
        if "queued" in item_data:
            item_data["queued"] = queued

    new_craft = utils.dict_to_lua(crafting_dict).encode("ascii")
    new_craft = new_craft.replace(
        f"\n{craft_mark}".encode("ascii"), f'\n["{craft_mark}"]'.encode("ascii"),
    )
    content = content[:start] + new_craft + content[end:]

    # Update item groups
    groups_mark = '["p@Default@userData@items"]'
    item_text = f"{groups_mark} = " + "{"
    for item_code, group in item_groups.items():
        item_text += f'["i:{item_code}"] = "{group}", '
    item_text += "}"
    start, end = utils.find_tsm_marker(content, groups_mark.encode("ascii"))
    content = content[:start] + item_text.encode("ascii") + content[end:]

    io.writer(content, name=path, ftype="lua", custom="wb")
Example #22
def write_sell_policy() -> None:
    """Writes the sell policy to accounts."""
    sell_policy = io.reader("outputs", "sell_policy", "parquet")

    cols = [
        "item",
        "sell_buyout",
        "sell_bid",
        "sell_count",
        "sell_stack",
        "sell_duration",
    ]
    new_appraiser = encode_sell_campaign(sell_policy[cols])

    # Read each account's client lua and replace the appraiser config with the new campaign
    for account in cfg.wow.get("accounts", []):
        path = utils.make_lua_path(account_name=account, datasource="Auc-Advanced")
        data = io.reader(name=path, ftype="lua")
        data["AucAdvancedConfig"]["profile.Default"]["util"][
            "appraiser"
        ] = new_appraiser
        io.writer(data, name=path, ftype="lua")
Example #23
def analyse_material_cost() -> None:
    """Analyse cost of materials for items, using purchase history or BB predicted price."""
    bean_rolling_buyout = io.reader("intermediate", "bean_rolling_buyout",
                                    "parquet")
    item_prices = io.reader("intermediate", "predicted_prices", "parquet")
    mat_prices = item_prices.join(bean_rolling_buyout)

    user_items = io.reader("", "user_items", "json")
    auctionable_items = {
        item_id: v
        for item_id, v in user_items.items() if v["true_auctionable"]
    }

    r = cfg.analysis["BB_MAT_PRICE_RATIO"]

    # Material costs are taken as a ratio of booty bay prices, and (recent) actual buyouts
    mat_prices["material_buyout_cost"] = (
        mat_prices["bean_rolling_buyout"].fillna(mat_prices["bbpred_price"]) *
        (1 - r) + (mat_prices["bbpred_price"] * r)).astype(int)

    mat_prices["material_make_cost"] = 0

    # Determine raw material cost for manufactured items
    for _, item_details in auctionable_items.items():
        item_name = item_details.get("name_enus")
        material_cost = 0
        user_made_from = item_details.get("made_from", {})
        if user_made_from:
            for ingredient, count in user_made_from.items():
                material_cost += (
                    mat_prices.loc[ingredient, "material_buyout_cost"] * count)
        else:
            material_cost = mat_prices.loc[item_name, "material_buyout_cost"]
        mat_prices.loc[item_name, "material_make_cost"] = int(material_cost)

    mat_prices = mat_prices[["material_buyout_cost", "material_make_cost"]]
    io.writer(mat_prices, "intermediate", "mat_prices", "parquet")
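The buyout cost is a convex blend of recent actual purchases and the Booty Bay prediction; a scalar example with invented numbers (r stands in for `BB_MAT_PRICE_RATIO`):

r = 0.25
bean_rolling_buyout = 80   # recent per-unit purchase price
bbpred_price = 120         # Booty Bay predicted price

material_buyout_cost = int(bean_rolling_buyout * (1 - r) + bbpred_price * r)
print(material_buyout_cost)  # 90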
Example #24
def analyse_replenishment() -> None:
    """Determine the demand for item replenishment."""
    item_skeleton = io.reader("cleaned", "item_skeleton", "parquet")
    item_inventory = io.reader("intermediate", "item_inventory", "parquet")

    replenish = item_skeleton.join(item_inventory).fillna(0)

    user_items = io.reader("", "user_items", "json")
    item_ids = {
        v.get("name_enus"): item_id
        for item_id, v in user_items.items()
    }

    replenish["replenish_qty"] = (replenish["user_mean_holding"] -
                                  replenish["inv_total_all"])

    # Update replenish list with user_made_from
    for item, row in replenish.iterrows():
        item_id = item_ids[item]

        if row["replenish_qty"] > 0:
            for ingredient, count in user_items[item_id].get("made_from",
                                                             {}).items():
                replenish.loc[ingredient,
                              "replenish_qty"] += (count *
                                                   row["replenish_qty"])
        else:  # pragma: no cover
            pass

    replenish["replenish_z"] = (replenish["replenish_qty"] /
                                replenish["user_std_holding"])
    replenish["replenish_z"] = (replenish["replenish_z"].replace([inf, -inf],
                                                                 0).fillna(0))

    replenish = replenish[["replenish_qty", "replenish_z"]]
    io.writer(replenish, "intermediate", "replenish", "parquet")
Example #25
def produce_listing_items() -> None:
    """Generte the item listing on current AH."""
    listing_each = io.reader("intermediate", "listing_each", "parquet")
    item_info = io.reader("reporting", "item_info", "parquet")

    user_items = io.reader("", "user_items", "json")
    item_names = {
        item_id: v.get("name_enus")
        for item_id, v in user_items.items()
    }

    for item_id, _ in user_items.items():
        item = item_names[item_id]
        plt.figure()
        list_item = listing_each[(listing_each["item"] == item)
                                 & (listing_each["list_price_z"] < 10)]
        list_item = list_item["list_price_per"].sort_values().reset_index(
            drop=True)
        list_item.plot(title=f"Current AH listings {item}")

        pd.Series([item_info.loc[item, "material_make_cost"]] *
                  list_item.shape[0]).plot()
        io.writer(plt, "plots", f"{item}_listing", "png")
        plt.close()
Example #26
def analyse_make_policy() -> None:
    """Prints what potions to make."""
    item_table = io.reader("intermediate", "item_table", "parquet")
    item_table.index.name = "item"

    cols = [
        "item_id",
        "user_Make",
        "user_Sell",
        "user_make_pass",
        "user_mean_holding",
        "inv_total_all",
        "inv_ahm_bag",
        "inv_ahm_bank",
    ]
    make_policy = item_table[cols].copy()

    make_policy["make_ideal"] = (
        make_policy["user_mean_holding"] - make_policy["inv_total_all"]
    )
    make_policy["make_counter"] = make_policy["make_ideal"].apply(lambda x: max(x, 0))
    make_policy["make_mat_available"] = (
        make_policy["inv_ahm_bag"] + make_policy["inv_ahm_bank"]
    )
    make_policy["make_actual"] = 0
    make_policy["make_mat_flag"] = 0

    user_items = io.reader("", "user_items", "json")
    item_ids = {v.get("name_enus"): item_id for item_id, v in user_items.items()}

    # Iterates through the table one at a time, to ensure fair distribution of mat usage
    # Tests if reached counter and is made from stuff
    # Checks the material count can go down first before decrementing
    # If after each check, append to list to see for any changes on any pass through
    change = [True]
    while any(change):
        change = []

        for item, row in make_policy.iterrows():
            item_id = item_ids[item]

            made_from = user_items[item_id].get("made_from", {})
            under_counter = row["make_actual"] < row["make_counter"]
            user_make_pass = row["user_make_pass"]

            if made_from and under_counter and not (user_make_pass):
                item_increment = True
                for material, qty in made_from.items():
                    if "Vial" not in material:
                        item_increment = (
                            make_policy.loc[material, "make_mat_available"] >= qty
                        ) & item_increment

                if item_increment:
                    for material, qty in made_from.items():
                        make_policy.loc[material, "make_mat_available"] -= qty
                        make_policy.loc[material, "make_mat_flag"] = 1
                    make_policy.loc[item, "make_actual"] += 1

                change.append(item_increment)

    io.writer(make_policy, "outputs", "make_policy", "parquet")
Example #27
def analyse_sell_policy(
    stack: int = 1,
    max_sell: int = 10,
    duration: str = "m",
    MAX_STD: int = 5,
    MIN_PROFIT: int = 300,
    MIN_PROFIT_PCT: float = 0.015,
) -> None:
    """Creates sell policy based on information."""
    item_table = io.reader("intermediate", "item_table", "parquet")
    listing_each = io.reader("intermediate", "listing_each", "parquet")
    item_volume_change_probability = io.reader(
        "intermediate", "item_volume_change_probability", "parquet"
    )

    cols = [
        "item_deposit",
        "material_make_cost",
        "bbpred_std",
        "bbpred_price",
        "user_max_sell",
        "inv_ahm_bag",
        "replenish_qty",
        "replenish_z",
    ]
    sell_items = item_table[item_table["user_Sell"] == True][cols]
    sell_items["item_deposit"] = sell_items["item_deposit"] * (
        utils.duration_str_to_mins(duration) / (60 * 24)
    )

    sell_items["sell_exp_decay"] = 2 - sell_items["replenish_z"].apply(
        lambda x: norm.cdf(x)
    )

    listing_each = listing_each[listing_each["list_price_z"] < MAX_STD]
    listing_each = listing_each.sort_values(["item", "list_price_per"])
    listing_each["sell_rank"] = (
        listing_each.groupby("item")["list_price_z"].rank(method="first").astype(int)
        - 1
    )

    listing_each = pd.merge(
        item_volume_change_probability,
        listing_each,
        how="left",
        on=["item", "sell_rank"],
    )
    listing_each = listing_each.set_index(["item"])
    listing_each["list_price_z"] = listing_each["list_price_z"].fillna(MAX_STD)

    gouge_price = sell_items["bbpred_price"] + (sell_items["bbpred_std"] * MAX_STD)

    listing_each["list_price_per"] = (
        listing_each["list_price_per"].fillna(gouge_price).astype(int)
    )
    listing_each = listing_each.reset_index().sort_values(["item", "sell_rank"])

    listing_profits = pd.merge(
        listing_each, sell_items, how="left", left_on="item", right_index=True
    )

    listing_profits["sell_buyout"] = listing_profits["list_price_per"] - 9

    listing_profits["sell_estimated_profit"] = (
        (listing_profits["sell_buyout"] * 0.95 - listing_profits["material_make_cost"])
        * (listing_profits["sell_probability"] ** listing_profits["sell_exp_decay"])
    ) - (listing_profits["item_deposit"] * (1 - listing_profits["sell_probability"]))

    best_profits_ind = listing_profits.groupby("item")["sell_estimated_profit"].idxmax()
    sell_policy = listing_profits.loc[best_profits_ind]

    sell_policy["profit_min"] = MIN_PROFIT
    sell_policy["profit_pct"] = MIN_PROFIT_PCT * sell_policy["bbpred_price"]
    sell_policy["profit_feasible"] = sell_policy[["profit_min", "profit_pct"]].max(
        axis=1
    )
    # sell_policy["profit_infeasible"] = (
    #     sell_policy["profit_feasible"] > sell_policy["sell_estimated_profit"]
    # )

    # Shows the amount required to be profitable
    sell_policy["sell_bid"] = (
        sell_policy["sell_buyout"]
        - sell_policy["sell_estimated_profit"]
        + sell_policy["profit_feasible"]
    )

    low_bid_ind = sell_policy[
        sell_policy["sell_bid"] < sell_policy["sell_buyout"]
    ].index
    sell_policy.loc[low_bid_ind, "sell_bid"] = sell_policy.loc[
        low_bid_ind, "sell_buyout"
    ]

    sell_policy["sell_duration"] = utils.duration_str_to_mins(duration)
    sell_policy = sell_policy.sort_values("sell_estimated_profit", ascending=False)

    sell_policy["sell_stack"] = stack
    sell_policy["user_max_sell"] = sell_policy["user_max_sell"].replace(0, max_sell)
    sell_policy["sell_count"] = sell_policy[["inv_ahm_bag", "user_max_sell"]].min(
        axis=1
    )
    sell_policy["sell_count"] = (
        sell_policy["sell_count"] / sell_policy["sell_stack"]
    ).astype(int)

    # TODO Here is likely where we can make it respect min_holding
    sell_policy["sell_min"] = sell_policy[["user_max_sell", "inv_ahm_bag"]].min(axis=1)
    adjust_stack = sell_policy[
        sell_policy["sell_min"] < sell_policy["sell_stack"]
    ].index
    sell_policy.loc[adjust_stack, "sell_stack"] = 1
    sell_policy.loc[adjust_stack, "sell_count"] = sell_policy.loc[
        adjust_stack, "sell_min"
    ]

    io.writer(sell_policy, "outputs", "sell_policy", "parquet")

    listing_profits = listing_profits.set_index(["sell_rank", "item"])[
        "sell_estimated_profit"
    ].unstack()
    io.writer(listing_profits, "reporting", "listing_profits", "parquet")
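The estimated-profit expression balances margin, sale probability and deposit risk; a hedged scalar version with invented numbers shows its shape (the 0.95 factor mirrors the auction-house cut applied in the code above):

sell_buyout = 1000          # chosen listing price
material_make_cost = 600
sell_probability = 0.8      # chance the listing sells at this rank
sell_exp_decay = 1.2        # larger when stock needs replenishing
item_deposit = 50

profit = ((sell_buyout * 0.95 - material_make_cost)
          * (sell_probability ** sell_exp_decay)
          - item_deposit * (1 - sell_probability))
print(round(profit, 1))  # roughly 257.8 with these assumptions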
Example #28
def report_profits() -> None:
    """Compare purchases and sales to expected value to derive profit from action."""
    bean_results = io.reader("cleaned", "bean_results", "parquet")
    bean_purchases = io.reader("cleaned", "bean_purchases", "parquet")
    bb_history = io.reader("cleaned", "bb_history", "parquet")

    user_items = io.reader("", "user_items", "json")
    item_names = {
        item_id: v.get("name_enus")
        for item_id, v in user_items.items()
    }
    item_ids = {
        v.get("name_enus"): item_id
        for item_id, v in user_items.items()
    }

    bean_results["date"] = bean_results["timestamp"].dt.date.astype(
        "datetime64")
    bean_results["profit"] = bean_results["received"].fillna(
        -bean_results["item_deposit"])
    bean_results["qty_change"] = bean_results.apply(
        lambda x: -x["qty"] if x["auction_type"] == "completedAuctions" else 0,
        axis=1)

    bean_purchases["date"] = bean_purchases["timestamp"].dt.date.astype(
        "datetime64")
    bean_purchases["qty_change"] = bean_purchases["qty"]
    bean_purchases["profit"] = -bean_purchases["buyout"]

    purchase_change = bean_purchases.groupby(["item", "date"
                                              ])[["qty_change",
                                                  "profit"]].sum()
    purchase_change.columns = ["purchase_qty_change", "purchase_profit"]

    result_change = bean_results.groupby(["auction_type", "item",
                                          "date"])[["qty_change",
                                                    "profit"]].sum()
    completed_change = result_change.loc["completedAuctions"]
    completed_change.columns = ["completed_qty_change", "completed_profit"]
    failed_change = result_change.loc["failedAuctions"]
    failed_change.columns = ["failed_qty_change", "failed_profit"]

    bb_history = bb_history[bb_history["date"] >= bean_results["date"].min()]
    bb_history = bb_history.set_index(["item", "date"])

    profits = (bb_history.join(purchase_change).join(completed_change).join(
        failed_change).fillna(0).astype(int))

    profits["total_action"] = profits[[x for x in profits
                                       if "_profit" in x]].sum(axis=1)
    profits["total_qty"] = profits[[x for x in profits
                                    if "_qty_change" in x]].sum(axis=1)

    # vector style material cost calculation
    material_update = []
    for item_id, item_details in user_items.items():
        item_name = item_names[item_id]
        if item_name in profits.index:
            material_cost = pd.Series(0,
                                      index=profits.loc[item_name].index,
                                      name="silveravg")
            user_made_from = item_details.get("made_from", {})
            if user_made_from:
                for ingredient, count in user_made_from.items():
                    if ingredient in profits.index:
                        material_cost += profits.loc[ingredient,
                                                     "silveravg"] * count
                    else:
                        material_cost += (
                            user_items[item_ids[ingredient]]["vendor_price"] *
                            count)
            else:
                material_cost = profits.loc[item_name, "silveravg"]
            material_cost = material_cost.reset_index()
            material_cost["item"] = item_name
            material_update.append(material_cost)

    material_updates = pd.concat(material_update)

    profits = profits.join(material_updates.set_index(["item",
                                                       "date"])["silveravg"],
                           rsuffix="_cost")
    profits["total_materials"] = -profits["silveravg_cost"] * profits["total_qty"]
    profits["total_profit"] = profits["total_action"] - profits["total_materials"]

    io.writer(profits, "reporting", "profits", "parquet")
Example #29
def clean_item_skeleton() -> None:
    """Creates basic dataframe from user items information."""
    user_items = io.reader("", "user_items", "json")

    item_facts = pd.DataFrame(user_items).T
    item_facts.index.name = "item_id"

    # Flag whether each item has a made_from recipe
    item_facts["made_from"] = False
    for item_id, facts in user_items.items():
        item_facts.loc[item_id,
                       "made_from"] = bool(facts.get("made_from", False))

    item_facts = item_facts.reset_index()
    item_facts = item_facts.rename(columns={"name_enus": "item"})
    item_facts = item_facts.set_index("item")

    # Rename fields and set index
    user_columns = [
        "ahm",
        "active",
        "ignore",
        "Sell",
        "Buy",
        "made_from",
        "max_holding",
        "max_sell",
        "mean_holding",
        "min_holding",
        "std_holding",
        "vendor_price",
        "make_pass",
    ]
    item_facts = item_facts.rename(
        columns={k: f"user_{k}"
                 for k in user_columns})
    item_fact_columns = [
        "icon",
        "stacksize",
        "selltovendor",
        "auctionable",
        "price",
        "vendornpccount",
        "true_auctionable",
    ]
    item_facts = item_facts.rename(
        columns={k: f"item_{k}"
                 for k in item_fact_columns})

    # Ensure user columns exist
    user_items_ensure_columns = [
        "user_min_holding",
        "user_max_holding",
        "user_max_sell",
        "user_Buy",
        "user_Sell",
        "user_Make",
        "user_made_from",
        "user_make_pass",
        "user_vendor_price",
    ]

    for col in user_items_ensure_columns:
        if col not in item_facts:
            item_facts[col] = nan

    # Additional standardization and cleaning
    item_facts["item_deposit"] = (item_facts["item_selltovendor"] / 20 *
                                  12).astype(int)

    int_cols = [
        "user_min_holding", "user_max_holding", "user_vendor_price", "item_id"
    ]
    item_facts[int_cols] = item_facts[int_cols].fillna(0).astype(int)

    item_facts["user_std_holding"] = (
        item_facts["user_max_holding"] -
        item_facts["user_min_holding"]) / cfg.analysis["USER_STD_SPREAD"]
    item_facts["user_mean_holding"] = (item_facts[[
        "user_min_holding", "user_max_holding"
    ]].mean(axis=1).astype(int))

    item_facts["user_Make"] = item_facts["user_made_from"] & (
        item_facts["user_make_pass"] == False)

    item_facts = item_facts.drop("user_made_from", axis=1)

    bool_cols = ["user_Buy", "user_Sell", "user_Make", "user_make_pass"]
    item_facts[bool_cols] = item_facts[bool_cols].fillna(False).astype(int)

    io.writer(item_facts, "cleaned", "item_skeleton", "parquet")
Example #30
def clean_arkinventory_data(run_dt: dt) -> None:
    """Reads Ark Inventory json and parses into tabular format."""
    inventory_data = io.reader("raw", "arkinventory_data", "json")

    raw_data: list = []
    monies: Dict[str, int] = {}
    for character, character_data in inventory_data.items():
        character_name = character.split(" ")[0]

        character_money = int(character_data.get("info").get("money", 0))
        monies[character] = character_money

        # Get Bank, Inventory, Character, Mailbox etc
        location_slots = character_data.get("location", [])

        for lkey in location_slots:
            items: Dict[str, int] = defaultdict(int)
            if str(lkey) not in cfg.location_info:  # pragma: no cover
                continue
            else:
                loc_name = cfg.location_info[str(lkey)]

            location_slot = location_slots[lkey]
            if location_slot:
                bag_slots = location_slot["bag"]

                # Get the items from each of the bags, add to master list
                for bag in bag_slots:
                    for item in bag.get("slot", []):
                        # Must have item details, a count and must not be a soulbound item
                        if item.get("h") and item.get(
                                "count") and item.get("sb") != 3:
                            item_name: str = item.get("h").split("[")[1].split(
                                "]")[0]
                            item_id: str = item.get("h").split(
                                "tem:")[1].split(":")[0]
                            items[f"{item_id}_{item_name}"] += item.get(
                                "count")

            for item_details, item_count in items.items():
                item_id, item_name = item_details.split("_", 1)
                raw_data.append(
                    (character_name, loc_name, item_id, item_name, item_count))

    # Convert information to dataframe
    cols = ["character", "location", "item_id", "item", "count"]
    ark_inventory = pd.DataFrame(raw_data)
    ark_inventory.columns = cols
    ark_inventory["item_id"] = ark_inventory["item_id"].astype(int)
    ark_inventory["timestamp"] = run_dt
    io.writer(
        ark_inventory,
        "cleaned",
        "ark_inventory",
        "parquet",
        self_schema=True,
    )

    ark_monies = pd.Series(monies)
    ark_monies.name = "monies"
    ark_monies.index.name = "character"
    ark_monies = pd.DataFrame(ark_monies)
    ark_monies["timestamp"] = run_dt
    io.writer(
        ark_monies,
        "cleaned",
        "ark_monies",
        "parquet",
        self_schema=True,
    )
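The item id and name are pulled straight out of the hyperlink string stored in the `h` field; a short sketch against a representative (invented) link:

h = "|cffffffff|Hitem:2447::::::::60:::::|h[Peacebloom]|h|r"

item_name = h.split("[")[1].split("]")[0]
item_id = h.split("tem:")[1].split(":")[0]
print(item_id, item_name)  # 2447 Peacebloom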