def generate_auction_activity(test=False):
    """Build the full auction-activity parquet from Beancounter history.

    Reads the BeanCounter saved-variables lua across all characters,
    flattens the listings of interest into a labelled, cleaned pandas
    dataframe and writes it to data/full/auction_activity.parquet.

    Args:
        test: when True, skip the parquet save (dry run).
    """
    relevant_auction_types = [
        "failedAuctions",
        "completedAuctions",
        "completedBidsBuyouts",
    ]
    settings = utils.get_general_settings()
    data = utils.read_lua("BeanCounter")

    # BeanCounter keys are "id:suffix"; map the bare id to the item name
    num_item = {
        key.split(":")[0]: raw.split(";")[1]
        for key, raw in data["BeanCounterDBNames"].items()
    }

    # Flatten every character's relevant listings into one row per auction
    parsed = []
    for character, auction_data in data["BeanCounterDB"]["Grobbulus"].items():
        for auction_type, item_listings in auction_data.items():
            if auction_type not in relevant_auction_types:
                continue
            auction_name = settings["auction_type_labels"][auction_type]
            for item_id, listings in item_listings.items():
                for listing in listings.values():
                    for auction in listing:
                        row = [auction_name, num_item[item_id], character]
                        row.extend(auction.split(";"))
                        parsed.append(row)

    # Keep only the columns of interest, then give them readable labels
    df = pd.DataFrame(parsed)
    df = df.drop([4, 5, 6, 8, 11, 12], axis=1)
    cols = [
        "auction_type", "item", "character", "count", "price", "agent",
        "timestamp"
    ]
    df.rename(columns=dict(zip(df.columns, cols)), inplace=True)

    # Clean types; empty prices are unusable rows
    df = df[df["price"] != ""]
    df["price"] = df["price"].astype(int)
    df["count"] = df["count"].astype(int)
    df["price_per"] = round(df["price"] / df["count"], 4)
    df["timestamp"] = df["timestamp"].apply(lambda x: dt.fromtimestamp(int(x)))

    if test:
        return None  # avoid saves
    logger.debug(f"Auction actions full repository. {df.shape[0]} records")
    df.to_parquet("data/full/auction_activity.parquet", compression="gzip")
def generate_booty_data():
    """Get and save booty bay pricing data.

    Reads the Pricer addon data for the configured account and writes it
    both to the intermediate location (latest scan) and to a timestamped
    file in the full history.
    """
    account = "396255466#1"
    raw = utils.read_lua(
        "Pricer", merge_account_sources=False, accounts=[account])
    pricerdata = pd.DataFrame(raw[account]["PricerData"]).T

    # Saves latest scan to intermediate (immediate)
    pricerdata.to_parquet("data/intermediate/booty_data.parquet",
                          compression="gzip")

    # Keep a timestamped copy in the full history
    latest = str(pricerdata["timestamp"].max())
    pricerdata.to_parquet(
        f"data/full/booty_data/{latest}.parquet",
        compression="gzip",
    )
    logger.debug(f"Generating booty data {pricerdata.shape[0]}")
def apply_buy_policy(MAT_DEV=0, test=False):
    """Determines herbs to buy based on potions in inventory.

    Always buys at or below current market price. Populates the Auctioneer
    snatch list with computed buy prices and saves the buy policy parquet.

    Args:
        MAT_DEV: number of standard deviations subtracted from the recent
            market price when valuing materials (0 = use recent price as-is).
        test: when True, skip the lua write and parquet save (dry run).
    """
    # TODO; remove self_demand from this list, not a big deal
    # TODO need to subtract out oils (stoneshield) etc
    items = utils.load_items()
    sell_policy = pd.read_parquet("data/outputs/sell_policy.parquet")

    # Determine how many potions I have, and how many need to be replaced
    replenish = (sell_policy["auctions"] + sell_policy["inventory"] +
                 sell_policy["storage"])
    replenish.name = "inventory"
    replenish = pd.DataFrame(replenish)

    for potion in replenish.index:
        replenish.loc[potion, "max"] = items.get(potion).get("max_inventory", 60)

    replenish["inventory_target"] = (
        replenish["max"] - replenish["inventory"]).apply(lambda x: max(0, x))
    replenish = replenish.join(analyse_auction_success())

    # Downweight requirements according to recent auction success
    replenish["target"] = (replenish["inventory_target"] *
                           replenish["auction_success"]).astype(int)

    # From potions required, get herbs required. Accumulate in a dict:
    # avoids the deprecated empty pd.Series() constructor and the removed
    # Series.iteritems() (gone in pandas 2.0; .items() works everywhere)
    herb_counts = defaultdict(int)
    for potion, quantity in replenish["target"].items():
        for herb, count in items.get(potion).get("made_from").items():
            herb_counts[herb] += count * quantity

    herbs_required = pd.Series(herb_counts)
    herbs_required.name = "herbs_needed"
    herbs = pd.DataFrame(herbs_required)

    # Add item codes from beancounter, used for entering into snatch
    item_codes = utils.get_item_codes()
    herbs = herbs.join(pd.Series(item_codes, name="code"))

    # Remove herbs already in inventory
    inventory = pd.read_parquet("data/intermediate/inventory.parquet")
    herbs = herbs.join(
        inventory.groupby("item").sum()["count"]).fillna(0).astype(int)
    herbs["herbs_purchasing"] = (herbs["herbs_needed"] -
                                 herbs["count"]).apply(lambda x: max(0, x))

    # Cleanup: vials come from vendors, not the auction house
    herbs = herbs.drop(["Crystal Vial", "Empty Vial", "Leaded Vial"])
    herbs = herbs.sort_index()

    # Get market values
    item_prices = pd.read_parquet("data/intermediate/booty_data.parquet")
    item_prices["market_price"] = item_prices["recent"] - (
        item_prices["stddev"] * MAT_DEV)

    # Clean up auction data
    auction_data = pd.read_parquet(
        "data/intermediate/auction_scandata.parquet")
    auction_data = auction_data[auction_data["item"].isin(items)]
    auction_data = auction_data[auction_data["price"] > 0]
    auction_data = auction_data.sort_values("price_per")
    auction_data["price_per"] = auction_data["price_per"].astype(int)

    for herb, count in herbs["herbs_purchasing"].items():
        # Always buy at way below market
        buy_price = item_prices.loc[herb, "market_price"] * 0.3

        # Filter to herbs below market price; copy() so adding the cumsum
        # column does not raise SettingWithCopyWarning on a slice
        listings = auction_data[auction_data["item"] == herb]
        listings = listings[listings["price_per"] < (
            item_prices.loc[herb, "market_price"])].copy()
        listings["cumsum"] = listings["count"].cumsum()

        # Filter to lowest priced herbs for the quantity needed
        herbs_needed = herbs.loc[herb, "herbs_purchasing"]
        listings = listings[listings["cumsum"] < herbs_needed]

        # If there are herbs available after filtering...
        if listings.shape[0] > 0:
            # Reject the highest priced item, in case there are 100s of
            # listings at that price (conservative)
            not_last_priced = listings[
                listings["price_per"] != listings["price_per"].iloc[-1]]
            if not_last_priced.shape[0] > 0:
                buy_price = not_last_priced["price_per"].iloc[-1]

        herbs.loc[herb, "buy_price"] = buy_price

    herbs["buy_price"] = herbs["buy_price"].astype(int)

    # Get snatch data, populate and save back
    data = utils.read_lua("Auc-Advanced", merge_account_sources=False)
    data = data.get("396255466#1")
    snatch = data["AucAdvancedData"]["UtilSearchUiData"]["Current"][
        "snatch.itemsList"]
    for herb, row in herbs.iterrows():
        snatch[f"{row['code']}:0:0"]["price"] = int(row["buy_price"])
    data["AucAdvancedData"]["UtilSearchUiData"]["Current"][
        "snatch.itemsList"] = snatch

    logger.debug(herbs.columns)
    logger.debug(herbs.head())
    herbs = herbs[["herbs_purchasing", "buy_price"]]

    if test:
        return None  # avoid saves
    utils.write_lua(data)
    herbs.to_parquet("data/outputs/buy_policy.parquet", compression="gzip")
def generate_inventory(test=False):
    """Reads and reformats the Arkinventory data file into a pandas dataframe.

    Loads yaml-driven settings to map addon location codes to readable
    location names, flattens every character's bags/bank/mailbox items into
    a row-per-item dataframe, totals monies per character, and saves
    intermediate plus append-only full parquet repositories.

    Args:
        test: when True, skip all saves (dry run).
    """
    settings = utils.get_general_settings()
    characters = utils.read_lua(
        "ArkInventory")["ARKINVDB"]["global"]["player"]["data"]

    # Search through inventory data to create dictionary of all items and
    # counts, also counts total monies
    monies = {}
    # Values stored here are dicts; the original defaultdict(str) factory
    # was wrong (a missing key would yield "" instead of a dict)
    character_inventories = defaultdict(dict)
    raw_data = []

    for ckey in characters:
        character = characters[ckey]
        character_name = ckey.split(" ")[0]
        character_inventories[character_name] = {}
        monies[ckey] = int(character.get("info").get("money", 0))

        # Get Bank, Inventory, Character, Mailbox etc
        location_slots = character.get("location", [])
        for lkey in location_slots:
            items = defaultdict(int)
            if lkey not in settings["location_info"]:
                continue
            loc_name = settings["location_info"][lkey]
            location_slot = location_slots[lkey]
            if location_slot:
                bag_slots = location_slot["bag"]
                # Get the items from each of the bags, add to master list.
                # Item names are extracted from the link text between [ ]
                for bag in bag_slots:
                    for item in bag.get("slot", []):
                        if item.get("h") and item.get("count"):
                            item_name = item.get("h").split("[")[1].split(
                                "]")[0]
                            items[item_name] += item.get("count")
                for item_name, item_count in items.items():
                    raw_data.append(
                        (character_name, loc_name, item_name, item_count))
                character_inventories[character_name][loc_name] = items

    # Convert information to dataframe
    cols = ["character", "location", "item", "count", "timestamp"]
    df = pd.DataFrame(raw_data)
    df["timestamp"] = dt.now()
    df.columns = cols

    df_monies = pd.Series(monies)
    df_monies.name = "monies"
    df_monies = pd.DataFrame(df_monies)
    df_monies["timestamp"] = dt.now()

    if test:
        return None  # avoid saves

    df.to_parquet("data/intermediate/inventory.parquet", compression="gzip")
    df_monies.to_parquet("data/intermediate/monies.parquet",
                         compression="gzip")
    logger.debug(
        f"Inventory formatted. {len(df)} records, {int(df_monies['monies'].sum()/10000)} total money across chars"
    )

    # Back up the full repositories before appending to them
    inventory_repo = pd.read_parquet("data/full/inventory.parquet")
    inventory_repo.to_parquet("data/full_backup/inventory.parquet",
                              compression="gzip")
    monies_repo = pd.read_parquet("data/full/monies.parquet")
    monies_repo.to_parquet("data/full_backup/monies.parquet",
                           compression="gzip")

    updated = "*not*"
    # Only append when this scan is newer than the latest stored snapshot
    if df["timestamp"].max() > inventory_repo["timestamp"].max():
        updated = ""
        # pd.concat replaces DataFrame.append (removed in pandas 2.0)
        inventory_repo = pd.concat([inventory_repo, df])
        inventory_repo.to_parquet("data/full/inventory.parquet",
                                  compression="gzip")
        monies_repo = pd.concat([monies_repo, df_monies])
        monies_repo.to_parquet("data/full/monies.parquet", compression="gzip")

    unique_periods = len(inventory_repo["timestamp"].unique())
    logger.debug(
        f"Inventory full repository. {len(inventory_repo)} records with {unique_periods} snapshots. Repository has {updated} been updated this run"
    )
def apply_sell_policy(stack=1, leads=15, duration="m", update=True,
                      test=False):
    """Given a datatable of the sell environment, create sell policy and save to WoW.

    Args:
        stack: stack size to list auctions in.
        leads: target number of lead auctions to hold per item.
        duration: auction length key; "s"=12h, "m"=24h, "l"=48h.
        update: when True, persist the adjusted sell policy parquet.
        test: when True, skip writing the Auctioneer lua (dry run).
    """
    df_sell_policy = pd.read_parquet("data/outputs/sell_policy.parquet")

    for item, row in df_sell_policy.iterrows():
        current_leads = row.loc["auction_leads"]
        aucs = row.loc["auctions"]
        inv = row.loc["immediate_inv"]

        # Could optionally leave one item remaining
        # stacks = max(int(inv / stack) - int(leave_one), 0)
        stacks = max(int(inv / stack), 0)
        available_to_sell = stacks * stack

        # List whole stacks until we hold enough leads or run out of stock
        sell_count = 0
        while current_leads < leads and available_to_sell > 0:
            current_leads += stack
            aucs += stack
            available_to_sell -= stack
            sell_count += 1

        df_sell_policy.loc[item, "stack"] = stack
        if sell_count > 0 and df_sell_policy.loc[item, "infeasible"] == 0:
            df_sell_policy.loc[item, "sell_count"] = sell_count
            df_sell_policy.loc[item, "auction_leads"] = current_leads
            df_sell_policy.loc[item, "immediate_inv"] -= sell_count * stack
            df_sell_policy.loc[item, "auctions"] = aucs
        else:
            df_sell_policy.loc[item, "sell_count"] = inv + 1

    df_sell_policy["sell_count"] = df_sell_policy["sell_count"].astype(int)
    df_sell_policy["stack"] = df_sell_policy["stack"].astype(int)
    df_sell_policy["auction_leads"] = df_sell_policy["auction_leads"].astype(
        int)
    df_sell_policy["auctions"] = df_sell_policy["auctions"].astype(int)

    if update and not test:
        df_sell_policy.to_parquet("data/outputs/sell_policy.parquet",
                                  compression="gzip")

    # Map the duration key to minutes; default to medium (24h) so an
    # unrecognised key no longer writes None into the appraiser entries
    duration = {"s": 720, "m": 1440, "l": 2880}.get(duration, 1440)

    item_codes = utils.get_item_codes()

    # Seed new appraiser
    # NOTE(review): seed "duration" is hardcoded to 720 while each item
    # below gets the mapped duration — presumably intentional; confirm
    new_appraiser = {
        "bid.markdown": 0,
        "columnsortcurDir": 1,
        "columnsortcurSort": 6,
        "duration": 720,
        "bid.deposit": True,
    }

    # Iterate through items setting policy
    for item, d in df_sell_policy.iterrows():
        code = item_codes[item]
        new_appraiser[f"item.{code}.fixed.bid"] = int(d["sell_price"] +
                                                      d["infeasible"])
        new_appraiser[f"item.{code}.fixed.buy"] = int(d["sell_price"])
        new_appraiser[f"item.{code}.match"] = False
        new_appraiser[f"item.{code}.model"] = "fixed"
        new_appraiser[f"item.{code}.number"] = int(d["sell_count"])
        new_appraiser[f"item.{code}.stack"] = int(d["stack"])
        new_appraiser[f"item.{code}.bulk"] = True
        new_appraiser[f"item.{code}.duration"] = duration

    # Read client lua, replace appraiser config, write back
    data = utils.read_lua("Auc-Advanced", merge_account_sources=False)
    data = data.get("396255466#1")
    data["AucAdvancedConfig"]["profile.Default"]["util"][
        "appraiser"] = new_appraiser

    if test:
        return None  # avoid saves
    utils.write_lua(data)