def propagate_filter_to_representative_listing_hashes(
        listing_hashes_to_propagate_to, listing_hashes_to_propagate_from):
    filtered_app_ids_based_on_price_threshold = [
        convert_listing_hash_to_app_id(listing_hash)
        for listing_hash in listing_hashes_to_propagate_from
    ]

    filtered_representative_listing_hashes = [
        listing_hash for listing_hash in listing_hashes_to_propagate_to
        if convert_listing_hash_to_app_id(listing_hash) in
        filtered_app_ids_based_on_price_threshold
    ]

    return filtered_representative_listing_hashes
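
# Usage sketch (added for illustration; not part of the original code). It assumes that a
# listing hash starts with the appID followed by a dash, which is how
# convert_listing_hash_to_app_id() is expected to parse it.
def _example_propagate_filter():
    kept = propagate_filter_to_representative_listing_hashes(
        listing_hashes_to_propagate_to=[
            '123450-Foo Foil Trading Card',
            '678900-Bar Foil Trading Card',
        ],
        listing_hashes_to_propagate_from=['123450-Foo (Foil)'],
    )
    # Only '123450-Foo Foil Trading Card' should remain, because appID 123450 is the
    # only one shared with the price-filtered listing hashes.
    return kept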
def get_sell_prices_without_fee(app_ids, price_offset_in_euros=0.0):
    # Load sell prices (without fee).
    #
    # NB: an arbitrary price offset (greater than or equal to zero) can be supplied to constrain the problem further.
    # This is a safety margin: if the price offset is strictly positive (> 0), then we know that we can undercut the
    # lowest sell order and still make a profit if someone agrees to buy from us.

    data = load_all_listings()

    sell_prices = dict()

    for listing_hash in data:
        app_id_as_int = convert_listing_hash_to_app_id(listing_hash)
        app_id = str(app_id_as_int)

        if app_id in app_ids:
            current_data = data[listing_hash]

            sell_price_in_cents = current_data['sell_price']
            sell_price_in_euros = int(sell_price_in_cents) / 100
            sell_price_after_arbitrary_offset = sell_price_in_euros - abs(
                price_offset_in_euros)
            sell_price_in_euros_without_fee = compute_sell_price_without_fee(
                sell_price_after_arbitrary_offset)

            sell_prices[app_id] = sell_price_in_euros_without_fee

    return sell_prices
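
# Worked example (added for illustration; not part of the original code): for a listing
# whose 'sell_price' is 50 (cents) and with price_offset_in_euros=0.01, the price passed
# to compute_sell_price_without_fee() is 0.50 - 0.01 = 0.49€. Undercutting the lowest
# sell order by the offset is what leaves room for a profit once the market fee has been
# removed by compute_sell_price_without_fee(). The returned dict is keyed by the appID
# as a string, e.g.:
#
#   sell_prices = get_sell_prices_without_fee(app_ids=['123450'],
#                                             price_offset_in_euros=0.01)
#   price_without_fee = sell_prices['123450']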
def find_listing_hashes_with_unknown_goo_value(
        listing_candidates,
        app_ids_with_unreliable_goo_details,
        all_goo_details,
        verbose=True):
    app_ids_with_unknown_goo_value = []

    for listing_hash in listing_candidates:
        app_id = convert_listing_hash_to_app_id(listing_hash)

        if app_id in app_ids_with_unreliable_goo_details:
            continue

        goo_value_in_gems = safe_read_from_dict(input_dict=all_goo_details,
                                                input_key=app_id)

        if goo_value_in_gems is None:
            app_id_as_int = int(app_id)
            app_ids_with_unknown_goo_value.append(app_id_as_int)

    if verbose:
        print(
            'Unknown goo values for:\n{}\nTotal: {} appIDs with unknown goo value.'
            .format(
                app_ids_with_unknown_goo_value,
                len(app_ids_with_unknown_goo_value),
            ))

    return app_ids_with_unknown_goo_value
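
# Usage sketch (added for illustration; not part of the original code). The function
# returns appIDs (as integers) rather than listing hashes: an appID is flagged when
# safe_read_from_dict() cannot find its goo value in all_goo_details.
def _example_find_unknown_goo_values():
    return find_listing_hashes_with_unknown_goo_value(
        listing_candidates=['123450-Foo Foil Trading Card'],
        app_ids_with_unreliable_goo_details=[],
        # An empty dict of goo details means that every candidate appID gets flagged.
        all_goo_details=dict(),
        verbose=False,
    )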
def update_badge_arbitrages_with_latest_market_order_data(
        badge_data,
        arbitrage_data,
        retrieve_market_orders_online=True,
        verbose=False):
    # Objective: ensure that we have the latest market orders before trying to automatically create & sell booster packs

    # Based on arbitrage_data, select the badge_data for which we want to download (again) the latest market orders:
    selected_badge_data = dict()

    for listing_hash in arbitrage_data.keys():
        arbitrage = arbitrage_data[listing_hash]

        if arbitrage['is_marketable'] and arbitrage['profit'] > 0:
            app_id = convert_listing_hash_to_app_id(listing_hash)
            selected_badge_data[app_id] = badge_data[app_id]

    market_order_dict = load_market_order_data(
        badge_data=selected_badge_data,
        retrieve_market_orders_online=retrieve_market_orders_online)

    latest_badge_arbitrages = find_badge_arbitrages(
        badge_data=selected_badge_data,
        market_order_dict=market_order_dict,
        verbose=verbose)

    return latest_badge_arbitrages
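
# Input sketch (added for illustration; not part of the original code): arbitrage_data is
# expected to map listing hashes to dicts with at least 'is_marketable' and 'profit'
# fields, while badge_data is keyed by appID, e.g.:
#
#   arbitrage_data = {'123450-Foo Booster Pack': {'is_marketable': True, 'profit': 0.05}}
#   badge_data = {123450: {'listing_hash': '123450-Foo Booster Pack'}}
#
# Only the profitable, marketable entries are re-checked against fresh market orders.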
def build_dictionary_of_representative_listing_hashes(
        all_listing_details=None, listing_details_output_file_name=None):
    if listing_details_output_file_name is None:
        listing_details_output_file_name = get_listing_details_output_file_name_for_foil_cards(
        )

    if all_listing_details is None:
        all_listing_details = load_all_listing_details(
            listing_details_output_file_name=listing_details_output_file_name)

    dictionary_of_representative_listing_hashes = dict()

    for listing_hash in all_listing_details:
        app_id = convert_listing_hash_to_app_id(listing_hash)

        listing_details = all_listing_details[listing_hash]

        # Only keep listings for which the item type is known.
        if 'item_type_no' not in listing_details:
            continue

        try:
            dictionary_of_representative_listing_hashes[app_id].append(
                listing_hash)
        except KeyError:
            dictionary_of_representative_listing_hashes[app_id] = [
                listing_hash
            ]

    return dictionary_of_representative_listing_hashes
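
# Output sketch (added for illustration; not part of the original code): the result maps
# each appID to the list of its listing hashes whose item type is known, e.g.:
#
#   {123450: ['123450-Foo (Foil)', '123450-Foo Foil Trading Card']}
#
# so that a single "representative" listing hash can later be picked for each appID.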
def print_packs_with_high_buzz(hashes_for_best_bid,
                               market_order_dict,
                               item_rarity_patterns_per_app_id=None,
                               category_name=None,
                               num_packs_to_display=10):
    if category_name is None:
        category_name = get_category_name_for_booster_packs()

    print('# {} with high buy orders\n'.format(category_name.capitalize()))

    for i, listing_hash in enumerate(hashes_for_best_bid):

        if i >= num_packs_to_display:
            break

        app_id = convert_listing_hash_to_app_id(listing_hash)
        app_name = convert_listing_hash_to_app_name(listing_hash)

        bid = market_order_dict[listing_hash]['bid']
        bid_volume = market_order_dict[listing_hash]['bid_volume']

        markdown_compatible_steam_market_url = get_steam_market_listing_url(
            listing_hash=listing_hash,
            render_as_json=False,
            replace_spaces=True)

        if category_name != get_category_name_for_booster_packs():
            # Display the listing hash, because we cannot extract the app name from the listing hash for:
            # - profile backgrounds,
            # - and emoticons.
            app_name = listing_hash

        try:
            item_rarity_pattern = item_rarity_patterns_per_app_id[app_id]

            num_different_items_of_common_rarity = item_rarity_pattern[
                'common']
            num_different_items_of_uncommon_rarity = item_rarity_pattern[
                'uncommon']
            num_different_items_of_rare_rarity = item_rarity_pattern['rare']

            item_rarity_pattern_info = ' ; rarity pattern C/UC/R: {}/{}/{} items'.format(
                num_different_items_of_common_rarity,
                num_different_items_of_uncommon_rarity,
                num_different_items_of_rare_rarity,
            )
        except TypeError:
            item_rarity_pattern_info = ''

        print(
            '{:3}) [[store]({})][[market]({})] [{}]({}) ; bid: {}€ (volume: {}){}'
            .format(i + 1, get_steam_store_url(app_id),
                    markdown_compatible_steam_market_url, app_name,
                    get_steamcardexchange_url(app_id), bid, bid_volume,
                    item_rarity_pattern_info))

    return
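
# Input sketch (added for illustration; not part of the original code): market_order_dict
# is expected to map each listing hash to a dict with 'bid' (in €) and 'bid_volume', and
# item_rarity_patterns_per_app_id, if provided, maps each appID to its 'common',
# 'uncommon' and 'rare' item counts, e.g.:
#
#   market_order_dict = {'123450-Foo Booster Pack': {'bid': 0.42, 'bid_volume': 7}}
#   item_rarity_patterns_per_app_id = {123450: {'common': 5, 'uncommon': 3, 'rare': 2}}
#
# When item_rarity_patterns_per_app_id is None, the TypeError branch above simply omits
# the rarity pattern from the display.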
def create_booster_packs_for_batch(listing_hashes):
    results = dict()

    for listing_hash in listing_hashes:
        app_id = convert_listing_hash_to_app_id(listing_hash)
        result = create_booster_pack(app_id=app_id)

        results[listing_hash] = result

    return results
def match_badges_with_listing_hashes(badge_creation_details=None,
                                     all_listings=None,
                                     verbose=True):
    # Badges for games which I own

    if badge_creation_details is None:
        badge_creation_details = parse_badge_creation_details()

    badge_app_ids = list(badge_creation_details.keys())

    # Listings for ALL the existing Booster Packs

    if all_listings is None:
        all_listings = load_all_listings()

    all_listing_hashes = list(all_listings.keys())

    # Dictionaries to match appIDs or app names with listing hashes

    listing_matches_with_app_ids = dict()
    listing_matches_with_app_names = dict()
    for listing_hash in all_listing_hashes:
        app_id = convert_listing_hash_to_app_id(listing_hash)
        app_name = convert_listing_hash_to_app_name(listing_hash)

        listing_matches_with_app_ids[app_id] = listing_hash
        listing_matches_with_app_names[app_name] = listing_hash

    # Match badges with listing hashes

    badge_matches = dict()
    for app_id in badge_app_ids:
        app_name = badge_creation_details[app_id]['name']

        try:
            badge_matches[app_id] = listing_matches_with_app_ids[app_id]
        except KeyError:

            try:
                badge_matches[app_id] = listing_matches_with_app_names[
                    app_name]
                if verbose:
                    print('Match for {} (appID = {}) with name instead of id.'.
                          format(app_name, app_id))
            except KeyError:
                badge_matches[app_id] = None
                if verbose:
                    print('No match found for {} (appID = {})'.format(
                        app_name, app_id))

    if verbose:
        num_matching_hashes_found = sum(
            1 for matched_hash in badge_matches.values()
            if matched_hash is not None)
        print('#badges = {} ; #matching hashes found = {}'.format(
            len(badge_app_ids), num_matching_hashes_found))

    return badge_matches
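
# Input sketch (added for illustration; not part of the original code):
# badge_creation_details is expected to be keyed by appID, with at least a 'name' field
# per entry, e.g. {123450: {'name': 'Foo'}}. The returned badge_matches maps each of
# these appIDs to a listing hash (matched by appID first, then by app name), or to None
# when no match is found.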
def print_arbitrages(badge_arbitrages,
                     use_numbered_bullet_points=False,
                     use_hyperlink=False):
    bullet_point = get_bullet_point_for_display(
        use_numbered_bullet_points=use_numbered_bullet_points)

    for listing_hash in sorted(badge_arbitrages.keys(),
                               key=lambda x: badge_arbitrages[x]['profit'],
                               reverse=True):
        arbitrage = badge_arbitrages[listing_hash]

        # Skip unmarketable booster packs
        if not arbitrage['is_marketable']:
            continue

        if use_hyperlink:
            app_id = convert_listing_hash_to_app_id(listing_hash)

            markdown_compatible_steam_market_url = get_steam_market_listing_url(
                listing_hash=listing_hash,
                render_as_json=False,
                replace_spaces=True)

            listing_hash_formatted_for_markdown = '[[store]({})][[market]({})] [{}]({})'.format(
                get_steam_store_url(app_id),
                markdown_compatible_steam_market_url,
                listing_hash,
                get_steamcardexchange_url(app_id),
            )
        else:
            listing_hash_formatted_for_markdown = listing_hash

        gem_amount = arbitrage['gem_amount']

        if gem_amount is None:
            gem_amount_as_str = gem_amount
        else:
            gem_amount_as_str = '{:.0f}'.format(gem_amount)

        print(
            '{}Profit: {:.2f}€\t{}\t| craft pack: {} gems ({:.2f}€) | sell for {:.2f}€ ({:.2f}€ incl. fee) (#={})'
            .format(
                bullet_point,
                arbitrage['profit'],
                listing_hash_formatted_for_markdown,
                gem_amount_as_str,
                arbitrage['gem_price_including_fee'],
                arbitrage['bid_without_fee'],
                arbitrage['bid_including_fee'],
                arbitrage['bid_volume'],
            ))

    return
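
# Input sketch (added for illustration; not part of the original code): each entry of
# badge_arbitrages is expected to carry at least the fields used above, e.g.:
#
#   badge_arbitrages = {
#       '123450-Foo Booster Pack': {
#           'is_marketable': True,
#           'profit': 0.05,
#           'gem_amount': 1200,
#           'gem_price_including_fee': 0.45,
#           'bid_without_fee': 0.50,
#           'bid_including_fee': 0.57,
#           'bid_volume': 7,
#       },
#   }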
def load_apps_with_trading_cards(verbose=True):
    all_listings = load_all_listings()

    apps_with_trading_cards = [
        convert_listing_hash_to_app_id(listing_hash)
        for listing_hash in all_listings
    ]

    if verbose:
        print('Apps with trading cards: {}'.format(
            len(apps_with_trading_cards)))

    return apps_with_trading_cards
def convert_to_badges(filtered_listing_hashes, max_num_badges=None):
    badge_data = dict()

    for i, listing_hash in enumerate(filtered_listing_hashes):

        if max_num_badges is not None and i >= max_num_badges:
            break

        app_id = convert_listing_hash_to_app_id(listing_hash)

        badge_data[app_id] = dict()
        badge_data[app_id]['listing_hash'] = listing_hash

    return badge_data
def try_again_to_download_item_type(app_ids_with_unreliable_goo_details,
                                    filtered_representative_listing_hashes,
                                    listing_details_output_file_name):
    listing_hashes_to_process = [
        listing_hash for listing_hash in filtered_representative_listing_hashes
        if convert_listing_hash_to_app_id(listing_hash) in
        app_ids_with_unreliable_goo_details
    ]

    update_all_listing_details(
        listing_hashes=listing_hashes_to_process,
        listing_details_output_file_name=listing_details_output_file_name)

    return
def group_listing_hashes_by_app_id(all_listings, verbose=True):
    groups_by_app_id = dict()
    for listing_hash in all_listings:
        app_id = convert_listing_hash_to_app_id(listing_hash)

        try:
            groups_by_app_id[app_id].append(listing_hash)
        except KeyError:
            groups_by_app_id[app_id] = [listing_hash]

    if verbose:
        print('#app_ids = {}'.format(len(groups_by_app_id)))

    return groups_by_app_id
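
# Usage sketch (added for illustration; not part of the original code), assuming that a
# listing hash starts with the appID followed by a dash:
def _example_group_listing_hashes():
    groups = group_listing_hashes_by_app_id(
        ['123450-Foo (Foil)', '123450-Foo Foil Trading Card', '678900-Bar (Foil)'],
        verbose=False)
    # e.g. groups == {123450: ['123450-Foo (Foil)', '123450-Foo Foil Trading Card'],
    #                 678900: ['678900-Bar (Foil)']}
    # if convert_listing_hash_to_app_id() returns integer appIDs.
    return groups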
def try_again_to_download_goo_value(app_ids_with_unknown_goo_value,
                                    filtered_representative_listing_hashes,
                                    groups_by_app_id):
    # Restrict the enforced appIDs to those with an unknown goo value which still have
    # a representative listing hash.
    app_ids_to_process = [
        convert_listing_hash_to_app_id(listing_hash)
        for listing_hash in filtered_representative_listing_hashes
        if convert_listing_hash_to_app_id(listing_hash) in
        app_ids_with_unknown_goo_value
    ]

    download_missing_goo_details(
        groups_by_app_id=groups_by_app_id,
        listing_candidates=filtered_representative_listing_hashes,
        enforced_app_ids_to_process=app_ids_to_process)

    return
import json


def update_and_save_next_creation_times(creation_results,
                                        verbose=True,
                                        next_creation_time_file_name=None):
    if next_creation_time_file_name is None:
        next_creation_time_file_name = get_next_creation_time_file_name()

    next_creation_times = load_next_creation_time_data(
        next_creation_time_file_name)

    delay_in_days = get_crafting_cooldown_duration_in_days()
    formatted_next_creation_time = get_formatted_current_time(
        delay_in_days=delay_in_days)

    save_to_disk = False
    is_first_displayed_line = True

    for listing_hash in creation_results:
        result = creation_results[listing_hash]

        if result is not None:
            app_id = convert_listing_hash_to_app_id(listing_hash)
            next_creation_times[app_id] = formatted_next_creation_time

            save_to_disk = True

            if verbose:

                # Print an empty line the first time, to clearly separate the block from what was previously displayed.
                if is_first_displayed_line:
                    print('')
                    is_first_displayed_line = False

                app_name = convert_listing_hash_to_app_name(listing_hash)
                print(
                    'Saving the next creation time ({}) for {} (appID = {}) to disk.'
                    .format(formatted_next_creation_time, app_name, app_id))

    if save_to_disk:
        with open(next_creation_time_file_name, 'w', encoding='utf-8') as f:
            json.dump(next_creation_times, f)

    return next_creation_times
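
# Output sketch (added for illustration; not part of the original code): the JSON file on
# disk maps each appID to the next time at which a booster pack can be crafted for it, in
# whatever format get_formatted_current_time() produces, e.g.:
#
#   {"123450": "2024-01-01 12:00:00"}
#
# An entry is only written when the creation result for the corresponding listing hash
# is not None.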
def count_listing_hashes_per_app_id(all_listings):
    # For each appID, count the number of known listing hashes.
    #
    # Caveat: this piece of information relies on the downloaded listings, so it is NOT NECESSARILY accurate!
    #         Errors can happen, so manually double-check any information before relying on it for anything critical!
    #
    # If 'all_listings' is constrained to items of 'Common' rarity, then this is the number of **different** items of
    # such rarity. This is useful to decide whether a gamble is worth a try: the more items of Common rarity there are,
    # the harder it is to receive the specific item which you are after by crafting a badge.

    listing_hashes_per_app_id = dict()

    for listing_hash in all_listings:
        app_id = convert_listing_hash_to_app_id(listing_hash)
        try:
            listing_hashes_per_app_id[app_id] += 1
        except KeyError:
            listing_hashes_per_app_id[app_id] = 1

    return listing_hashes_per_app_id
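
# Worked example (added for illustration; not part of the original code): if an appID has
# 10 listing hashes in an 'all_listings' restricted to 'Common' rarity, then there are 10
# different Common items, and a crafted badge which drops a Common item only has roughly
# a 1-in-10 chance of dropping the specific item you are after.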
def filter_out_listing_hashes_if_goo_details_are_already_known_for_app_id(
        filtered_cheapest_listing_hashes,
        goo_details_file_name_for_for_foil_cards=None,
        verbose=True):
    # Filter out listings associated with an appID for which we already know the goo details.

    if goo_details_file_name_for_for_foil_cards is None:
        goo_details_file_name_for_for_foil_cards = get_goo_details_file_nam_for_for_foil_cards(
        )

    previously_downloaded_all_goo_details = load_all_goo_details(
        goo_details_file_name_for_for_foil_cards, verbose=verbose)

    app_ids_with_previously_downloaded_goo_details = [
        int(app_id) for app_id in previously_downloaded_all_goo_details
    ]

    filtered_cheapest_listing_hashes = [
        listing_hash for listing_hash in filtered_cheapest_listing_hashes
        if convert_listing_hash_to_app_id(listing_hash) not in
        app_ids_with_previously_downloaded_goo_details
    ]

    return filtered_cheapest_listing_hashes
def filter_out_candidates_whose_ask_price_is_below_threshold(
        all_listings,
        item_rarity_patterns_per_app_id=None,
        price_threshold_in_cents=None,
        category_name=None,
        drop_rate_estimates_for_common_rarity=None,
        gem_price_in_euros=None,
        verbose=True):
    if gem_price_in_euros is None:
        gem_price_in_euros = get_gem_price()

    if drop_rate_estimates_for_common_rarity is None:
        if category_name is not None and category_name != get_category_name_for_booster_packs(
        ):
            drop_rate_estimates = get_drop_rate_estimates_based_on_item_rarity_pattern(
                verbose=verbose)
            drop_rate_field = get_drop_rate_field()
            rarity_field = 'common'
            drop_rate_estimates_for_common_rarity = drop_rate_estimates[
                drop_rate_field][rarity_field]
        else:
            drop_rate_estimates_for_common_rarity = dict()

    gem_amount_required_to_craft_badge = get_gem_amount_required_to_craft_badge(
    )

    badge_price = gem_amount_required_to_craft_badge * gem_price_in_euros

    # Build dummy badge data, in order to reuse functions developed for the analysis of Booster Packs

    badge_data = dict()
    for listing_hash in all_listings:
        app_id = convert_listing_hash_to_app_id(listing_hash)

        item_rarity_pattern = item_rarity_patterns_per_app_id[app_id]

        num_items_of_common_rarity = item_rarity_pattern['common']
        num_items_of_uncommon_rarity = item_rarity_pattern['uncommon']
        num_items_of_rare_rarity = item_rarity_pattern['rare']

        item_rarity_pattern_as_tuple = (num_items_of_common_rarity,
                                        num_items_of_uncommon_rarity,
                                        num_items_of_rare_rarity)

        try:
            drop_rate_for_common_rarity = drop_rate_estimates_for_common_rarity[
                item_rarity_pattern_as_tuple]
        except KeyError:
            drop_rate_for_common_rarity = 1  # Here, 1 would represent 100% chance to receive an item of common rarity.

        drop_rate_for_common_rarity = clamp_proportion(
            drop_rate_for_common_rarity)

        item_price_by_crafting_badges = num_items_of_common_rarity * badge_price / drop_rate_for_common_rarity

        sell_price_in_cents = all_listings[listing_hash]['sell_price']
        sell_price_in_euros = sell_price_in_cents / 100

        # In order to distinguish items linked to the same appID, dummy appIDs are introduced:
        dummy_app_id = listing_hash

        badge_data[dummy_app_id] = dict()
        badge_data[dummy_app_id]['listing_hash'] = listing_hash
        badge_data[dummy_app_id]['sell_price'] = sell_price_in_euros
        badge_data[dummy_app_id]['gem_price'] = item_price_by_crafting_badges

    # Filter out candidates for which the ask is below a given threshold

    filtered_badge_data = filter_out_badges_with_low_sell_price(
        badge_data,
        category_name=category_name,
        user_chosen_price_threshold=price_threshold_in_cents)

    return filtered_badge_data
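
# Worked example (added for illustration; not part of the original code) of the crafting
# cost computed above: say get_gem_amount_required_to_craft_badge() returns 1200 and
# get_gem_price() returns 0.0005€ per gem, so that badge_price = 1200 * 0.0005 = 0.60€.
# For an appID with 5 different Common items and a drop rate of 1 (i.e. a 100% chance to
# receive a Common item), the expected cost of obtaining one *specific* Common item by
# crafting badges is 5 * 0.60 / 1 = 3.00€, which is the 'gem_price' stored in the dummy
# badge data.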
def determine_whether_an_arbitrage_might_exist_for_foil_cards(
        eligible_listing_hashes,
        all_goo_details,
        app_ids_with_unreliable_goo_details=None,
        app_ids_with_unknown_goo_value=None,
        all_listings=None,
        listing_output_file_name=None,
        sack_of_gems_price_in_euros=None,
        retrieve_gem_price_from_scratch=True,
        verbose=True):
    if sack_of_gems_price_in_euros is None:
        # Load the price of a sack of 1000 gems
        sack_of_gems_price_in_euros = load_sack_of_gems_price(
            retrieve_gem_price_from_scratch=retrieve_gem_price_from_scratch,
            verbose=verbose)

    if listing_output_file_name is None:
        listing_output_file_name = get_listing_output_file_name_for_foil_cards(
        )

    if all_listings is None:
        all_listings = load_all_listings(
            listing_output_file_name=listing_output_file_name)

    if app_ids_with_unreliable_goo_details is None:
        app_ids_with_unreliable_goo_details = []

    if app_ids_with_unknown_goo_value is None:
        app_ids_with_unknown_goo_value = []

    num_gems_per_sack_of_gems = get_num_gems_per_sack_of_gems()

    sack_of_gems_price_in_cents = 100 * sack_of_gems_price_in_euros

    arbitrages = dict()

    for listing_hash in eligible_listing_hashes:
        app_id = convert_listing_hash_to_app_id(listing_hash)

        if app_id in app_ids_with_unreliable_goo_details:
            # NB: This is for goo details which were retrieved with the default item type n° (=2), which can be wrong.
            if verbose:
                print(
                    '[X]\tUnreliable goo details for {}'.format(listing_hash))
            continue

        goo_value_in_gems = safe_read_from_dict(input_dict=all_goo_details,
                                                input_key=app_id)

        if app_id in app_ids_with_unknown_goo_value or goo_value_in_gems is None:
            # NB: This is when the goo value is unknown, despite a correct item type n° used to download goo details.
            if verbose:
                print('[?]\tUnknown goo value for {}'.format(listing_hash))
            continue

        goo_value_in_cents = goo_value_in_gems / num_gems_per_sack_of_gems * sack_of_gems_price_in_cents

        current_listing = all_listings[listing_hash]
        ask_in_cents = current_listing['sell_price']

        if ask_in_cents == 0:
            # NB: The ask cannot be equal to zero, so we skip the listing because there must be a bug.
            if verbose:
                print('[!]\tImpossible ask price ({:.2f}€) for {}'.format(
                    ask_in_cents / 100,
                    listing_hash,
                ))
            continue

        profit_in_cents = goo_value_in_cents - ask_in_cents
        is_arbitrage = bool(profit_in_cents > 0)

        if is_arbitrage:
            arbitrage = dict()
            arbitrage['profit'] = profit_in_cents / 100
            arbitrage['ask'] = ask_in_cents / 100
            arbitrage['goo_amount'] = goo_value_in_gems
            arbitrage['goo_value'] = goo_value_in_cents / 100

            arbitrages[listing_hash] = arbitrage

    return arbitrages
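
# Worked example (added for illustration; not part of the original code): say a sack of
# 1000 gems sells for 0.50€ (i.e. 50 cents) and a foil card is worth 100 gems. Then
# goo_value_in_cents = 100 / 1000 * 50 = 5 cents. If the lowest ask for that foil card is
# 3 cents, then profit_in_cents = 5 - 3 = 2 > 0, so the listing is reported as an
# arbitrage with arbitrage['profit'] equal to 0.02€.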