Example #1
async def populate_from_discussion(event: Event) -> None:
    """Populates the given event using the beatmapset discussion json
    (e.g. missing discussion info and additional details such as who voted)."""
    discussions_json = get_discussions_json(event.beatmapset)
    if discussions_json is None:
        # This happens if the beatmapset was deleted in between us scraping it and populating it.
        event.marked_for_deletion = True
        return

    event.discussion = get_complete_discussion_info(event.discussion, event.beatmapset, discussions_json)
    await __populate_additional_details(event, discussions_json)
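A minimal usage sketch (not part of the source) showing how populate_from_discussion and the marked_for_deletion flag from the example above might be consumed by a caller; the handle helper and the persistence step are hypothetical.

async def handle(event: Event) -> None:
    # Fill in discussion details scraped from the beatmapset page.
    await populate_from_discussion(event)
    if event.marked_for_deletion:
        # The beatmapset disappeared mid-scrape; nothing useful to persist.
        return
    # ... persist or forward the now-populated event (hypothetical step) ...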
Example #2
async def __populate_additional_details(
        event: Event,
        discussions_json: object,
        db_name: str = SCRAPER_DB_NAME) -> None:
    """Populates additional details in the given event from the beatmapset discussion json (e.g. who voted)."""
    if event.discussion and (not event.discussion.user
                             or not event.discussion.content):
        if not __complete_discussion_context(event.discussion,
                                             db_name=db_name):
            # After being deleted, many properties of discussions are inaccessible without referring to cached information.
            # Without cached information, we skip the event, since its context is no longer visible to the public anyway.
            event.marked_for_deletion = True
            return

    if event.type in [types.NOMINATE]:
        # Nominate/qualify content should reflect recent praise/hype/note content.
        event.content = get_nomination_comment(event, discussions_json)

    if event.type in [types.DISQUALIFY, types.RESET]:
        # Event content should reflect discussion content.
        if event.discussion:  # Discussion may have been deleted.
            event.content = event.discussion.content

    if not discussions_json:
        # Mapset has no discussion, probably associated with some legacy thread (e.g. ranked before modding v2).
        return

    beatmapset_json = discussions_json["beatmapset"]
    for page_event in get_map_page_event_jsons(event.beatmapset,
                                               discussions_json):
        # Two events of the same type within the same second are very unlikely,
        # so time and type work as identification (we have no access to actual event ids on the scraping side).
        same_time = event.time == from_string(page_event["created_at"])
        same_type = event.type == page_event["type"]
        if same_time and same_type:
            if event.type in [types.RESOLVE, types.REOPEN]:
                # Event user should be whoever resolved or reopened, rather than the discussion author.
                post_author = discussion_parser.parse_discussion_post_author(
                    page_event["comment"]["beatmap_discussion_post_id"],
                    beatmapset_json)
                event.user = post_author

            if event.type in [types.KUDOSU_GAIN, types.KUDOSU_LOSS]:
                # Event user should be whoever gave or removed the kudosu, not the discussion author.
                kudosu_author = discussion_parser.parse_user(
                    page_event["comment"]["new_vote"]["user_id"],
                    beatmapset_json)
                event.user = kudosu_author
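A minimal sketch isolating the time/type matching heuristic used above, assuming the same module-level names (Event, from_string) as in the example; find_matching_page_event is a hypothetical helper, not part of the source.

from typing import Optional

def find_matching_page_event(event: Event, page_event_jsons: list) -> Optional[dict]:
    """Returns the page event json whose creation time and type match the given event, if any."""
    for page_event in page_event_jsons:
        same_time = event.time == from_string(page_event["created_at"])
        same_type = event.type == page_event["type"]
        if same_time and same_type:
            # Two same-type events within the same second are assumed not to occur,
            # so time + type stands in for the event id, which is not visible when scraping.
            return page_event
    return None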