def parse_users_json(group_id: int, users_json: object, last_checked_at: datetime) -> Generator[Event, None, None]:
    """Returns a generator of group addition and removal events from the given users json and group id."""
    # Users previously stored for this group; anyone still in this set after
    # the loop below was not seen in the json, and so was removed from the group.
    missing_user_ids = get_group_user_ids(group_id)
    new_users = []
    for user_json in users_json:
        user_id = user_json["id"]
        if user_id in missing_user_ids:
            missing_user_ids.remove(user_id)
        else:
            new_users.append(User(_id=user_id, name=user_json["username"]))

    content = None
    time = last_checked_at

    for missing_user_id in missing_user_ids:
        yield Event(
            _type=types.REMOVE,
            time=time,
            user=get_group_user(group_id=group_id, user_id=missing_user_id),
            group=Usergroup(_id=group_id),
            content=content
        )

    for new_user in new_users:
        yield Event(
            _type=types.ADD,
            time=time,
            user=new_user,
            group=Usergroup(_id=group_id),
            content=content
        )

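# A minimal sketch of the diff performed above, isolated with plain built-ins.
# "stored_ids" stands in for get_group_user_ids, and the json shape is assumed
# to match what parse_users_json receives ({"id": ..., "username": ...}).
def _diff_sketch():
    stored_ids = {1, 2, 3}
    users_json = [{"id": 2, "username": "two"}, {"id": 9, "username": "nine"}]
    seen_ids = {user["id"] for user in users_json}
    removed_ids = stored_ids - seen_ids  # {1, 3} -> removal events
    added = [u for u in users_json if u["id"] not in stored_ids]  # user 9 -> addition event
    return removed_ids, added
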
def test_parse_timing():
    # Test both additions and removals.
    database = Database(SCRAPER_TEST_DB_NAME)
    database.insert_group_user(group=Usergroup(7), user=User(1, "one"))
    database.insert_group_user(group=Usergroup(7), user=User(2, "two"))
    database.insert_group_user(group=Usergroup(7), user=User(3, "three"))
    database.insert_group_user(group=Usergroup(7), user=User(4, "four"))
    database.insert_group_user(group=Usergroup(7), user=User(5, "five"))

    start_time = datetime.utcnow()
    events = []
    with mock.patch("scraper.parsers.group_parser.SCRAPER_DB_NAME", SCRAPER_TEST_DB_NAME):
        for event in group_parser.parse(
                group_id=7,
                group_page=mock_groups.soup,
                last_checked_at=from_string("2020-07-22T21:00:00+00:00")):
            events.append(event)
    end_time = datetime.utcnow()

    # We should not be using the api to fill in user names and such,
    # as this data is available within the users json.
    assert (end_time - start_time).total_seconds() < 3

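# A stricter check than the wall-clock bound above would be to patch the api and
# assert it is never touched; "scraper.parsers.group_parser.api" is a hypothetical
# patch target shown only to illustrate the pattern, not the module's actual api reference.
def _no_api_sketch():
    with mock.patch("scraper.parsers.group_parser.api") as mock_api:  # hypothetical target
        # ... run group_parser.parse(...) as in the test above ...
        assert not mock_api.mock_calls  # no api methods were invoked
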
def test_parse_removals():
    Database(SCRAPER_TEST_DB_NAME).insert_group_user(group=Usergroup(7), user=User(1, "someone"))

    events = []
    with mock.patch("scraper.parsers.group_parser.SCRAPER_DB_NAME", SCRAPER_TEST_DB_NAME):
        for event in group_parser.parse(
                group_id=7,
                group_page=mock_groups.soup,
                last_checked_at=from_string("2020-07-22T21:00:00+00:00")):
            events.append(event)

    # 17 additions from the mocked group page plus 1 removal for the pre-inserted user;
    # removals are yielded before additions.
    assert len(events) == 18
    assert events[0] == Event(
        _type="remove",
        time=from_string("2020-07-22T21:00:00+00:00"),
        group=Usergroup(7),
        user=User(_id=1, name="someone")
    )

def test_parse_additions():
    events = []
    with mock.patch("scraper.parsers.group_parser.SCRAPER_DB_NAME", SCRAPER_TEST_DB_NAME):
        for event in group_parser.parse(
                group_id=7,
                group_page=mock_groups.soup,
                last_checked_at=from_string("2020-07-22T21:00:00+00:00")):
            events.append(event)

    assert len(events) == 17
    assert events[0] == Event(
        _type="add",
        time=from_string("2020-07-22T21:00:00+00:00"),
        group=Usergroup(7),
        user=User(_id=2202163)
    )
    assert events[1] == Event(
        _type="add",
        time=from_string("2020-07-22T21:00:00+00:00"),
        group=Usergroup(7),
        user=User(_id=3621552)
    )

def test_get_group_nat_comment_wrong_group(group_event):
    # A group other than the NAT should not produce a bnsite comment,
    # even with a valid, recent eval available.
    group_event.group = Usergroup(4, "Global Moderation Team")

    with mock.patch("scraper.populator.datetime") as mock_datetime:
        mock_datetime.utcnow.return_value = from_string("2020-03-08 00:01:00")
        mock_datetime.side_effect = datetime

        with mock.patch("scraper.populator.bnsite_api") as mock_bnsite_api:
            mock_bnsite_api.request_last_eval.return_value = {
                "active": True,
                "kind": "resignation",
                "updatedAt": "2020-01-01T00:00:00.000Z"
            }

            assert get_group_bnsite_comment(group_event) is None

async def test_populate_from_bnsite_mode_integration():
    # Mock datetime such that the eval won't be outdated.
    with mock.patch("scraper.populator.datetime") as mock_datetime:
        mock_datetime.utcnow.return_value = from_string("2020-10-24 02:00:00")
        mock_datetime.side_effect = datetime

        event = Event("add", from_string("2020-10-22 00:00:00"), user=User(12402453), group=Usergroup(28))
        await populate_from_bnsite(event)

        assert event.content is None
        assert event.group.mode == "osu"

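# A minimal sketch of the datetime-freezing pattern the tests above rely on:
# patching the module-level datetime name pins utcnow(), while side_effect
# delegates direct datetime(...) calls to the real class so construction still
# works. The patched path is the one the tests themselves use.
def _frozen_time_sketch():
    with mock.patch("scraper.populator.datetime") as mock_datetime:
        mock_datetime.utcnow.return_value = datetime(2020, 10, 24, 2, 0, 0)
        mock_datetime.side_effect = datetime  # datetime(2020, 1, 1) still returns a real datetime
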
async def test_get_group_events_between():
    group_events = [
        Event(_type="add", time=from_string("2020-01-01 02:00:00"), user=User(1, "someone"), group=Usergroup(4)),
        Event(_type="add", time=from_string("2020-01-01 02:00:00"), user=User(2, "sometwo"), group=Usergroup(7)),
        Event(_type="remove", time=from_string("2020-01-01 02:00:00"), user=User(1, "someone"), group=Usergroup(4))
    ]
    with mock.patch("scraper.crawler.get_group_events", return_value=group_events) as mock_get_group_events:
        generator = get_group_events_between(
            start_time=from_string("2020-01-01 03:00:00"),
            end_time=from_string("2020-01-01 00:00:00"),
            last_checked_time=from_string("2020-01-01 02:00:00"))

        event4 = await anext(generator, None)
        event3 = await anext(generator, None)
        event2 = await anext(generator, None)
        event1 = await anext(generator, None)

        mock_get_group_events.assert_called_with(
            _from=from_string("2020-01-01 02:00:00"))

        assert event4.user.name == "someone"
        assert event4.type == "add"
        assert event3.user.name == "sometwo"
        assert event3.type == "add"
        assert event2.user.name == "someone"
        assert event2.type == "remove"
        assert event1 is None

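# A hedged sketch of the shape get_group_events_between appears to have: an async
# generator yielding only events inside the (end_time, start_time] window. The name
# and bounds here are assumptions drawn from the test, not the crawler's actual code.
async def _events_between_sketch(events, start_time, end_time):
    for event in events:
        if end_time < event.time <= start_time:
            yield event  # once exhausted, anext(generator, None) returns None
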
@pytest.fixture  # decorator assumed: group_event is injected into the tests above
def group_event():
    return Event("add", from_string("2020-07-24 20:00:00"), user=User(2, "sometwo"), group=Usergroup(32, mode="taiko"))

@pytest.fixture  # decorator assumed: removal-event variant of the fixture
def group_event():
    return Event("remove", from_string("2020-01-01 00:00:00"), user=User(2, "sometwo"), group=Usergroup(32))