def test_index_aggregate_user_same_checkpoint(app):
    """Test that we should not update when last index is the same"""
    with app.app_context():
        db = get_db()

    # Seed a checkpoint equal to the current head block so the aggregation
    # task sees no new blocks to process.
    seeded = deepcopy(basic_entities)
    head_block = basic_entities["blocks"][0]["number"]
    seeded.update(
        {
            "indexing_checkpoints": [
                {"tablename": AGGREGATE_USER, "last_checkpoint": head_block}
            ],
        }
    )
    populate_mock_db(db, seeded)

    with db.scoped_session() as session:
        ordered_rows = session.query(AggregateUser).order_by(AggregateUser.user_id)
        # Nothing aggregated before the task runs.
        assert len(ordered_rows.all()) == 0

        _update_aggregate_user(session)

        # Still nothing: the checkpoint matched the head block, so no work was done.
        assert len(ordered_rows.all()) == 0
        assert get_last_indexed_checkpoint(session, AGGREGATE_USER) == 3
def test_index_aggregate_user_populate(app):
    """Test that we should populate users from empty"""
    with app.app_context():
        db = get_db()

    with db.scoped_session() as session:
        rows = session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        assert len(rows) == 0, "Test aggregate_user is empty before populate_mock_db()"

    # create db entries based on entities
    populate_mock_db(db, basic_entities, block_offset=3)

    with db.scoped_session() as session:
        # confirm nothing exists before _update_aggregate_user()
        rows = session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        assert (
            len(rows) == 0
        ), "Test aggregate_user is empty before _update_aggregate_user()"

        # trigger celery task
        _update_aggregate_user(session)

        # run basic tests against basic_entities
        basic_tests(session)
def test_search_user_tags(app):
    """Tests that search by tags works for users"""
    with app.app_context():
        db = get_db()

    # Tracks carry comma-separated tag strings; the "pop" tag appears enough
    # times for users 1 and 2 to clear user_tag_count=2, but not user 3.
    test_entities = {
        "tracks": [
            {"track_id": 1, "tags": "pop", "owner_id": 1},
            {"track_id": 2, "owner_id": 1, "tags": "pop,rock,electric"},
            {"track_id": 3, "owner_id": 2},
            {"track_id": 4, "owner_id": 2, "tags": "funk,pop"},
            {"track_id": 5, "owner_id": 2, "tags": "funk,pop"},
            {"track_id": 6, "owner_id": 2, "tags": "funk,Funk,kpop"},
            {"track_id": 7, "owner_id": 3, "tags": "pop"},
            {"track_id": 8, "owner_id": 3, "tags": "kpop"},
        ],
        "users": [
            {"user_id": 1, "handle": "1"},
            {"user_id": 2, "handle": "2"},
            {"user_id": 3, "handle": "3"},
        ],
        "follows": [
            {"follower_user_id": 1, "followee_user_id": 2},
            {"follower_user_id": 1, "followee_user_id": 3},
            {"follower_user_id": 2, "followee_user_id": 3},
        ],
    }
    populate_mock_db(db, test_entities)

    with db.scoped_session() as session:
        # Aggregate user stats feed the follower-count ordering below.
        _update_aggregate_user(session)
        session.execute("REFRESH MATERIALIZED VIEW tag_track_user")

        args = {
            "search_str": "pop",
            "current_user_id": None,
            "user_tag_count": 2,
            "limit": 10,
            "offset": 0,
        }
        users = search_user_tags(session, args)

        assert len(users) == 2
        assert users[0]["user_id"] == 2  # Fir. b/c user 2 has 1 follower
        assert users[1]["user_id"] == 1  # Sec. b/c user 1 has 0 followers
def test_update_trending_params(app):
    """Test that refreshing trending params gives the correct values"""
    with app.app_context():
        db = get_db()

    # setup
    setup_trending(db)

    with db.scoped_session() as session:
        # All aggregates and dependent materialized views must be refreshed
        # before trending_params reflects the seeded data.
        _update_aggregate_track(session)
        _update_aggregate_plays(session)
        _update_aggregate_user(session)
        session.execute("REFRESH MATERIALIZED VIEW aggregate_interval_plays")
        session.execute("REFRESH MATERIALIZED VIEW trending_params")
        trending_params = session.query(TrendingParam).all()

        # Test that trending_params are not generated for hidden/deleted tracks
        # There should be 7 valid tracks with trending params
        assert len(trending_params) == 7

        # Linear scan over the fetched params; returns the row for track_id
        # or None when the track produced no trending params.
        def get_track_id(track_id):
            for param in trending_params:
                if param.track_id == track_id:
                    return param
            return None

        t1 = get_track_id(1)
        assert t1.play_count == 40
        assert t1.owner_follower_count == 10
        assert t1.repost_count == 63
        assert t1.repost_week_count == 13
        assert t1.repost_month_count == 33
        assert t1.repost_year_count == 63
        assert t1.save_count == 29
        assert t1.save_week_count == 4
        assert t1.save_month_count == 12
        assert t1.save_year_count == 28
        # user 1 has 10 followers
        # user 2 has 15 followers
        # user 3 has 2 followers
        # 3 saves from all 3 users
        # 4 reposts from user 1
        # 3 reposts from users 2, 3
        # -> (3 * 10 + 3 * 15 + 3 * 2) + (4 * 10 + 3 * 15 + 3 * 2) = 172
        assert float(t1.karma) == 172
def test_index_aggregate_user_update(app):
    """Test that the aggregate_user data is overwritten"""
    with app.app_context():
        db = get_db()

    entities = deepcopy(basic_entities)
    # Pre-seed aggregate_user rows with sentinel 9s for users 1 and 2;
    # _update_aggregate_user() should replace them with real counts.
    entities.update(
        {
            "aggregate_user": [
                {
                    "user_id": 1,
                    "track_count": 9,
                    "playlist_count": 9,
                    "album_count": 9,
                    "follower_count": 9,
                    "following_count": 9,
                    "repost_count": 9,
                    "track_save_count": 9,
                },
                {
                    "user_id": 2,
                    "track_count": 9,
                    "playlist_count": 9,
                    "album_count": 9,
                    "follower_count": 9,
                    "following_count": 9,
                    "repost_count": 9,
                    "track_save_count": 9,
                },
            ],
        }
    )
    populate_mock_db(db, entities, block_offset=3)

    with db.scoped_session() as session:
        results: List[AggregateUser] = (
            session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        )
        # Sanity check: the two seeded sentinel rows exist before the update.
        created_entity_tests(results, 2)
        _update_aggregate_user(session)

    with db.scoped_session() as session:
        # The sentinel values must now be overwritten with the true aggregates.
        basic_tests(session)
def test_update_related_artist_scores_if_needed(app):
    """Tests all cases of update_related_artist_scores_if_needed: not enough followers, existing fresh scores, and needing recalculation"""
    with app.app_context():
        db = get_db()

    with db.scoped_session() as session:
        did_calculate, _ = update_related_artist_scores_if_needed(session, 0)
        assert not did_calculate, "Don't calculate for low number of followers"

        populate_mock_db(db, entities)
        _update_aggregate_user(session)

        did_calculate, _ = update_related_artist_scores_if_needed(session, 0)
        assert did_calculate, "Calculate when followers >= MIN_FOLLOWER_REQUIREMENT (200)"

        # Immediately re-running should be a no-op: scores are fresh.
        did_calculate, _ = update_related_artist_scores_if_needed(session, 0)
        assert (
            not did_calculate
        ), "Don't calculate when scores are already calculated and fresh"

        # Age all score rows past the freshness window to force recalculation.
        session.query(RelatedArtist).update(
            {RelatedArtist.created_at: datetime.utcnow() - timedelta(weeks=5)}
        )
        session.commit()

        did_calculate, _ = update_related_artist_scores_if_needed(session, 0)
        assert did_calculate, "Calculate when the scores are stale"
def test_index_aggregate_user_empty_completely(app):
    """Test a completely empty database won't break"""
    with app.app_context():
        db = get_db()

    # Populate with no entities at all — the task must tolerate this.
    populate_mock_db(db, {}, block_offset=3)

    with db.scoped_session() as session:
        _update_aggregate_user(session)

        rows = session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        assert len(rows) == 0, "Test that empty entities won't generate AggregateUsers"

        assert get_last_indexed_checkpoint(session, AGGREGATE_USER) == 0
def test_calculate_related_artists_scores(app):
    # Exercises _calculate_related_artists_scores in three modes: sampled
    # (with a sample covering every follow, for determinism), unsampled, and
    # unsampled after adding a follow to a zero-follower artist.
    with app.app_context():
        db = get_db()

    populate_mock_db(db, entities)

    with db.scoped_session() as session:
        _update_aggregate_user(session)

        # Check sampled (with large enough sample to get all rows for deterministic result)
        rows = _calculate_related_artists_scores(
            session,
            0,
            sample_size=200 + 50 + 100 + 40 + 5 + 500 + 200,  # sum of all the follows
        )
        assert rows[0].related_artist_user_id == 1 and math.isclose(
            rows[0].score, 50, abs_tol=0.001)
        assert rows[1].related_artist_user_id == 2 and math.isclose(
            rows[1].score, 25, abs_tol=0.001)
        assert rows[2].related_artist_user_id == 6 and math.isclose(
            rows[2].score, 18, abs_tol=0.001)
        assert rows[3].related_artist_user_id == 3 and math.isclose(
            rows[3].score, 10, abs_tol=0.001)
        assert rows[4].related_artist_user_id == 5 and math.isclose(
            rows[4].score, 5, abs_tol=0.001)
        assert rows[5].related_artist_user_id == 4 and math.isclose(
            rows[5].score, 3.2, abs_tol=0.001)

        # Check unsampled
        rows = _calculate_related_artists_scores(session, 0)
        assert rows[0].related_artist_user_id == 1 and math.isclose(
            rows[0].score, 50, abs_tol=0.001)
        assert rows[1].related_artist_user_id == 2 and math.isclose(
            rows[1].score, 25, abs_tol=0.001)
        assert rows[2].related_artist_user_id == 6 and math.isclose(
            rows[2].score, 18, abs_tol=0.001)
        assert rows[3].related_artist_user_id == 3 and math.isclose(
            rows[3].score, 10, abs_tol=0.001)
        assert rows[4].related_artist_user_id == 5 and math.isclose(
            rows[4].score, 5, abs_tol=0.001)
        assert rows[5].related_artist_user_id == 4 and math.isclose(
            rows[5].score, 3.2, abs_tol=0.001)

        # Check edge case with 0 followers
        populate_mock_db(
            db, {"follows": [{
                "follower_user_id": 100,
                "followee_user_id": 7
            }]})
        rows = _calculate_related_artists_scores(session, 0)
        # Same results as unsampled. Shouldn't throw DivideByZero exception
        assert rows[0].related_artist_user_id == 1 and math.isclose(
            rows[0].score, 50, abs_tol=0.001)
        assert rows[1].related_artist_user_id == 2 and math.isclose(
            rows[1].score, 25, abs_tol=0.001)
        assert rows[2].related_artist_user_id == 6 and math.isclose(
            rows[2].score, 18, abs_tol=0.001)
        assert rows[3].related_artist_user_id == 3 and math.isclose(
            rows[3].score, 10, abs_tol=0.001)
        assert rows[4].related_artist_user_id == 5 and math.isclose(
            rows[4].score, 5, abs_tol=0.001)
        assert rows[5].related_artist_user_id == 4 and math.isclose(
            rows[5].score, 3.2, abs_tol=0.001)
def test_get_user_signals(app):
    # Verifies _get_user_signals returns follower/following counts and the
    # profile-picture / cover-photo presence flags for users seeded with
    # different combinations of image fields.
    with app.app_context():
        db = get_db()

    test_entities = {
        "users": [
            make_user(1, "user1", "wallet1"),
            make_user(2, "user2", "wallet2"),
            make_user(3, "user3", "wallet3"),
            make_user(4, "user4", "wallet4"),
            make_user(5, "user5", "wallet5"),
            # Users 6-10 each set a different image field so each presence
            # flag path is covered.
            make_user(
                6,
                "user6",
                "wallet6",
                profile_picture="Qm0123456789abcdef0123456789abcdef0123456789ab",
            ),
            make_user(
                7,
                "user7",
                "wallet7",
                profile_picture_sizes="Qm0123456789abcdef0123456789abcdef0123456789ab",
            ),
            make_user(
                8,
                "user8",
                "wallet8",
                cover_photo="Qm0123456789abcdef0123456789abcdef0123456789ab",
            ),
            make_user(
                9,
                "user9",
                "wallet9",
                cover_photo_sizes="Qm0123456789abcdef0123456789abcdef0123456789ab",
            ),
            make_user(
                10,
                "user10",
                "wallet10",
                profile_picture="Qm0123456789abcdef0123456789abcdef0123456789ab",
                cover_photo="Qm0123456789abcdef0123456789abcdef0123456789cd",
            ),
        ],
        "follows": [
            make_follow(2, 1),
            make_follow(3, 1),
            make_follow(5, 1),
            make_follow(1, 5),
            make_follow(2, 6),
            make_follow(3, 7),
            make_follow(4, 8),
            make_follow(5, 9),
            make_follow(10, 4),
        ],
    }
    populate_mock_db(db, test_entities)

    with db.scoped_session() as session:
        _update_aggregate_user(session)

        user_signals = _get_user_signals(session, "user1")
        assert user_signals["num_followers"] == 3
        assert user_signals["num_following"] == 1
        assert user_signals["has_profile_picture"] == False
        assert user_signals["has_cover_photo"] == False
        assert user_signals["wallet"] == "wallet1"

        user_signals = _get_user_signals(session, "user6")
        assert user_signals["num_followers"] == 1
        assert user_signals["num_following"] == 0
        assert user_signals["has_profile_picture"] == True
        assert user_signals["has_cover_photo"] == False
        assert user_signals["wallet"] == "wallet6"

        user_signals = _get_user_signals(session, "user7")
        assert user_signals["num_followers"] == 1
        assert user_signals["num_following"] == 0
        assert user_signals["has_profile_picture"] == True
        assert user_signals["has_cover_photo"] == False
        assert user_signals["wallet"] == "wallet7"

        user_signals = _get_user_signals(session, "user8")
        assert user_signals["num_followers"] == 1
        assert user_signals["num_following"] == 0
        assert user_signals["has_profile_picture"] == False
        assert user_signals["has_cover_photo"] == True
        assert user_signals["wallet"] == "wallet8"

        user_signals = _get_user_signals(session, "user9")
        assert user_signals["num_followers"] == 1
        assert user_signals["num_following"] == 0
        assert user_signals["has_profile_picture"] == False
        assert user_signals["has_cover_photo"] == True
        assert user_signals["wallet"] == "wallet9"

        user_signals = _get_user_signals(session, "user10")
        assert user_signals["num_followers"] == 0
        assert user_signals["num_following"] == 1
        assert user_signals["has_profile_picture"] == True
        assert user_signals["has_cover_photo"] == True
        assert user_signals["wallet"] == "wallet10"
def test_index_aggregate_user_update_with_only_aggregate_user(app):
    """Test that aggregate_user will never be truncated even when no other data"""
    with app.app_context():
        db = get_db()

    # Only aggregate_user rows (sentinel 9s) plus a future checkpoint — no
    # underlying users/tracks/etc. to aggregate from.
    entities = {
        "aggregate_user": [
            {
                "user_id": 1,
                "track_count": 9,
                "playlist_count": 9,
                "album_count": 9,
                "follower_count": 9,
                "following_count": 9,
                "repost_count": 9,
                "track_save_count": 9,
            },
            {
                "user_id": 2,
                "track_count": 9,
                "playlist_count": 9,
                "album_count": 9,
                "follower_count": 9,
                "following_count": 9,
                "repost_count": 9,
                "track_save_count": 9,
            },
            {
                "user_id": 3,
                "track_count": 9,
                "playlist_count": 9,
                "album_count": 9,
                "follower_count": 9,
                "following_count": 9,
                "repost_count": 9,
                "track_save_count": 9,
            },
        ],
        "indexing_checkpoints": [{
            "tablename": AGGREGATE_USER,
            "last_checkpoint": 9
        }],
    }
    populate_mock_db(db, entities)

    with db.scoped_session() as session:
        results: List[AggregateUser] = (
            session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        )
        assert len(results) == 3, "Test that entities exist as expected"

        _update_aggregate_user(session)

        results: List[AggregateUser] = (
            session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        )
        assert (
            len(results) == 3
        ), "Test zero-modifications since last_checkpoint is in the future"

        prev_id_checkpoint = get_last_indexed_checkpoint(session, AGGREGATE_USER)
        assert prev_id_checkpoint == 9

    # Reset the checkpoint to 0 and re-run: existing rows must survive.
    entities = {
        "indexing_checkpoints": [{
            "tablename": AGGREGATE_USER,
            "last_checkpoint": 0
        }],
    }
    populate_mock_db(db, entities)

    with db.scoped_session() as session:
        results: List[AggregateUser] = (
            session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        )
        assert (
            len(results) == 3
        ), "Test that entities exist as expected, even though checkpoint has been reset"

        _update_aggregate_user(session)

        results: List[AggregateUser] = (
            session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        )
        assert (
            len(results) == 3
        ), "Test that aggregate_user has not been truncated due to reset checkpoint"

        prev_id_checkpoint = get_last_indexed_checkpoint(session, AGGREGATE_USER)
        assert prev_id_checkpoint == 0
def test_index_aggregate_user_update_with_extra_user(app):
    """Test that the entire aggregate_user table is not truncated"""
    with app.app_context():
        db = get_db()

    entities = deepcopy(basic_entities)
    # Seed sentinel aggregate rows for users 1-3 and reset the checkpoint;
    # users 1 and 2 have real activity in basic_entities and should be
    # recomputed, while user 3 (no activity) must keep its seeded values.
    entities.update({
        "indexing_checkpoints": [{
            "tablename": AGGREGATE_USER,
            "last_checkpoint": 0
        }],
        "aggregate_user": [
            {
                "user_id": 1,
                "track_count": 9,
                "playlist_count": 9,
                "album_count": 9,
                "follower_count": 9,
                "following_count": 9,
                "repost_count": 9,
                "track_save_count": 9,
            },
            {
                "user_id": 2,
                "track_count": 9,
                "playlist_count": 9,
                "album_count": 9,
                "follower_count": 9,
                "following_count": 9,
                "repost_count": 9,
                "track_save_count": 9,
            },
            {
                "user_id": 3,
                "track_count": 9,
                "playlist_count": 9,
                "album_count": 9,
                "follower_count": 9,
                "following_count": 9,
                "repost_count": 9,
                "track_save_count": 9,
            },
        ],
    })

    with db.scoped_session() as session:
        results: List[AggregateUser] = (
            session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        )
        assert len(results) == 0, "Test that we start with clean tables"

    populate_mock_db(db, entities)

    with db.scoped_session() as session:
        results: List[AggregateUser] = (
            session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        )
        assert len(results) == 3, "Test that aggregate_user entities are populated"

        _update_aggregate_user(session)

    with db.scoped_session() as session:
        results: List[AggregateUser] = (
            session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        )
        assert len(results) == 3

        # Users 1 and 2: recomputed from actual activity in basic_entities.
        assert results[0].user_id == 1
        assert results[0].track_count == 2
        assert results[0].playlist_count == 1
        assert results[0].album_count == 1
        assert results[0].follower_count == 1
        assert results[0].following_count == 1
        assert results[0].repost_count == 0
        assert results[0].track_save_count == 0

        assert results[1].user_id == 2
        assert results[1].track_count == 1
        assert results[1].playlist_count == 0
        assert results[1].album_count == 0
        assert results[1].follower_count == 1
        assert results[1].following_count == 1
        assert results[1].repost_count == 2
        assert results[1].track_save_count == 1

        # User 3: no activity — seeded sentinel values must be untouched.
        assert results[2].user_id == 3
        assert results[2].track_count == 9
        assert results[2].playlist_count == 9
        assert results[2].album_count == 9
        assert results[2].follower_count == 9
        assert results[2].following_count == 9
        assert results[2].repost_count == 9
        assert results[2].track_save_count == 9

        prev_id_checkpoint = get_last_indexed_checkpoint(session, AGGREGATE_USER)
        assert prev_id_checkpoint == 3
def test_index_aggregate_user_empty_activity(app):
    """Test that a populated users table without activity won't break"""
    with app.app_context():
        db = get_db()

    seeded = {
        "users": [
            {"user_id": 1, "handle": "user1"},
            {"user_id": 2, "handle": "user2"},
        ],
        "indexing_checkpoints": [
            {"tablename": AGGREGATE_USER, "last_checkpoint": 5}
        ],
    }

    # create user1 and user2 in blocknumbers 3 and 4, respectively
    populate_mock_db(db, seeded, block_offset=3)

    with db.scoped_session() as session:
        _update_aggregate_user(session)
        rows = session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        assert (
            len(rows) == 0
        ), "Test that users updated on blocks previous to '5' will not be targeted"
        assert get_last_indexed_checkpoint(session, AGGREGATE_USER) == 4

    # Rewind the checkpoint so both users fall back into range.
    rewind = {
        "indexing_checkpoints": [
            {"tablename": AGGREGATE_USER, "last_checkpoint": 1}
        ],
    }
    populate_mock_db(db, rewind)

    with db.scoped_session() as session:
        _update_aggregate_user(session)
        rows = session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        assert (
            len(rows) == 2
        ), "Test that users updated on blocks after '1' will be targeted"
        assert get_last_indexed_checkpoint(session, AGGREGATE_USER) == 4
def test_index_aggregate_user_empty_users(app):
    """Test that user metadata without users table won't break"""
    with app.app_context():
        db = get_db()

    # Plenty of activity (tracks, playlists, follows, reposts, saves) but an
    # empty users table — aggregation should produce no rows and not crash.
    entities = {
        "users": [],
        "indexing_checkpoints": [{
            "tablename": AGGREGATE_USER,
            "last_checkpoint": 0
        }],
        "tracks": [
            {"track_id": 1, "owner_id": 1},
            {"track_id": 2, "owner_id": 1},
            {"track_id": 3, "is_unlisted": True, "owner_id": 1},
        ],
        "playlists": [
            {
                "playlist_id": 1,
                "playlist_owner_id": 1,
                "playlist_name": "name",
                "description": "description",
                "playlist_contents": {
                    "track_ids": [
                        {"track": 1, "time": 1},
                        {"track": 2, "time": 2},
                        {"track": 3, "time": 3},
                    ]
                },
            },
            {
                "playlist_id": 2,
                "is_album": True,
                "playlist_owner_id": 1,
                "playlist_name": "name",
                "description": "description",
                "playlist_contents": {
                    "track_ids": [
                        {"track": 1, "time": 1},
                        {"track": 2, "time": 2},
                        {"track": 3, "time": 3},
                    ]
                },
            },
        ],
        "follows": [
            {
                "follower_user_id": 1,
                "followee_user_id": 2,
                "created_at": datetime.now() - timedelta(days=8),
            },
            {
                "follower_user_id": 2,
                "followee_user_id": 1,
                "created_at": datetime.now() - timedelta(days=8),
            },
        ],
        "reposts": [
            {"repost_item_id": 1, "repost_type": "track", "user_id": 1},
            {"repost_item_id": 1, "repost_type": "playlist", "user_id": 1},
        ],
        "saves": [
            {"save_item_id": 1, "save_type": "track", "user_id": 1},
            {"save_item_id": 1, "save_type": "playlist", "user_id": 1},
        ],
    }
    populate_mock_db(db, entities)

    with db.scoped_session() as session:
        _update_aggregate_user(session)

        results: List[AggregateUser] = (
            session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        )
        assert (
            len(results) == 0
        ), "Test that without Users there will be no AggregateUsers"

        prev_id_checkpoint = get_last_indexed_checkpoint(session, AGGREGATE_USER)
        assert prev_id_checkpoint == 2
def test_trending_challenge_job(app):
    # End-to-end test: seeds tracks/playlists/users/social activity + plays,
    # refreshes trending aggregates, enqueues trending challenges on the
    # challenge event bus, and checks the resulting TrendingResult and
    # UserChallenge rows.
    with app.app_context():
        db = get_db()
        redis_conn = redis.Redis.from_url(url=REDIS_URL)

    test_entities = {
        "tracks": [
            {"track_id": 1, "owner_id": 1},
            {"track_id": 2, "owner_id": 2},
            {"track_id": 3, "owner_id": 3},
            {"track_id": 4, "owner_id": 4},
            {"track_id": 5, "owner_id": 5},
            {"track_id": 6, "owner_id": 2},
            {"track_id": 7, "owner_id": 3},
            {"track_id": 8, "owner_id": 3},
            # Unlisted tracks must not participate in trending.
            {"track_id": 9, "is_unlisted": True, "owner_id": 3},
            {"track_id": 11, "owner_id": 1},
            {"track_id": 12, "owner_id": 2},
            {"track_id": 13, "owner_id": 3},
            {"track_id": 14, "owner_id": 4},
            {"track_id": 15, "owner_id": 5},
        ],
        "playlists": [
            {
                "playlist_id": 1,
                "playlist_owner_id": 1,
                "playlist_name": "name",
                "description": "description",
                "playlist_contents": {
                    "track_ids": [
                        {"track": 1, "time": 1},
                        {"track": 2, "time": 2},
                        {"track": 3, "time": 3},
                    ]
                },
            },
            {
                "playlist_id": 2,
                "playlist_owner_id": 2,
                "playlist_name": "name",
                "description": "description",
                "playlist_contents": {
                    "track_ids": [
                        {"track": 1, "time": 1},
                        {"track": 2, "time": 2},
                        {"track": 3, "time": 3},
                    ]
                },
            },
            {
                "playlist_id": 3,
                "is_album": True,
                "playlist_owner_id": 3,
                "playlist_name": "name",
                "description": "description",
                "playlist_contents": {
                    "track_ids": [
                        {"track": 1, "time": 1},
                        {"track": 2, "time": 2},
                        {"track": 3, "time": 3},
                    ]
                },
            },
            {
                "playlist_id": 4,
                "playlist_owner_id": 4,
                "playlist_name": "name",
                "description": "description",
                "playlist_contents": {
                    "track_ids": [
                        {"track": 1, "time": 1},
                        {"track": 2, "time": 2},
                        {"track": 3, "time": 3},
                    ]
                },
            },
            {
                "playlist_id": 5,
                "playlist_owner_id": 5,
                "playlist_name": "name",
                "description": "description",
                "playlist_contents": {
                    "track_ids": [
                        {"track": 1, "time": 1},
                        {"track": 2, "time": 2},
                        {"track": 3, "time": 3},
                    ]
                },
            },
        ],
        "users": [
            {"user_id": 1, "handle": "user1"},
            {"user_id": 2, "handle": "user2"},
            {"user_id": 3, "handle": "user3"},
            {"user_id": 4, "handle": "user4"},
            {"user_id": 5, "handle": "user5"},
        ],
        # Follows dated > 1 week back so they count for weekly trending windows.
        "follows": [
            {
                "follower_user_id": 1,
                "followee_user_id": 2,
                "created_at": datetime.now() - timedelta(days=8),
            },
            {
                "follower_user_id": 1,
                "followee_user_id": 3,
                "created_at": datetime.now() - timedelta(days=8),
            },
            {
                "follower_user_id": 2,
                "followee_user_id": 3,
                "created_at": datetime.now() - timedelta(days=8),
            },
            {
                "follower_user_id": 2,
                "followee_user_id": 4,
                "created_at": datetime.now() - timedelta(days=8),
            },
            {
                "follower_user_id": 3,
                "followee_user_id": 6,
                "created_at": datetime.now() - timedelta(days=8),
            },
            {
                "follower_user_id": 4,
                "followee_user_id": 5,
                "created_at": datetime.now() - timedelta(days=8),
            },
            {
                "follower_user_id": 5,
                "followee_user_id": 1,
                "created_at": datetime.now() - timedelta(days=8),
            },
            {
                "follower_user_id": 6,
                "followee_user_id": 3,
                "created_at": datetime.now() - timedelta(days=8),
            },
        ],
        "reposts": [
            {"repost_item_id": 1, "repost_type": "track", "user_id": 2},
            {"repost_item_id": 1, "repost_type": "playlist", "user_id": 2},
            {"repost_item_id": 3, "repost_type": "track", "user_id": 3},
            {"repost_item_id": 1, "repost_type": "playlist", "user_id": 3},
            {"repost_item_id": 4, "repost_type": "track", "user_id": 1},
            {"repost_item_id": 5, "repost_type": "track", "user_id": 1},
            {"repost_item_id": 6, "repost_type": "track", "user_id": 1},
        ],
        "saves": [
            {"save_item_id": 1, "save_type": "track", "user_id": 2},
            {"save_item_id": 1, "save_type": "track", "user_id": 3},
            {"save_item_id": 4, "save_type": "track", "user_id": 1},
            {"save_item_id": 5, "save_type": "track", "user_id": 1},
            {"save_item_id": 6, "save_type": "track", "user_id": 1},
            {"save_item_id": 1, "save_type": "playlist", "user_id": 4},
            {"save_item_id": 2, "save_type": "playlist", "user_id": 3},
            {"save_item_id": 3, "save_type": "playlist", "user_id": 2},
            {"save_item_id": 4, "save_type": "playlist", "user_id": 1},
            {"save_item_id": 5, "save_type": "playlist", "user_id": 2},
        ],
        # Play counts per track; tracks 11-15 are the heavy hitters (200 each).
        "plays": [{"item_id": 1} for _ in range(55)]
        + [{"item_id": 2} for _ in range(60)]
        + [{"item_id": 3} for _ in range(70)]
        + [{"item_id": 4} for _ in range(90)]
        + [{"item_id": 5} for _ in range(80)]
        + [{"item_id": 6} for _ in range(40)]
        + [{"item_id": 11} for _ in range(200)]
        + [{"item_id": 12} for _ in range(200)]
        + [{"item_id": 13} for _ in range(200)]
        + [{"item_id": 14} for _ in range(200)]
        + [{"item_id": 15} for _ in range(200)],
    }

    populate_mock_db(db, test_entities, BLOCK_NUMBER + 1)
    bus = ChallengeEventBus(redis_conn)

    # Register events with the bus
    bus.register_listener(
        ChallengeEvent.trending_underground,
        trending_underground_track_challenge_manager,
    )
    bus.register_listener(ChallengeEvent.trending_track,
                          trending_track_challenge_manager)
    bus.register_listener(ChallengeEvent.trending_playlist,
                          trending_playlist_challenge_manager)

    trending_date = datetime.fromisoformat("2021-08-20")

    with db.scoped_session() as session:
        # Refresh every aggregate/view the trending strategies read from.
        _update_aggregate_plays(session)
        _update_aggregate_track(session)
        _update_aggregate_user(session)
        session.execute("REFRESH MATERIALIZED VIEW aggregate_interval_plays")
        session.execute("REFRESH MATERIALIZED VIEW trending_params")
        trending_track_versions = trending_strategy_factory.get_versions_for_type(
            TrendingType.TRACKS).keys()

        # Strategies backed by materialized views need their score queries updated.
        for version in trending_track_versions:
            strategy = trending_strategy_factory.get_strategy(
                TrendingType.TRACKS, version)
            if strategy.use_mat_view:
                strategy.update_track_score_query(session)

        session.commit()

    enqueue_trending_challenges(db, redis_conn, bus, trending_date)

    with db.scoped_session() as session:
        # Activate the three trending challenge types before processing events.
        session.query(Challenge).filter(
            or_(
                Challenge.id == "tp",
                Challenge.id == "tt",
                Challenge.id == "tut",
            )).update({
                "active": True,
                "starting_block": BLOCK_NUMBER
            })
        bus.process_events(session)
        session.flush()

        trending_tracks = (session.query(TrendingResult).filter(
            TrendingResult.type == str(TrendingType.TRACKS)).all())
        assert len(trending_tracks) == 5

        user_trending_tracks_challenges = (session.query(UserChallenge).filter(
            UserChallenge.challenge_id == "tt").all())
        assert len(user_trending_tracks_challenges) == 5

        # One challenge per rank 1-5 for the trending date, each rank unique.
        ranks = {
            "2021-08-20:1",
            "2021-08-20:2",
            "2021-08-20:3",
            "2021-08-20:4",
            "2021-08-20:5",
        }
        for challenge in user_trending_tracks_challenges:
            assert challenge.specifier in ranks
            ranks.remove(challenge.specifier)

        trending_playlists = (session.query(TrendingResult).filter(
            TrendingResult.type == str(TrendingType.PLAYLISTS)).all())
        assert len(trending_playlists) == 5
def test_populate_user_metadata(app):
    """Tests that populate_user_metadata works after aggregate_user update"""
    with app.app_context():
        db = get_db()

    test_entities = {
        "tracks": [
            {"track_id": 1, "owner_id": 1},
            {"track_id": 2, "owner_id": 1},
            {"track_id": 3, "owner_id": 2},
            {"track_id": 4, "owner_id": 2},
            {"track_id": 5, "owner_id": 2},
            {"track_id": 6, "owner_id": 2},
            {"track_id": 7, "owner_id": 3},
            {"track_id": 8, "owner_id": 3},
            # Unlisted: excluded from user 3's track_count.
            {"track_id": 9, "is_unlisted": True, "owner_id": 3},
        ],
        "playlists": [
            {"playlist_id": 1, "playlist_owner_id": 1},
            {"playlist_id": 2, "playlist_owner_id": 1},
            {"playlist_id": 3, "is_album": True, "playlist_owner_id": 1},
            {"playlist_id": 4, "playlist_owner_id": 2},
            {"playlist_id": 5, "is_delete": True, "playlist_owner_id": 2},
            {"playlist_id": 6, "is_album": True, "playlist_owner_id": 3},
            # NOTE(review): duplicate playlist_id 6 in the fixture — presumably
            # intentional (private variant), but worth confirming.
            {"playlist_id": 6, "is_private": True, "playlist_owner_id": 3},
        ],
        "users": [
            {"user_id": 1, "handle": "user1", "wallet": "0x111"},
            {"user_id": 2, "handle": "user2", "wallet": "0x222"},
            {"user_id": 3, "handle": "user3", "wallet": "0x333"},
            {"user_id": 4, "handle": "user4", "wallet": "0x444"},
        ],
        "follows": [
            {"follower_user_id": 1, "followee_user_id": 2},
            {"follower_user_id": 1, "followee_user_id": 3},
            {"follower_user_id": 2, "followee_user_id": 3},
        ],
        "reposts": [
            {"repost_item_id": 1, "repost_type": "track", "user_id": 2},
            {"repost_item_id": 1, "repost_type": "playlist", "user_id": 2},
            {"repost_item_id": 1, "repost_type": "track", "user_id": 3},
            {"repost_item_id": 1, "repost_type": "playlist", "user_id": 3},
            {"repost_item_id": 4, "repost_type": "track", "user_id": 1},
            {"repost_item_id": 5, "repost_type": "track", "user_id": 1},
            {"repost_item_id": 6, "repost_type": "track", "user_id": 1},
        ],
        "saves": [
            {"save_item_id": 1, "save_type": "track", "user_id": 2},
            {"save_item_id": 1, "save_type": "playlist", "user_id": 2},
            {"save_item_id": 1, "save_type": "track", "user_id": 3},
            {"save_item_id": 1, "save_type": "playlist", "user_id": 3},
            {"save_item_id": 4, "save_type": "track", "user_id": 1},
            {"save_item_id": 5, "save_type": "track", "user_id": 1},
            {"save_item_id": 6, "save_type": "track", "user_id": 1},
        ],
    }
    populate_mock_db(db, test_entities)

    with db.scoped_session() as session:
        _update_aggregate_user(session)

        # First pass: current_user_id=3; user 5 does not exist in the DB and
        # should come back with all-zero aggregate fields.
        user_ids = [1, 2, 3, 4, 5]
        users = [
            {"user_id": 1, "wallet": "0x111", "is_verified": False},
            {"user_id": 2, "wallet": "0x222", "is_verified": False},
            {"user_id": 3, "wallet": "0x333", "is_verified": False},
            {"user_id": 4, "wallet": "0x444", "is_verified": False},
            {"user_id": 5, "wallet": "0x555", "is_verified": False},
        ]

        users = populate_user_metadata(session, user_ids, users, 3)
        assert len(users) == 5

        # Users 1 and 2 follow current user 3; nobody else does.
        assert users[0]["does_follow_current_user"] == True
        assert users[1]["does_follow_current_user"] == True
        assert users[2]["does_follow_current_user"] == False
        assert users[3]["does_follow_current_user"] == False
        assert users[4]["does_follow_current_user"] == False

        assert users[0]["user_id"] == 1
        assert users[0][response_name_constants.track_count] == 2
        assert users[0][response_name_constants.playlist_count] == 2
        assert users[0][response_name_constants.album_count] == 1
        assert users[0][response_name_constants.follower_count] == 0
        assert users[0][response_name_constants.followee_count] == 2
        assert users[0][response_name_constants.repost_count] == 3

        assert users[1]["user_id"] == 2
        assert users[1][response_name_constants.track_count] == 4
        assert users[1][response_name_constants.playlist_count] == 1
        assert users[1][response_name_constants.album_count] == 0
        assert users[1][response_name_constants.follower_count] == 1
        assert users[1][response_name_constants.followee_count] == 1
        assert users[1][response_name_constants.repost_count] == 2

        assert users[2]["user_id"] == 3
        assert users[2][response_name_constants.track_count] == 2
        assert users[2][response_name_constants.playlist_count] == 0
        assert users[2][response_name_constants.album_count] == 1
        assert users[2][response_name_constants.follower_count] == 2
        assert users[2][response_name_constants.followee_count] == 0
        assert users[2][response_name_constants.repost_count] == 2

        assert users[3]["user_id"] == 4
        assert users[3][response_name_constants.track_count] == 0
        assert users[3][response_name_constants.playlist_count] == 0
        assert users[3][response_name_constants.album_count] == 0
        assert users[3][response_name_constants.follower_count] == 0
        assert users[3][response_name_constants.followee_count] == 0
        assert users[3][response_name_constants.repost_count] == 0

        assert users[4]["user_id"] == 5
        assert users[4][response_name_constants.track_count] == 0
        assert users[4][response_name_constants.playlist_count] == 0
        assert users[4][response_name_constants.album_count] == 0
        assert users[4][response_name_constants.follower_count] == 0
        assert users[4][response_name_constants.followee_count] == 0
        assert users[4][response_name_constants.repost_count] == 0

        # Second pass: current_user_id=1; checks follow-relationship fields
        # and wallet balance defaults.
        curr_user_ids = [1, 2, 3]
        curr_users = [
            {"user_id": 1, "wallet": "0x111", "is_verified": False},
            {"user_id": 2, "wallet": "0x222", "is_verified": False},
            {"user_id": 3, "wallet": "0x333", "is_verified": False},
        ]

        users = populate_user_metadata(session, curr_user_ids, curr_users, 1)
        assert len(users) == 3

        assert users[0]["user_id"] == 1
        assert users[0][response_name_constants.does_current_user_follow] == False
        assert users[0][response_name_constants.current_user_followee_follow_count] == 0
        assert users[0][response_name_constants.balance] == "0"
        assert users[0][response_name_constants.associated_wallets_balance] == "0"

        assert users[1]["user_id"] == 2
        assert users[1][response_name_constants.does_current_user_follow] == True
        assert users[1][response_name_constants.current_user_followee_follow_count] == 0
        assert users[1][response_name_constants.balance] == "0"
        assert users[1][response_name_constants.associated_wallets_balance] == "0"

        assert users[2]["user_id"] == 3
        assert users[2][response_name_constants.does_current_user_follow] == True
        assert users[2][response_name_constants.current_user_followee_follow_count] == 1
        assert users[2][response_name_constants.balance] == "0"
        assert users[2][response_name_constants.associated_wallets_balance] == "0"

        # get_top_users: should return only artists, most followers first
        top_user_ids = [u["user_id"] for u in _get_top_users(session, 1, 100, 0)]
        assert top_user_ids == [3, 2, 1]
def test_index_related_artists(app):
    # Exercises the related-artists pipeline end to end: queue a calculation
    # through redis, process it, check the scores, then add follows and
    # recalculate to confirm the scores shift.
    redis_conn = redis.Redis.from_url(url=REDIS_URL)
    with app.app_context():
        db = get_db()

    entities = {
        "users": [{}] * 7,
        "follows": [
            # at least 200 followers for user_0
            {"follower_user_id": i, "followee_user_id": 0} for i in range(1, 201)
        ]
        # 50 mutual followers between user_1 & user_0 make up 100% of user_1 followers = score 50
        + [{"follower_user_id": i, "followee_user_id": 1} for i in range(151, 201)]
        # 50 mutual followers between user_2 & user_0 make up 50% of user_2 followers = score 25
        + [{"follower_user_id": i, "followee_user_id": 2} for i in range(151, 251)]
        # 20 mutual followers between user_3 & user_0 make up 50% of user_3 followers = score 10
        + [{"follower_user_id": i, "followee_user_id": 3} for i in range(181, 221)]
        # 4 mutual followers between user_4 & user_0 make up 80% of user_4 followers = score 3.2
        + [{"follower_user_id": i, "followee_user_id": 4} for i in range(197, 202)]
        # 50 mutual followers between user_5 & user_0 make up 10% of user_5 followers = score 5
        + [{"follower_user_id": i, "followee_user_id": 5} for i in range(151, 651)]
        # 60 mutual followers between user_5 & user_0 make up 30% of user_6 followers = score 18
        + [{"follower_user_id": i, "followee_user_id": 6} for i in range(141, 341)],
        "tracks": [{"owner_id": i} for i in range(0, 7)],
    }
    populate_mock_db(db, entities)

    with db.scoped_session() as session:
        _update_aggregate_user(session)

    queue_related_artist_calculation(redis_conn, 0)
    process_related_artists_queue(db, redis_conn)

    with db.scoped_session() as session:
        # Age the freshly-written scores so the second pass below recalculates.
        session.query(RelatedArtist).update(
            {RelatedArtist.created_at: datetime.utcnow() - timedelta(weeks=5)}
        )
        results: List[RelatedArtist] = (
            session.query(RelatedArtist)
            .filter(RelatedArtist.user_id == 0)
            .order_by(desc(RelatedArtist.score))
            .all()
        )
        assert results[0].related_artist_user_id == 1 and math.isclose(
            results[0].score, 50, abs_tol=0.001
        )
        assert results[1].related_artist_user_id == 2 and math.isclose(
            results[1].score, 25, abs_tol=0.001
        )
        assert results[2].related_artist_user_id == 6 and math.isclose(
            results[2].score, 18, abs_tol=0.001
        )
        assert results[3].related_artist_user_id == 3 and math.isclose(
            results[3].score, 10, abs_tol=0.001
        )
        assert results[4].related_artist_user_id == 5 and math.isclose(
            results[4].score, 5, abs_tol=0.001
        )
        assert results[5].related_artist_user_id == 4 and math.isclose(
            results[5].score, 3.2, abs_tol=0.001
        )

    # Add 50 more followers to user_0 at a later block, then recalculate:
    # the mutual-follower ratios — and hence the ordering — change.
    populate_mock_db(
        db,
        {
            "follows": [
                {"follower_user_id": i, "followee_user_id": 0}
                for i in range(201, 251)
            ]
        },
        block_offset=100000,
    )

    queue_related_artist_calculation(redis_conn, 0)
    process_related_artists_queue(db, redis_conn)

    with db.scoped_session() as session:
        results: List[RelatedArtist] = (
            session.query(RelatedArtist)
            .filter(RelatedArtist.user_id == 0)
            .order_by(desc(RelatedArtist.score))
            .all()
        )
        assert results[0].related_artist_user_id == 2 and math.isclose(
            results[0].score, 100, abs_tol=0.001
        )
        assert results[1].related_artist_user_id == 6 and math.isclose(
            results[1].score, 60.5, abs_tol=0.001
        )
        assert results[2].related_artist_user_id == 1 and math.isclose(
            results[2].score, 50, abs_tol=0.001
        )
        assert results[3].related_artist_user_id == 3 and math.isclose(
            results[3].score, 40, abs_tol=0.001
        )
        assert results[4].related_artist_user_id == 5 and math.isclose(
            results[4].score, 20, abs_tol=0.001
        )
        assert results[5].related_artist_user_id == 4 and math.isclose(
            results[5].score, 5, abs_tol=0.001
        )