def test_index_aggregate_plays_no_plays(app):
    """Tests that aggregate_plays should skip indexing if there are no plays"""
    # setup: grab a db handle from the app context
    with app.app_context():
        db = get_db()

    # run: seed the database with an empty plays table
    populate_mock_db(db, {"plays": []})

    # indexing with zero plays must complete without raising
    with db.scoped_session() as session:
        _update_aggregate_plays(session)
# Example #2
def test_update_trending_params(app):
    """Test that refreshing trending params gives the correct values"""
    with app.app_context():
        db = get_db()

    # setup
    setup_trending(db)

    with db.scoped_session() as session:
        _update_aggregate_track(session)
        _update_aggregate_plays(session)
        _update_aggregate_user(session)
        session.execute("REFRESH MATERIALIZED VIEW aggregate_interval_plays")
        session.execute("REFRESH MATERIALIZED VIEW trending_params")
        trending_params = session.query(TrendingParam).all()

        # Test that trending_params are not generated for hidden/deleted tracks
        # There should be 7 valid tracks with trending params
        assert len(trending_params) == 7

        # Map track_id -> param row for O(1) lookup (None when absent)
        params_by_track = {param.track_id: param for param in trending_params}

        def get_track_id(track_id):
            return params_by_track.get(track_id)

        t1 = get_track_id(1)
        # Table-driven check of every aggregate column for track 1
        expected_t1 = {
            "play_count": 40,
            "owner_follower_count": 10,
            "repost_count": 63,
            "repost_week_count": 13,
            "repost_month_count": 33,
            "repost_year_count": 63,
            "save_count": 29,
            "save_week_count": 4,
            "save_month_count": 12,
            "save_year_count": 28,
        }
        for attr, value in expected_t1.items():
            assert getattr(t1, attr) == value
        # user 1 has 10 followers
        # user 2 has 15 followers
        # user 3 has 2 followers
        # 3 saves from all 3 users
        # 4 reposts from user 1
        # 3 reposts from users 2, 3
        # -> (3 * 10 + 3 * 15 + 3 * 2) + (4 * 10 + 3 * 15 + 3 * 2) = 172
        assert float(t1.karma) == 172
# Example #3
def test_update_track_score_query(app):
    """Happy path test: test that we get all valid listens from prior year"""
    with app.app_context():
        db = get_db()

    # setup
    setup_trending(db)
    # Fixed typo: variable was previously named "udpated_strategy"
    updated_strategy = TrendingTracksStrategyEJ57D()

    with db.scoped_session() as session:
        _update_aggregate_track(session)
        _update_aggregate_plays(session)
        session.execute("REFRESH MATERIALIZED VIEW aggregate_interval_plays")
        session.execute("REFRESH MATERIALIZED VIEW trending_params")
        updated_strategy.update_track_score_query(session)
        scores = session.query(TrackTrendingScore).all()
        # Test that scores are not generated for hidden/deleted tracks
        # There should be 7 valid tracks * 3 valid time ranges (week/month/year)
        assert len(scores) == 21

        def get_time_sorted(time_range):
            # Scores for one time range, ordered by (score, track_id) descending
            return sorted(
                [score for score in scores if score.time_range == time_range],
                key=lambda k: (k.score, k.track_id),
                reverse=True,
            )

        week_scores = get_time_sorted("week")
        month_scores = get_time_sorted("month")
        all_time_scores = get_time_sorted("allTime")

        assert len(week_scores) == 7
        assert len(month_scores) == 7
        assert len(all_time_scores) == 7

        # Check that the type and version fields are correct
        # (this loop previously appeared twice verbatim; deduplicated)
        for score in scores:
            assert score.type == updated_strategy.trending_type.name
            assert score.version == updated_strategy.version.name
def test_search_track_tags(app):
    """Tests that search by tags works for tracks"""
    # (fixed docstring typo: "fopr" -> "for")
    with app.app_context():
        db = get_db()

    test_entities = {
        "tracks": [
            {"track_id": 1, "tags": "", "owner_id": 1},
            {"track_id": 2, "owner_id": 1, "tags": "pop,rock,electric"},
            {"track_id": 3, "owner_id": 2},
            {"track_id": 4, "owner_id": 2, "tags": "funk,pop"},
            {"track_id": 5, "owner_id": 2, "tags": "funk,pop"},
            {"track_id": 6, "owner_id": 2, "tags": "funk,Funk,kpop"},
        ],
        # Play counts drive the result ordering asserted below:
        # track 5 -> 3 plays, track 2 -> 2, track 4 -> 1
        "plays": [
            {"item_id": 1},
            {"item_id": 1},
            {"item_id": 2},
            {"item_id": 2},
            {"item_id": 4},
            {"item_id": 5},
            {"item_id": 5},
            {"item_id": 5},
        ],
    }

    populate_mock_db(db, test_entities)

    with db.scoped_session() as session:
        session.execute("REFRESH MATERIALIZED VIEW tag_track_user")
        _update_aggregate_plays(session)
        args = {"search_str": "pop", "current_user_id": None, "limit": 10, "offset": 0}
        tracks = search_track_tags(session, args)

        # "pop" matches tracks 2, 4 and 5 (track 6's "kpop" does not match);
        # results come back ordered by play count descending
        assert len(tracks) == 3
        assert tracks[0]["track_id"] == 5  # First w/ 3 plays
        assert tracks[1]["track_id"] == 2  # Sec w/ 2 plays
        assert tracks[2]["track_id"] == 4  # Third w/ 1 plays
def test_index_aggregate_plays_same_checkpoint(app):
    """Test that we should not update when last index is the same"""
    # setup
    with app.app_context():
        db = get_db()

    # run: the stored checkpoint (9) already matches the number of plays,
    # so indexing should leave the three existing aggregate rows as-is
    entities = {
        "tracks": [
            {"track_id": 1, "title": "track 1"},
            {"track_id": 2, "title": "track 2"},
            {"track_id": 3, "title": "track 3"},
            {"track_id": 4, "title": "track 4"},
        ],
        "aggregate_plays": [
            # Current Plays
            {"play_item_id": 1, "count": 3},
            {"play_item_id": 2, "count": 3},
            {"play_item_id": 3, "count": 3},
        ],
        "indexing_checkpoints": [
            {"tablename": "aggregate_plays", "last_checkpoint": 9}
        ],
        # Current Plays: three plays apiece for tracks 1, 2 and 3
        "plays": [
            {"item_id": track} for track in (1, 2, 3) for _ in range(3)
        ],
    }

    populate_mock_db(db, entities)

    with db.scoped_session() as session:
        _update_aggregate_plays(session)

        results: List[AggregatePlays] = (
            session.query(AggregatePlays)
            .order_by(AggregatePlays.play_item_id)
            .all()
        )

        assert len(results) == 3
def test_index_aggregate_plays_populate(app):
    """Test that we should populate plays from empty"""

    date = datetime.now()
    # setup
    with app.app_context():
        db = get_db()

    # run
    two_weeks_ago = date - timedelta(weeks=2)
    entities = {
        "tracks": [
            {"track_id": 0, "title": "track 0"},
            {"track_id": 1, "title": "track 1"},
            {"track_id": 2, "title": "track 2"},
            {"track_id": 3, "title": "track 3"},
            {"track_id": 4, "title": "track 4"},
        ],
        "plays": [
            # Current Plays
            {"item_id": 0},
            {"item_id": 0},
            {"item_id": 1},
            {"item_id": 1},
            {"item_id": 2},
            {"item_id": 3},
            # > 1 wk plays
            {"item_id": 2, "created_at": two_weeks_ago},
            {"item_id": 2, "created_at": two_weeks_ago},
            {"item_id": 3, "created_at": two_weeks_ago},
            {"item_id": 3},
            {"item_id": 3},
            {"item_id": 4},
            {"item_id": 4},
        ],
    }

    populate_mock_db(db, entities)

    with db.scoped_session() as session:
        _update_aggregate_plays(session)

        results: List[AggregatePlays] = (
            session.query(AggregatePlays)
            .order_by(AggregatePlays.play_item_id)
            .all()
        )

        # Aggregation counts all plays regardless of age:
        # expected (play_item_id, count) pairs in id order
        expected = [(0, 2), (1, 2), (2, 3), (3, 4), (4, 2)]
        assert len(results) == len(expected)
        for row, (item_id, count) in zip(results, expected):
            assert row.play_item_id == item_id
            assert row.count == count
def test_trending_challenge_job(app):
    """End-to-end test of the trending challenge job.

    Seeds tracks, playlists, users, follows, reposts, saves and plays;
    refreshes the aggregate tables and trending materialized views; then runs
    `enqueue_trending_challenges` and asserts that trending results and the
    corresponding user challenges ("tt", "tp", "tut") are created.
    """
    with app.app_context():
        db = get_db()
    redis_conn = redis.Redis.from_url(url=REDIS_URL)

    # Fixture data: 14 tracks across 5 owners (track 9 is unlisted),
    # 5 playlists, 5 users, follows/reposts/saves, and per-track play counts.
    test_entities = {
        "tracks": [
            {
                "track_id": 1,
                "owner_id": 1
            },
            {
                "track_id": 2,
                "owner_id": 2
            },
            {
                "track_id": 3,
                "owner_id": 3
            },
            {
                "track_id": 4,
                "owner_id": 4
            },
            {
                "track_id": 5,
                "owner_id": 5
            },
            {
                "track_id": 6,
                "owner_id": 2
            },
            {
                "track_id": 7,
                "owner_id": 3
            },
            {
                "track_id": 8,
                "owner_id": 3
            },
            {
                "track_id": 9,
                "is_unlisted": True,
                "owner_id": 3
            },
            {
                "track_id": 11,
                "owner_id": 1
            },
            {
                "track_id": 12,
                "owner_id": 2
            },
            {
                "track_id": 13,
                "owner_id": 3
            },
            {
                "track_id": 14,
                "owner_id": 4
            },
            {
                "track_id": 15,
                "owner_id": 5
            },
        ],
        "playlists": [
            {
                "playlist_id": 1,
                "playlist_owner_id": 1,
                "playlist_name": "name",
                "description": "description",
                "playlist_contents": {
                    "track_ids": [
                        {
                            "track": 1,
                            "time": 1
                        },
                        {
                            "track": 2,
                            "time": 2
                        },
                        {
                            "track": 3,
                            "time": 3
                        },
                    ]
                },
            },
            {
                "playlist_id": 2,
                "playlist_owner_id": 2,
                "playlist_name": "name",
                "description": "description",
                "playlist_contents": {
                    "track_ids": [
                        {
                            "track": 1,
                            "time": 1
                        },
                        {
                            "track": 2,
                            "time": 2
                        },
                        {
                            "track": 3,
                            "time": 3
                        },
                    ]
                },
            },
            {
                "playlist_id": 3,
                "is_album": True,
                "playlist_owner_id": 3,
                "playlist_name": "name",
                "description": "description",
                "playlist_contents": {
                    "track_ids": [
                        {
                            "track": 1,
                            "time": 1
                        },
                        {
                            "track": 2,
                            "time": 2
                        },
                        {
                            "track": 3,
                            "time": 3
                        },
                    ]
                },
            },
            {
                "playlist_id": 4,
                "playlist_owner_id": 4,
                "playlist_name": "name",
                "description": "description",
                "playlist_contents": {
                    "track_ids": [
                        {
                            "track": 1,
                            "time": 1
                        },
                        {
                            "track": 2,
                            "time": 2
                        },
                        {
                            "track": 3,
                            "time": 3
                        },
                    ]
                },
            },
            {
                "playlist_id": 5,
                "playlist_owner_id": 5,
                "playlist_name": "name",
                "description": "description",
                "playlist_contents": {
                    "track_ids": [
                        {
                            "track": 1,
                            "time": 1
                        },
                        {
                            "track": 2,
                            "time": 2
                        },
                        {
                            "track": 3,
                            "time": 3
                        },
                    ]
                },
            },
        ],
        "users": [
            {
                "user_id": 1,
                "handle": "user1"
            },
            {
                "user_id": 2,
                "handle": "user2"
            },
            {
                "user_id": 3,
                "handle": "user3"
            },
            {
                "user_id": 4,
                "handle": "user4"
            },
            {
                "user_id": 5,
                "handle": "user5"
            },
        ],
        # All follows predate the trending window by over a week
        "follows": [
            {
                "follower_user_id": 1,
                "followee_user_id": 2,
                "created_at": datetime.now() - timedelta(days=8),
            },
            {
                "follower_user_id": 1,
                "followee_user_id": 3,
                "created_at": datetime.now() - timedelta(days=8),
            },
            {
                "follower_user_id": 2,
                "followee_user_id": 3,
                "created_at": datetime.now() - timedelta(days=8),
            },
            {
                "follower_user_id": 2,
                "followee_user_id": 4,
                "created_at": datetime.now() - timedelta(days=8),
            },
            {
                "follower_user_id": 3,
                "followee_user_id": 6,
                "created_at": datetime.now() - timedelta(days=8),
            },
            {
                "follower_user_id": 4,
                "followee_user_id": 5,
                "created_at": datetime.now() - timedelta(days=8),
            },
            {
                "follower_user_id": 5,
                "followee_user_id": 1,
                "created_at": datetime.now() - timedelta(days=8),
            },
            {
                "follower_user_id": 6,
                "followee_user_id": 3,
                "created_at": datetime.now() - timedelta(days=8),
            },
        ],
        "reposts": [
            {
                "repost_item_id": 1,
                "repost_type": "track",
                "user_id": 2
            },
            {
                "repost_item_id": 1,
                "repost_type": "playlist",
                "user_id": 2
            },
            {
                "repost_item_id": 3,
                "repost_type": "track",
                "user_id": 3
            },
            {
                "repost_item_id": 1,
                "repost_type": "playlist",
                "user_id": 3
            },
            {
                "repost_item_id": 4,
                "repost_type": "track",
                "user_id": 1
            },
            {
                "repost_item_id": 5,
                "repost_type": "track",
                "user_id": 1
            },
            {
                "repost_item_id": 6,
                "repost_type": "track",
                "user_id": 1
            },
        ],
        "saves": [
            {
                "save_item_id": 1,
                "save_type": "track",
                "user_id": 2
            },
            {
                "save_item_id": 1,
                "save_type": "track",
                "user_id": 3
            },
            {
                "save_item_id": 4,
                "save_type": "track",
                "user_id": 1
            },
            {
                "save_item_id": 5,
                "save_type": "track",
                "user_id": 1
            },
            {
                "save_item_id": 6,
                "save_type": "track",
                "user_id": 1
            },
            {
                "save_item_id": 1,
                "save_type": "playlist",
                "user_id": 4
            },
            {
                "save_item_id": 2,
                "save_type": "playlist",
                "user_id": 3
            },
            {
                "save_item_id": 3,
                "save_type": "playlist",
                "user_id": 2
            },
            {
                "save_item_id": 4,
                "save_type": "playlist",
                "user_id": 1
            },
            {
                "save_item_id": 5,
                "save_type": "playlist",
                "user_id": 2
            },
        ],
        # Per-track play counts; tracks 11-15 get 200 plays each
        "plays": [{
            "item_id": 1
        } for _ in range(55)] + [{
            "item_id": 2
        } for _ in range(60)] + [{
            "item_id": 3
        } for _ in range(70)] + [{
            "item_id": 4
        } for _ in range(90)] + [{
            "item_id": 5
        } for _ in range(80)] + [{
            "item_id": 6
        } for _ in range(40)] + [{
            "item_id": 11
        } for _ in range(200)] + [{
            "item_id": 12
        } for _ in range(200)] + [{
            "item_id": 13
        } for _ in range(200)] + [{
            "item_id": 14
        } for _ in range(200)] + [{
            "item_id": 15
        } for _ in range(200)],
    }

    populate_mock_db(db, test_entities, BLOCK_NUMBER + 1)
    bus = ChallengeEventBus(redis_conn)

    # Register events with the bus
    bus.register_listener(
        ChallengeEvent.trending_underground,
        trending_underground_track_challenge_manager,
    )
    bus.register_listener(ChallengeEvent.trending_track,
                          trending_track_challenge_manager)
    bus.register_listener(ChallengeEvent.trending_playlist,
                          trending_playlist_challenge_manager)

    trending_date = datetime.fromisoformat("2021-08-20")

    # Build the aggregate tables and refresh the trending materialized views,
    # then compute scores for every mat-view-based trending strategy version
    with db.scoped_session() as session:
        _update_aggregate_plays(session)
        _update_aggregate_track(session)
        _update_aggregate_user(session)
        session.execute("REFRESH MATERIALIZED VIEW aggregate_interval_plays")
        session.execute("REFRESH MATERIALIZED VIEW trending_params")
        trending_track_versions = trending_strategy_factory.get_versions_for_type(
            TrendingType.TRACKS).keys()

        for version in trending_track_versions:
            strategy = trending_strategy_factory.get_strategy(
                TrendingType.TRACKS, version)
            if strategy.use_mat_view:
                strategy.update_track_score_query(session)

        session.commit()

    enqueue_trending_challenges(db, redis_conn, bus, trending_date)

    with db.scoped_session() as session:
        # Activate the trending challenges (tp/tt/tut) before processing events
        session.query(Challenge).filter(
            or_(
                Challenge.id == "tp",
                Challenge.id == "tt",
                Challenge.id == "tut",
            )).update({
                "active": True,
                "starting_block": BLOCK_NUMBER
            })
        bus.process_events(session)
        session.flush()
        trending_tracks = (session.query(TrendingResult).filter(
            TrendingResult.type == str(TrendingType.TRACKS)).all())
        assert len(trending_tracks) == 5

        user_trending_tracks_challenges = (session.query(UserChallenge).filter(
            UserChallenge.challenge_id == "tt").all())
        assert len(user_trending_tracks_challenges) == 5
        # Each challenge specifier encodes the trending date and rank 1-5
        ranks = {
            "2021-08-20:1",
            "2021-08-20:2",
            "2021-08-20:3",
            "2021-08-20:4",
            "2021-08-20:5",
        }
        for challenge in user_trending_tracks_challenges:
            assert challenge.specifier in ranks
            ranks.remove(challenge.specifier)

        trending_playlists = (session.query(TrendingResult).filter(
            TrendingResult.type == str(TrendingType.PLAYLISTS)).all())
        assert len(trending_playlists) == 5
def test_listen_count_milestone_processing(app):
    """Test that listen-count milestones are created once per threshold and
    that new plays push tracks past additional thresholds.

    NOTE(review): `track_ids` is referenced below but not defined in this
    function — presumably a module-level constant listing the track ids under
    test; verify against the rest of the file.
    """
    redis_conn = redis.Redis.from_url(url=REDIS_URL)
    # Record the play-indexing position (slot/timestamp) that milestones
    # created in this pass will be stamped with
    set_json_cached_key(
        redis_conn, CURRENT_PLAY_INDEXING, {"slot": 12, "timestamp": 1634836054}
    )
    with app.app_context():
        db = get_db()

        # Play counts chosen to land just below, at, or above each threshold
        test_entities = {
            "plays": [{"item_id": 1} for _ in range(8)]
            + [{"item_id": 2} for _ in range(10)]  # milestone 10
            + [{"item_id": 3} for _ in range(11)]  # milestone 10
            + [{"item_id": 4} for _ in range(12)]  # milestone 10
            + [{"item_id": 5} for _ in range(25)]  # milestone 25
            + [{"item_id": 6} for _ in range(27)]  # milestone 25
            + [{"item_id": 7} for _ in range(40)]  # milestone 25
            + [{"item_id": 8} for _ in range(80)]  # milestone 50
            + [{"item_id": 9} for _ in range(111)]  # milestone 100
            + [{"item_id": 10} for _ in range(25)]  # milestone 25
        }
        populate_mock_db(db, test_entities)

        with db.scoped_session() as session:
            _update_aggregate_plays(session)

        # Mark these tracks as having new listens to process
        redis_conn.sadd(TRACK_LISTEN_IDS, *track_ids)

        index_listen_count_milestones(db, redis_conn)
        with db.scoped_session() as session:
            milestones = session.query(Milestone).all()
            # 9 tracks crossed a threshold (track 1 with 8 plays did not)
            assert len(milestones) == 9
            sorted_milestones = sorted(milestones, key=lambda m: m.id)
            sorted_milestones = [
                (milestone.id, milestone.threshold, milestone.slot, milestone.timestamp)
                for milestone in sorted_milestones
            ]

            assert sorted_milestones == [
                (2, 10, 12, datetime.fromtimestamp(1634836054)),
                (3, 10, 12, datetime.fromtimestamp(1634836054)),
                (4, 10, 12, datetime.fromtimestamp(1634836054)),
                (5, 25, 12, datetime.fromtimestamp(1634836054)),
                (6, 25, 12, datetime.fromtimestamp(1634836054)),
                (7, 25, 12, datetime.fromtimestamp(1634836054)),
                (8, 50, 12, datetime.fromtimestamp(1634836054)),
                (9, 100, 12, datetime.fromtimestamp(1634836054)),
                (10, 25, 12, datetime.fromtimestamp(1634836054)),
            ]

        # Add the same track and process to check that no new milestones are created
        redis_conn.sadd(TRACK_LISTEN_IDS, *track_ids)
        index_listen_count_milestones(db, redis_conn)
        with db.scoped_session() as session:
            milestones = session.query(Milestone).all()
            assert len(milestones) == 9

        # Additional plays; explicit `id`s keep rows distinct from the first batch
        test_entities = {
            "plays": [
                {"item_id": 1, "id": 1000 + i} for i in range(3)
            ]  # 3 + 8 = 11 new
            + [{"item_id": 2, "id": 1200 + i} for i in range(100)]  # 10 + 100 = 110 new
            + [
                {"item_id": 3, "id": 1400 + i} for i in range(10)
            ]  # 10 + 11 = 21 not new
            + [
                {"item_id": 4, "id": 1600 + i} for i in range(1000)
            ]  # 1000 + 12 = 1012 new
            + [
                {"item_id": 8, "id": 3000 + i} for i in range(19)
            ]  # 19 + 80 = 99 not new
            + [
                {"item_id": 9, "id": 9000 + i} for i in range(5000)
            ]  # 5000 + 111 = 5111 new
        }
        populate_mock_db(db, test_entities)
        with db.scoped_session() as session:
            _update_aggregate_plays(session)

        # Add the same track and process to check that no new milestones are created
        redis_conn.sadd(TRACK_LISTEN_IDS, *track_ids)
        set_json_cached_key(
            redis_conn, CURRENT_PLAY_INDEXING, {"slot": 14, "timestamp": 1634836056}
        )
        index_listen_count_milestones(db, redis_conn)

        with db.scoped_session() as session:
            # Only the four tracks that crossed a new threshold get a slot-14 row
            milestones = session.query(Milestone).filter(Milestone.slot == 14).all()
            assert len(milestones) == 4
            sorted_milestones = sorted(milestones, key=lambda m: m.id)
            sorted_milestones = [
                (milestone.id, milestone.threshold) for milestone in sorted_milestones
            ]

            assert sorted_milestones == [(1, 10), (2, 100), (4, 1000), (9, 5000)]

        # Add a track that's not been indexed yet
        redis_conn.sadd(TRACK_LISTEN_IDS, 20)
        set_json_cached_key(
            redis_conn, CURRENT_PLAY_INDEXING, {"slot": 14, "timestamp": 1634836056}
        )
        index_listen_count_milestones(db, redis_conn)