def test_index_aggregate_track_same_checkpoint(app):
    """Test that we should not update when last index is the same"""

    with app.app_context():
        db = get_db()

    entities = deepcopy(basic_entities)
    # BUGFIX: previously this assigned through `basic_entities` *after* the
    # deepcopy, so the copied entities never received block number 9 and the
    # shared module-level fixture was mutated for every later test. Set the
    # block number on the copy instead.
    current_blocknumber = entities["blocks"][0]["number"] = 9
    entities.update({
        "indexing_checkpoints": [{
            "tablename": AGGREGATE_TRACK,
            "last_checkpoint": current_blocknumber
        }],
    })

    populate_mock_db(db, entities)

    with db.scoped_session() as session:
        # Sanity check: aggregate_track starts out empty.
        results: List[AggregateTrack] = (
            session.query(AggregateTrack).order_by(
                AggregateTrack.track_id).all())
        assert len(results) == 0

        _update_aggregate_track(session)

        # Checkpoint already equals the latest block, so nothing is indexed.
        results: List[AggregateTrack] = (
            session.query(AggregateTrack).order_by(
                AggregateTrack.track_id).all())
        assert len(results) == 0

        prev_id_checkpoint = get_last_indexed_checkpoint(
            session, AGGREGATE_TRACK)
        assert prev_id_checkpoint == current_blocknumber
def test_index_aggregate_track_empty_tracks(app):
    """Test that track metadata without tracks table won't break"""

    with app.app_context():
        db = get_db()

    # Social activity referencing track 1 exists, but no track rows do.
    entities = {
        "users": [],
        "indexing_checkpoints": [{
            "tablename": AGGREGATE_TRACK,
            "last_checkpoint": 0
        }],
        "tracks": [],
        "reposts": [
            {"repost_item_id": 1, "repost_type": "track", "track_id": 1},
            {"repost_item_id": 1, "repost_type": "playlist", "track_id": 1},
        ],
        "saves": [
            {"save_item_id": 1, "save_type": "track", "track_id": 1},
            {"save_item_id": 1, "save_type": "playlist", "track_id": 1},
        ],
    }

    populate_mock_db(db, entities)

    with db.scoped_session() as session:
        _update_aggregate_track(session)

        aggregates: List[AggregateTrack] = (
            session.query(AggregateTrack)
            .order_by(AggregateTrack.track_id)
            .all())

        assert (len(aggregates) == 0
                ), "Test that without Tracks there will be no AggregateTracks"

        prev_id_checkpoint = get_last_indexed_checkpoint(
            session, AGGREGATE_TRACK)
        assert prev_id_checkpoint == 1
def test_index_aggregate_track_update(app):
    """Test that the aggregate_track data is continously added to"""

    with app.app_context():
        db = get_db()

    # Seed pre-existing aggregate rows (track 3 intentionally missing) on
    # top of the shared basic entities.
    entities = deepcopy(basic_entities)
    entities.update({
        "aggregate_track": [
            {"track_id": track_id, "repost_count": 9, "save_count": 9}
            for track_id in (1, 2, 4, 5)
        ],
    })

    populate_mock_db(db, entities, block_offset=3)

    with db.scoped_session() as session:
        rows: List[AggregateTrack] = (
            session.query(AggregateTrack)
            .order_by(AggregateTrack.track_id)
            .all())

        assert len(rows) == 4, "Test that the entities were created"
        for idx, offset in enumerate((0, 1, 3, 4)):
            assert rows[idx].track_id == offset + 1, "Test that the entities were created"
            assert rows[idx].repost_count == 9, "Test that the entities were created"
            assert rows[idx].save_count == 9, "Test that the entities were created"

        _update_aggregate_track(session)

    with db.scoped_session() as session:
        basic_tests(session)
def test_get_remixable_tracks(app):

    with app.app_context():
        db = get_db()

        populate_tracks(db)
        populate_mock_db(
            db,
            {
                "remixes": [
                    {"parent_track_id": 9, "child_track_id": 1},
                    {"parent_track_id": 8, "child_track_id": 1},
                ],
                "stems": [
                    {"parent_track_id": 7, "child_track_id": 1},
                    {"parent_track_id": 6, "child_track_id": 1},
                    # Verify that tracks with deleted stems are not returned
                    {"parent_track_id": 5, "child_track_id": 10},
                ],
                "saves": [{"user_id": 4, "save_item_id": 1}],
                "reposts": [{"user_id": 4, "repost_item_id": 1}],
            },
        )

        with db.scoped_session() as session:
            _update_aggregate_track(session)
        remixables = get_remixable_tracks({"with_users": True})
        assert len(remixables) == 2
        assert remixables[0]["user"]
# (removed garbled extraction artifact: "示例#5" / "0")
def test_update_trending_params(app):
    """Test that refreshing trending params gives the correct values"""
    with app.app_context():
        db = get_db()

    # setup
    setup_trending(db)

    with db.scoped_session() as session:
        _update_aggregate_track(session)
        _update_aggregate_plays(session)
        _update_aggregate_user(session)
        session.execute("REFRESH MATERIALIZED VIEW aggregate_interval_plays")
        session.execute("REFRESH MATERIALIZED VIEW trending_params")
        trending_params = session.query(TrendingParam).all()

        # Test that trending_params are not generated for hidden/deleted tracks
        # There should be 7 valid tracks with trending params
        assert len(trending_params) == 7

        def find_params_for_track(track_id):
            # Linear scan is fine for the handful of rows in this test.
            return next(
                (param for param in trending_params
                 if param.track_id == track_id),
                None,
            )

        t1 = find_params_for_track(1)
        assert t1.play_count == 40
        assert t1.owner_follower_count == 10
        assert t1.repost_count == 63
        assert t1.repost_week_count == 13
        assert t1.repost_month_count == 33
        assert t1.repost_year_count == 63
        assert t1.save_count == 29
        assert t1.save_week_count == 4
        assert t1.save_month_count == 12
        assert t1.save_year_count == 28
        # user 1 has 10 followers
        # user 2 has 15 followers
        # user 3 has 2 followers
        # 3 saves from all 3 users
        # 4 reposts from user 1
        # 3 reposts from users 2, 3
        # -> (3 * 10 + 3 * 15 + 3 * 2) + (4 * 10 + 3 * 15 + 3 * 2) = 172
        assert float(t1.karma) == 172
# (removed garbled extraction artifact: "示例#6" / "0")
def test_update_track_score_query(app):
    """Happy path test: test that we get all valid listens from prior year"""
    with app.app_context():
        db = get_db()

    # setup
    setup_trending(db)
    # Renamed from the misspelled `udpated_strategy` (local only).
    updated_strategy = TrendingTracksStrategyEJ57D()

    with db.scoped_session() as session:
        _update_aggregate_track(session)
        _update_aggregate_plays(session)
        session.execute("REFRESH MATERIALIZED VIEW aggregate_interval_plays")
        session.execute("REFRESH MATERIALIZED VIEW trending_params")
        updated_strategy.update_track_score_query(session)
        scores = session.query(TrackTrendingScore).all()
        # Test that scores are not generated for hidden/deleted tracks
        # There should be 7 valid tracks * 3 valid time ranges (week/month/year)
        assert len(scores) == 21

        def get_time_sorted(time_range):
            """Scores for one time range, highest (score, track_id) first."""
            return sorted(
                [score for score in scores if score.time_range == time_range],
                key=lambda k: (k.score, k.track_id),
                reverse=True,
            )

        week_scores = get_time_sorted("week")
        month_scores = get_time_sorted("month")
        all_time_scores = get_time_sorted("allTime")

        assert len(week_scores) == 7
        assert len(month_scores) == 7
        assert len(all_time_scores) == 7

        # Check that the type and version fields are correct
        # (an exact duplicate of this loop was removed)
        for score in scores:
            assert score.type == updated_strategy.trending_type.name
            assert score.version == updated_strategy.version.name
def test_index_aggregate_track_empty_completely(app):
    """Test a completely empty database won't break"""

    with app.app_context():
        db = get_db()

    populate_mock_db(db, {}, block_offset=3)

    with db.scoped_session() as session:
        _update_aggregate_track(session)

        aggregates: List[AggregateTrack] = (
            session.query(AggregateTrack)
            .order_by(AggregateTrack.track_id)
            .all())

        assert (len(aggregates) == 0
                ), "Test that empty entities won't generate AggregateTracks"

        prev_id_checkpoint = get_last_indexed_checkpoint(
            session, AGGREGATE_TRACK)
        assert prev_id_checkpoint == 0
# (removed garbled extraction artifact: "示例#8" / "0")
def setup_search(app_module):
    """Seed blocks, tracks, users, follows, playlists, saves, and balances,
    refresh the search lexeme materialized views, then run es-indexer until
    it reports catchup.

    NOTE(review): assumes an ``es-indexer`` npm project exists relative to
    the working directory — confirm against the test harness layout.
    """
    with app_module.app_context():
        db = get_db()

    # Import app so that it'll run migrations against the db
    now = datetime.now()
    # Three sequential blocks; only the last is marked current.
    blocks = [
        Block(
            blockhash=hex(1),
            number=1,
            parenthash="0x01",
            is_current=False,
        ),
        Block(
            blockhash=hex(2),
            number=2,
            parenthash="0x02",
            is_current=False,
        ),
        Block(
            blockhash=hex(3),
            number=3,
            parenthash="0x03",
            is_current=True,
        ),
    ]
    # Tracks 1 and 2 share the "the track" title prefix for lexeme matching;
    # track 3 ("xyz") is a deliberate non-match. Downloadability varies.
    tracks = [
        Track(
            blockhash=hex(1),
            blocknumber=1,
            track_id=1,
            is_current=True,
            is_delete=False,
            owner_id=1,
            route_id="",
            track_segments=[],
            genre="",
            updated_at=now,
            created_at=now,
            is_unlisted=False,
            title="the track 1",
            download={"cid": None, "is_downloadable": False, "requires_follow": False},
        ),
        Track(
            blockhash=hex(2),
            blocknumber=2,
            track_id=2,
            is_current=True,
            is_delete=False,
            owner_id=2,
            route_id="",
            track_segments=[],
            genre="",
            updated_at=now,
            created_at=now,
            is_unlisted=False,
            title="the track 2",
            download={"cid": None, "is_downloadable": True, "requires_follow": False},
        ),
        Track(
            blockhash=hex(3),
            blocknumber=3,
            track_id=3,
            is_current=True,
            is_delete=False,
            owner_id=1,
            route_id="",
            track_segments=[],
            genre="",
            updated_at=now,
            created_at=now,
            is_unlisted=False,
            title="xyz",
            download={"cid": None, "is_downloadable": True, "requires_follow": False},
        ),
    ]

    # need users for the lexeme dict to work
    users = [
        User(
            blockhash=hex(1),
            blocknumber=1,
            user_id=1,
            is_current=True,
            handle="",
            wallet="",
            name="user 1",
            updated_at=now,
            created_at=now,
        ),
        User(
            blockhash=hex(2),
            blocknumber=2,
            user_id=2,
            is_current=True,
            handle="",
            name="user 2",
            wallet="",
            updated_at=now,
            created_at=now,
        ),
        User(
            blockhash=hex(3),
            blocknumber=3,
            user_id=3,
            is_current=True,
            handle="",
            wallet="",
            name="fdwea",
            updated_at=now,
            created_at=now,
        ),
    ]

    # Single follow edge: user 2 follows user 1.
    follows = [
        Follow(
            blockhash=hex(1),
            blocknumber=1,
            follower_user_id=2,
            followee_user_id=1,
            is_current=True,
            is_delete=False,
            created_at=now,
        )
    ]

    # One plain playlist (track 1) and one album (track 2).
    playlists = [
        Playlist(
            blockhash=hex(1),
            blocknumber=1,
            playlist_id=1,
            playlist_owner_id=1,
            is_album=False,
            is_private=False,
            playlist_name="playlist 1",
            playlist_contents={"track_ids": [{"track": 1, "time": 1}]},
            is_current=True,
            is_delete=False,
            updated_at=now,
            created_at=now,
        ),
        Playlist(
            blockhash=hex(2),
            blocknumber=2,
            playlist_id=2,
            playlist_owner_id=2,
            is_album=True,
            is_private=False,
            playlist_name="album 1",
            playlist_contents={"track_ids": [{"track": 2, "time": 2}]},
            is_current=True,
            is_delete=False,
            updated_at=now,
            created_at=now,
        ),
    ]

    # User 1 saves track 1, playlist 1, and album 2.
    saves = [
        Save(
            blockhash=hex(1),
            blocknumber=1,
            user_id=1,
            save_item_id=1,
            save_type=SaveType.track,
            created_at=now,
            is_current=True,
            is_delete=False,
        ),
        Save(
            blockhash=hex(1),
            blocknumber=1,
            user_id=1,
            save_item_id=1,
            save_type=SaveType.playlist,
            created_at=now,
            is_current=True,
            is_delete=False,
        ),
        Save(
            blockhash=hex(1),
            blocknumber=1,
            user_id=1,
            save_item_id=2,
            save_type=SaveType.album,
            created_at=now,
            is_current=True,
            is_delete=False,
        ),
    ]

    # Zeroed balance row so user 1 has a balance record at all.
    balances = [
        UserBalance(
            user_id=1,
            balance=0,
            associated_wallets_balance=0,
            associated_sol_wallets_balance=0,
            waudio=0,
        )
    ]

    with db.scoped_session() as session:
        # Flush per group so FK-dependent rows see their parents.
        for block in blocks:
            session.add(block)
            session.flush()
        for track in tracks:
            session.add(track)
        for user in users:
            session.add(user)
            session.flush()
        for follow in follows:
            session.add(follow)
            session.flush()
        for playlist in playlists:
            session.add(playlist)
            session.flush()
        for save in saves:
            session.add(save)
            session.flush()
        for balance in balances:
            session.add(balance)
            session.flush()

        # Refresh the lexeme matview
        _update_aggregate_track(session)
        session.execute("REFRESH MATERIALIZED VIEW track_lexeme_dict;")

        session.execute(
            UPDATE_AGGREGATE_USER_QUERY, {"prev_indexed_aggregate_block": 0}
        )
        session.execute("REFRESH MATERIALIZED VIEW user_lexeme_dict;")

        session.execute("REFRESH MATERIALIZED VIEW aggregate_playlist;")
        session.execute("REFRESH MATERIALIZED VIEW playlist_lexeme_dict;")
        session.execute("REFRESH MATERIALIZED VIEW album_lexeme_dict;")

    try:
        # es-indexer runs indefinitely: a normal (non-timeout) return means
        # it exited prematurely, so the success path below raises.
        output = subprocess.run(
            ["npm", "run", "dev"],
            env=os.environ,
            capture_output=True,
            text=True,
            cwd="es-indexer",
            timeout=5,
        )
        raise Exception(
            f"Elasticsearch indexing stopped: {output.stderr}. With env: {os.environ}"
        )
    except subprocess.TimeoutExpired as timeout:
        # Expected path: the 5s timeout fires; verify the indexer logged that
        # it finished catching up before being cut off.
        if "catchup done" not in timeout.output.decode("utf-8"):
            raise Exception("Elasticsearch failed to index")
def test_trending_challenge_job(app):
    with app.app_context():
        db = get_db()
    redis_conn = redis.Redis.from_url(url=REDIS_URL)

    # Build the fixture entities programmatically; contents are identical to
    # the long-hand literals they replace.
    playlists = [
        {
            "playlist_id": pid,
            "playlist_owner_id": pid,
            "playlist_name": "name",
            "description": "description",
            "playlist_contents": {
                "track_ids": [{"track": i, "time": i} for i in (1, 2, 3)]
            },
        }
        for pid in (1, 2, 3, 4, 5)
    ]
    playlists[2]["is_album"] = True  # playlist 3 is an album

    plays = []
    for item_id, play_count in (
        (1, 55), (2, 60), (3, 70), (4, 90), (5, 80), (6, 40),
        (11, 200), (12, 200), (13, 200), (14, 200), (15, 200),
    ):
        plays.extend({"item_id": item_id} for _ in range(play_count))

    test_entities = {
        "tracks": [
            {"track_id": 1, "owner_id": 1},
            {"track_id": 2, "owner_id": 2},
            {"track_id": 3, "owner_id": 3},
            {"track_id": 4, "owner_id": 4},
            {"track_id": 5, "owner_id": 5},
            {"track_id": 6, "owner_id": 2},
            {"track_id": 7, "owner_id": 3},
            {"track_id": 8, "owner_id": 3},
            {"track_id": 9, "is_unlisted": True, "owner_id": 3},
            {"track_id": 11, "owner_id": 1},
            {"track_id": 12, "owner_id": 2},
            {"track_id": 13, "owner_id": 3},
            {"track_id": 14, "owner_id": 4},
            {"track_id": 15, "owner_id": 5},
        ],
        "playlists": playlists,
        "users": [
            {"user_id": uid, "handle": f"user{uid}"} for uid in range(1, 6)
        ],
        "follows": [
            {
                "follower_user_id": src,
                "followee_user_id": dst,
                "created_at": datetime.now() - timedelta(days=8),
            }
            for src, dst in (
                (1, 2), (1, 3), (2, 3), (2, 4),
                (3, 6), (4, 5), (5, 1), (6, 3),
            )
        ],
        "reposts": [
            {"repost_item_id": item, "repost_type": kind, "user_id": uid}
            for item, kind, uid in (
                (1, "track", 2), (1, "playlist", 2), (3, "track", 3),
                (1, "playlist", 3), (4, "track", 1), (5, "track", 1),
                (6, "track", 1),
            )
        ],
        "saves": [
            {"save_item_id": item, "save_type": kind, "user_id": uid}
            for item, kind, uid in (
                (1, "track", 2), (1, "track", 3), (4, "track", 1),
                (5, "track", 1), (6, "track", 1), (1, "playlist", 4),
                (2, "playlist", 3), (3, "playlist", 2), (4, "playlist", 1),
                (5, "playlist", 2),
            )
        ],
        "plays": plays,
    }

    populate_mock_db(db, test_entities, BLOCK_NUMBER + 1)
    bus = ChallengeEventBus(redis_conn)

    # Register events with the bus
    bus.register_listener(
        ChallengeEvent.trending_underground,
        trending_underground_track_challenge_manager,
    )
    bus.register_listener(
        ChallengeEvent.trending_track, trending_track_challenge_manager)
    bus.register_listener(
        ChallengeEvent.trending_playlist, trending_playlist_challenge_manager)

    trending_date = datetime.fromisoformat("2021-08-20")

    with db.scoped_session() as session:
        _update_aggregate_plays(session)
        _update_aggregate_track(session)
        _update_aggregate_user(session)
        session.execute("REFRESH MATERIALIZED VIEW aggregate_interval_plays")
        session.execute("REFRESH MATERIALIZED VIEW trending_params")
        track_versions = trending_strategy_factory.get_versions_for_type(
            TrendingType.TRACKS).keys()

        for version in track_versions:
            strategy = trending_strategy_factory.get_strategy(
                TrendingType.TRACKS, version)
            if strategy.use_mat_view:
                strategy.update_track_score_query(session)

        session.commit()

    enqueue_trending_challenges(db, redis_conn, bus, trending_date)

    with db.scoped_session() as session:
        session.query(Challenge).filter(
            or_(
                Challenge.id == "tp",
                Challenge.id == "tt",
                Challenge.id == "tut",
            )).update({
                "active": True,
                "starting_block": BLOCK_NUMBER
            })
        bus.process_events(session)
        session.flush()
        trending_tracks = (session.query(TrendingResult).filter(
            TrendingResult.type == str(TrendingType.TRACKS)).all())
        assert len(trending_tracks) == 5

        tt_challenges = (session.query(UserChallenge).filter(
            UserChallenge.challenge_id == "tt").all())
        assert len(tt_challenges) == 5
        # Each of ranks 1-5 must appear exactly once as a specifier.
        expected_specifiers = {f"2021-08-20:{rank}" for rank in range(1, 6)}
        for challenge in tt_challenges:
            assert challenge.specifier in expected_specifiers
            expected_specifiers.remove(challenge.specifier)

        trending_playlists = (session.query(TrendingResult).filter(
            TrendingResult.type == str(TrendingType.PLAYLISTS)).all())
        assert len(trending_playlists) == 5
def test_index_aggregate_track_update_with_only_aggregate_track(app):
    """Test that aggregate_track will not be manipulated when there is no other data"""

    with app.app_context():
        db = get_db()

    entities = {
        "aggregate_track": [
            {"track_id": track_id, "repost_count": 9, "save_count": 9}
            for track_id in (1, 2, 3)
        ],
        "indexing_checkpoints": [{
            "tablename": AGGREGATE_TRACK,
            "last_checkpoint": 9
        }],
    }

    populate_mock_db(db, entities)

    with db.scoped_session() as session:
        rows: List[AggregateTrack] = (
            session.query(AggregateTrack)
            .order_by(AggregateTrack.track_id)
            .all())
        assert len(rows) == 3, "Test that entities exist as expected"

        _update_aggregate_track(session)

        rows = (
            session.query(AggregateTrack)
            .order_by(AggregateTrack.track_id)
            .all())
        assert (
            len(rows) == 3
        ), "Test zero-modifications since last_checkpoint is in the future"

        prev_id_checkpoint = get_last_indexed_checkpoint(
            session, AGGREGATE_TRACK)
        assert prev_id_checkpoint == 9

    # Reset the checkpoint to zero and re-run: with no track data present,
    # the aggregate rows must still survive untouched.
    entities = {
        "indexing_checkpoints": [{
            "tablename": AGGREGATE_TRACK,
            "last_checkpoint": 0
        }],
    }
    populate_mock_db(db, entities)

    with db.scoped_session() as session:
        rows = (
            session.query(AggregateTrack)
            .order_by(AggregateTrack.track_id)
            .all())
        assert (
            len(rows) == 3
        ), "Test that entities exist as expected, even though checkpoint has been reset"

        _update_aggregate_track(session)

        rows = (
            session.query(AggregateTrack)
            .order_by(AggregateTrack.track_id)
            .all())
        assert (
            len(rows) == 3
        ), "Test that aggregate_track has not been changed due to lack of track data"
        for row in rows:
            assert (
                row.repost_count == 9
            ), "Test that aggregate_track has not been changed due to lack of track data"
            assert (
                row.save_count == 9
            ), "Test that aggregate_track has not been changed due to lack of track data"

        prev_id_checkpoint = get_last_indexed_checkpoint(
            session, AGGREGATE_TRACK)
        assert prev_id_checkpoint == 0
def test_index_aggregate_track_update_with_extra_user(app):
    """Test that stale pre-seeded aggregate_track rows are overwritten.

    Seeds aggregate_track with deliberately wrong counts (9/9) for five
    tracks, then runs _update_aggregate_track from checkpoint 0 and relies
    on basic_tests to verify every row was recomputed from the activity in
    basic_entities rather than left at the seeded values.
    """

    with app.app_context():
        db = get_db()

    entities = deepcopy(basic_entities)
    entities.update({
        "indexing_checkpoints": [{
            "tablename": AGGREGATE_TRACK,
            "last_checkpoint": 0
        }],
        # Stale rows for tracks 1-5: counts are intentionally wrong (9) so
        # the assertions below can tell whether the update recomputed them.
        "aggregate_track": [
            {
                "track_id": track_id,
                "repost_count": 9,
                "save_count": 9,
            }
            for track_id in range(1, 6)
        ],
    })

    with db.scoped_session() as session:
        results: List[AggregateTrack] = (
            session.query(AggregateTrack).order_by(
                AggregateTrack.track_id).all())
        assert len(results) == 0, "Test that we start with clean tables"

    populate_mock_db(db, entities)

    with db.scoped_session() as session:
        # Confirm the seeded (stale) rows landed before running the update.
        results: List[AggregateTrack] = (
            session.query(AggregateTrack).order_by(
                AggregateTrack.track_id).all())
        assert len(
            results) == 5, "Test that aggregate_track entities are populated"
        for result in results:
            assert result.repost_count == 9, "Test entities were populated correctly"
            assert result.save_count == 9, "Test entities were populated correctly"

        _update_aggregate_track(session)

    with db.scoped_session() as session:
        # basic_tests asserts the recomputed per-track counts and checkpoint.
        basic_tests(session, last_checkpoint=9)
def test_index_aggregate_track_empty_activity(app):
    """Test that a populated tracks table without activity won't break"""

    with app.app_context():
        db = get_db()

    # Five tracks (one deleted, one unlisted), no reposts/saves at all, and
    # a checkpoint already ahead of every track's blocknumber.
    track_rows = [
        {"track_id": 1, "owner_id": 1, "is_current": True},
        {"track_id": 2, "owner_id": 1, "is_current": True},
        {"track_id": 3, "owner_id": 1, "is_current": True, "is_delete": True},
        {"track_id": 4, "owner_id": 2, "is_current": True},
        {"track_id": 5, "owner_id": 1, "is_current": True, "is_unlisted": True},
    ]
    populate_mock_db(
        db,
        {
            "tracks": track_rows,
            "indexing_checkpoints": [{
                "tablename": AGGREGATE_TRACK,
                "last_checkpoint": 10
            }],
        },
        block_offset=6,
    )

    with db.scoped_session() as session:
        _update_aggregate_track(session)

        rows: List[AggregateTrack] = (
            session.query(AggregateTrack)
            .order_by(AggregateTrack.track_id)
            .all())
        assert (
            len(rows) == 0
        ), "Test that tracks updated on blocks previous to '10' will not be targeted"

        prev_id_checkpoint = get_last_indexed_checkpoint(session, AGGREGATE_TRACK)
        assert prev_id_checkpoint == 10

    # Rewind the checkpoint so the same tracks fall back into range.
    populate_mock_db(
        db,
        {
            "indexing_checkpoints": [{
                "tablename": AGGREGATE_TRACK,
                "last_checkpoint": 1
            }],
        },
    )

    with db.scoped_session() as session:
        _update_aggregate_track(session)

        rows: List[AggregateTrack] = (
            session.query(AggregateTrack)
            .order_by(AggregateTrack.track_id)
            .all())
        assert (
            len(rows) == 4
        ), "Test that tracks updated on blocks after '1' will be targeted"

        prev_id_checkpoint = get_last_indexed_checkpoint(session, AGGREGATE_TRACK)
        assert prev_id_checkpoint == 10
def test_index_aggregate_track_populate(app):
    """Test that we should populate tracks from empty.

    Walks four scenarios in sequence: initial population from basic_entities,
    deleting a track (its row disappears), reposting the deleted track (still
    no row), and undeleting it (row reappears including the new repost).
    """

    with app.app_context():
        db = get_db()

    with db.scoped_session() as session:
        results: List[AggregateTrack] = (
            session.query(AggregateTrack).order_by(
                AggregateTrack.track_id).all())

        assert (len(results) == 0
                ), "Test aggregate_track is empty before populate_mock_db()"

    # create db entries based on entities
    populate_mock_db(db, basic_entities, block_offset=3)

    # NOTE(review): presumably block_offset=3 plus the blocks consumed by
    # basic_entities puts the head block at 12 — confirm against
    # populate_mock_db / basic_entities if these fixtures change.
    last_checkpoint = 12
    with db.scoped_session() as session:
        # confirm nothing exists before _update_aggregate_track()
        results: List[AggregateTrack] = (
            session.query(AggregateTrack).order_by(
                AggregateTrack.track_id).all())
        assert (
            len(results) == 0
        ), "Test aggregate_track is empty before _update_aggregate_track()"

        # trigger celery task
        _update_aggregate_track(session)

        # run basic tests against basic_entities
        basic_tests(session, last_checkpoint=last_checkpoint)

    # delete a track
    entities = {
        "tracks": [
            {
                "track_id": 2,
                "owner_id": 1,
                "is_current": True,
                "is_delete": True,
            },
        ],
    }
    populate_mock_db(db, entities)
    # each populate_mock_db call above advances the head block by one
    last_checkpoint += 1

    # confirm track 2 no longer has a row in aggregate_track
    with db.scoped_session() as session:
        _update_aggregate_track(session)

        results: List[AggregateTrack] = (
            session.query(AggregateTrack).order_by(
                AggregateTrack.track_id).all())

        assert len(results) == 3

        assert results[0].track_id == 1
        assert results[0].repost_count == 3
        assert results[0].save_count == 1

        assert results[1].track_id == 4
        assert results[1].repost_count == 0
        assert results[1].save_count == 4

        assert results[2].track_id == 5
        assert results[2].repost_count == 0
        assert results[2].save_count == 0

        prev_id_checkpoint = get_last_indexed_checkpoint(
            session, AGGREGATE_TRACK)
        assert prev_id_checkpoint == last_checkpoint

    # repost a deleted track
    entities = {
        "reposts": [
            {
                "repost_item_id": 2,
                "repost_type": "track",
                "user_id": 2,
                "is_current": True,
            },
        ],
    }
    populate_mock_db(db, entities)
    last_checkpoint += 1

    # confirm track 2 still no longer has a row in aggregate_track
    with db.scoped_session() as session:
        _update_aggregate_track(session)

        results: List[AggregateTrack] = (
            session.query(AggregateTrack).order_by(
                AggregateTrack.track_id).all())

        # same three rows as before: the repost targets a deleted track
        assert len(results) == 3

        assert results[0].track_id == 1
        assert results[0].repost_count == 3
        assert results[0].save_count == 1

        assert results[1].track_id == 4
        assert results[1].repost_count == 0
        assert results[1].save_count == 4

        assert results[2].track_id == 5
        assert results[2].repost_count == 0
        assert results[2].save_count == 0

        prev_id_checkpoint = get_last_indexed_checkpoint(
            session, AGGREGATE_TRACK)
        assert prev_id_checkpoint == last_checkpoint

    # undelete a track
    entities = {
        "tracks": [
            {
                "track_id": 2,
                "owner_id": 1,
                "is_current": True,
                "is_delete": False,
            },
        ],
    }
    populate_mock_db(db, entities)
    last_checkpoint += 1

    # confirm track 2 has a row in aggregate_track again, with an additional repost
    with db.scoped_session() as session:
        _update_aggregate_track(session)

        results: List[AggregateTrack] = (
            session.query(AggregateTrack).order_by(
                AggregateTrack.track_id).all())

        assert len(results) == 4

        assert results[0].track_id == 1
        assert results[0].repost_count == 3
        assert results[0].save_count == 1

        # track 2 is back, carrying the repost made while it was deleted
        assert results[1].track_id == 2
        assert results[1].repost_count == 1
        assert results[1].save_count == 0

        assert results[2].track_id == 4
        assert results[2].repost_count == 0
        assert results[2].save_count == 4

        assert results[3].track_id == 5
        assert results[3].repost_count == 0
        assert results[3].save_count == 0

        prev_id_checkpoint = get_last_indexed_checkpoint(
            session, AGGREGATE_TRACK)
        assert prev_id_checkpoint == last_checkpoint