def test_populate_track_metadata(app):
    """Tests that populate_track_metadata works after aggregate_user refresh"""
    with app.app_context():
        db = get_db()

    # Seed tracks/users plus the social actions (reposts, saves, follows)
    # whose aggregates the metadata population should reflect.
    test_entities = {
        'tracks': [
            {"track_id": 1, "owner_id": 1},
            {"track_id": 2, "owner_id": 1},
            {"track_id": 3, "owner_id": 2},
            {"track_id": 4, "owner_id": 2},
            {"track_id": 5, "owner_id": 2},
            {"track_id": 6, "owner_id": 2},
            {"track_id": 7, "owner_id": 3},
            {"track_id": 8, "owner_id": 3},
            {"track_id": 9, "owner_id": 3},
            {"track_id": 10, "is_unlisted": True, "owner_id": 3},
        ],
        'users': [
            {'user_id': 1, 'handle': 'user1'},
            {'user_id': 2, 'handle': 'user2'},
            {'user_id': 3, 'handle': 'user3'},
            {'user_id': 4, 'handle': 'user4'},
        ],
        'reposts': [
            {"repost_item_id": 1, "repost_type": 'track', "user_id": 2},
            {"repost_item_id": 1, "repost_type": 'track', "user_id": 3},
            {"repost_item_id": 2, "repost_type": 'track', "user_id": 1},
        ],
        'saves': [
            {"save_item_id": 1, "save_type": 'track', "user_id": 2},
            {"save_item_id": 1, "save_type": 'track', "user_id": 3},
            {"save_item_id": 3, "save_type": 'track', "user_id": 2},
            {"save_item_id": 3, "save_type": 'track', "user_id": 1},
        ],
        'follows': [
            {"follower_user_id": 1, "followee_user_id": 2},
            {"follower_user_id": 1, "followee_user_id": 3},
        ],
    }
    populate_mock_db(db, test_entities)

    with db.scoped_session() as session:
        session.execute("REFRESH MATERIALIZED VIEW aggregate_track")

        # Anonymous request (no current user): only counts are populated.
        track_ids = [1, 2, 3]
        tracks = [{"track_id": 1}, {"track_id": 2}, {"track_id": 3}]
        tracks = populate_track_metadata(session, track_ids, tracks, None)
        assert len(tracks) == 3

        assert tracks[0]['track_id'] == 1
        assert tracks[0][response_name_constants.repost_count] == 2
        assert tracks[0][response_name_constants.save_count] == 2
        assert tracks[0][response_name_constants.play_count] == 0

        assert tracks[1]['track_id'] == 2
        assert tracks[1][response_name_constants.repost_count] == 1
        assert tracks[1][response_name_constants.save_count] == 0
        assert tracks[1][response_name_constants.play_count] == 0

        assert tracks[2]['track_id'] == 3
        assert tracks[2][response_name_constants.repost_count] == 0
        assert tracks[2][response_name_constants.save_count] == 2
        assert tracks[2][response_name_constants.play_count] == 0

        # Request as user 1: followee reposts/saves and current-user flags
        # should also be populated.
        curr_track_ids = [1, 2, 3]
        curr_tracks = [{"track_id": 1}, {"track_id": 2}, {"track_id": 3}]
        tracks = populate_track_metadata(session, curr_track_ids, curr_tracks, 1)
        assert len(tracks) == 3

        assert tracks[0]['track_id'] == 1
        repost_user_ids = [
            repost['user_id']
            for repost in tracks[0][response_name_constants.followee_reposts]
        ]
        repost_user_ids.sort()
        assert repost_user_ids == [2, 3]
        save_user_ids = [
            save['user_id']
            for save in tracks[0][response_name_constants.followee_saves]
        ]
        save_user_ids.sort()
        assert save_user_ids == [2, 3]
        assert tracks[0][response_name_constants.has_current_user_reposted] == False
        assert tracks[0][response_name_constants.has_current_user_saved] == False

        assert tracks[1]['track_id'] == 2
        assert tracks[1][response_name_constants.followee_reposts] == []
        assert tracks[1][response_name_constants.followee_saves] == []
        assert tracks[1][response_name_constants.has_current_user_reposted] == True
        assert tracks[1][response_name_constants.has_current_user_saved] == False

        assert tracks[2]['track_id'] == 3
        assert tracks[2][response_name_constants.followee_reposts] == []
        save_user_ids = [
            save['user_id']
            for save in tracks[2][response_name_constants.followee_saves]
        ]
        save_user_ids.sort()
        assert save_user_ids == [2]
        assert tracks[2][response_name_constants.has_current_user_reposted] == False
        assert tracks[2][response_name_constants.has_current_user_saved] == True
def test_playlist_indexing_skip_tx(app, mocker):
    """Tests that playlists skip cursed txs without throwing an error and are able to process other tx in block"""
    with app.app_context():
        db = get_db()
        cid_metadata_client = CIDMetadataClient({})
        web3 = Web3()
        challenge_event_bus = setup_challenge_bus()
        update_task = UpdateTask(cid_metadata_client, web3, challenge_event_bus)

    # Minimal stand-in for a web3 transaction object; only transactionHash
    # is read by the indexer.
    class TestPlaylistTransaction:
        pass

    blessed_tx_hash = (
        "0x34004dfaf5bb7cf9998eaf387b877d72d198c6508608e309df3f89e57def4db3"
    )
    blessed_tx = TestPlaylistTransaction()
    blessed_tx.transactionHash = update_task.web3.toBytes(hexstr=blessed_tx_hash)
    cursed_tx_hash = (
        "0x5fe51d735309d3044ae30055ad29101018a1a399066f6c53ea23800225e3a3be"
    )
    cursed_tx = TestPlaylistTransaction()
    cursed_tx.transactionHash = update_task.web3.toBytes(hexstr=cursed_tx_hash)
    test_block_number = 25278765
    test_block_timestamp = 1
    test_block_hash = update_task.web3.toHex(block_hash)
    test_playlist_factory_txs = [cursed_tx, blessed_tx]
    test_timestamp = datetime.utcfromtimestamp(test_block_timestamp)

    blessed_playlist_record = Playlist(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=blessed_tx_hash,
        playlist_id=91232,
        is_album=False,
        is_private=False,
        playlist_name="test",
        playlist_contents={},
        playlist_image_multihash=None,
        playlist_image_sizes_multihash=None,
        description="testing!",
        upc=None,
        is_current=True,
        is_delete=True,
        last_added_to=None,
        updated_at=test_timestamp,
        created_at=test_timestamp,
        playlist_owner_id=1,
    )
    # Deliberately invalid record (None in non-nullable columns) so its tx
    # fails to index and must be skipped.
    cursed_playlist_record = Playlist(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=cursed_tx_hash,
        playlist_id=91238,
        is_album=None,
        is_private=None,
        playlist_name=None,
        playlist_image_multihash=None,
        playlist_image_sizes_multihash=None,
        description=None,
        upc=None,
        is_current=True,
        is_delete=True,
        last_added_to=None,
        updated_at=test_timestamp,
        created_at=None,
    )

    mocker.patch(
        "src.tasks.playlists.lookup_playlist_record",
        side_effect=[cursed_playlist_record, blessed_playlist_record],
        autospec=True,
    )
    mocker.patch(
        "src.tasks.playlists.get_playlist_events_tx",
        side_effect=[
            [],  # no playlist created events
            [
                {
                    "args": AttrDict({
                        "_playlistId": cursed_playlist_record.playlist_id,
                    })
                },
            ],  # playlist deleted event
            [],
            [],
            [],
            [],
            [],
            [],
            [],
            [],
            [],  # second tx receipt
            [
                {
                    "args": AttrDict({
                        "_playlistId": blessed_playlist_record.playlist_id,
                    })
                },
            ],  # playlist deleted event
            [],
            [],
            [],
            [],
            [],
            [],
            [],
            [],
        ],
        autospec=True,
    )
    test_ipfs_metadata = {}
    test_blacklisted_cids = {}

    with db.scoped_session() as session:
        # NOTE: the original wrapped this in `try: ... except Exception:
        # assert False`, which swallowed the real traceback and even caught
        # the AssertionErrors from the asserts below. Let failures propagate
        # so pytest reports the actual error.
        current_block = Block(
            blockhash=test_block_hash,
            parenthash=test_block_hash,
            number=test_block_number,
            is_current=True,
        )
        session.add(current_block)

        (total_changes, updated_playlist_ids_set) = playlist_state_update(
            update_task,
            update_task,
            session,
            test_playlist_factory_txs,
            test_block_number,
            test_block_timestamp,
            block_hash,
            test_ipfs_metadata,
            test_blacklisted_cids,
        )

        # Only the blessed playlist should have been indexed.
        assert len(updated_playlist_ids_set) == 1
        assert list(updated_playlist_ids_set)[0] == blessed_playlist_record.playlist_id
        assert total_changes == 1
        # The cursed tx should be recorded as a node-level skipped transaction.
        assert (
            session.query(SkippedTransaction)
            .filter(
                SkippedTransaction.txhash == cursed_playlist_record.txhash,
                SkippedTransaction.level == SkippedTransactionLevel.node,
            )
            .first()
        )
        assert (
            session.query(Playlist)
            .filter(Playlist.playlist_id == blessed_playlist_record.playlist_id)
            .first()
        )
        assert (
            session.query(Playlist)
            .filter(Playlist.playlist_id == cursed_playlist_record.playlist_id)
            .first()
        ) is None
def test_get_repost_feed_for_user(app):
    """Tests that a repost feed for a user can be queried"""
    with app.app_context():
        db = get_db()

    test_entities = {
        "reposts": [
            # Note these reposts are in chronological order in addition
            # so the repost feed should pull them "backwards" for reverse chronological
            # sort order.
            {"user_id": 1, "repost_item_id": 5, "repost_type": "track"},
            {"user_id": 1, "repost_item_id": 2, "repost_type": "track"},
            {"user_id": 1, "repost_item_id": 3, "repost_type": "track"},
            {"user_id": 1, "repost_item_id": 1, "repost_type": "track"},
            {"user_id": 1, "repost_item_id": 4, "repost_type": "track"},
            {"user_id": 1, "repost_item_id": 4, "repost_type": "playlist"},
            {"user_id": 1, "repost_item_id": 8, "repost_type": "album"},
            {"user_id": 1, "repost_item_id": 6, "repost_type": "track"},
        ],
        "tracks": [
            {"track_id": 1, "title": "track 1"},
            {"track_id": 2, "title": "track 2"},
            {"track_id": 3, "title": "track 3"},
            {"track_id": 4, "title": "track 4"},
            {"track_id": 5, "title": "track 5"},
            {"track_id": 6, "title": "track 6"},
            {"track_id": 7, "title": "track 7"},
            {"track_id": 8, "title": "track 8"},
        ],
        "playlists": [
            {"playlist_id": 1, "playlist_name": "playlist 1"},
            {"playlist_id": 2, "playlist_name": "playlist 2"},
            {"playlist_id": 3, "playlist_name": "playlist 3"},
            {"playlist_id": 4, "playlist_name": "playlist 4"},
            {"playlist_id": 5, "playlist_name": "playlist 5"},
            {"playlist_id": 6, "playlist_name": "playlist 6"},
            {"playlist_id": 7, "playlist_name": "playlist 7"},
            {"playlist_id": 8, "playlist_name": "album 8"},
        ],
    }
    populate_mock_db(db, test_entities)

    with db.scoped_session() as session:
        repost_feed = _get_repost_feed_for_user(
            session, 1, {"limit": 10, "offset": 0}
        )

        # Feed should come back newest repost first (reverse chronological).
        assert repost_feed[0]["title"] == "track 6"
        assert repost_feed[1]["playlist_name"] == "album 8"
        assert repost_feed[2]["playlist_name"] == "playlist 4"
        assert repost_feed[3]["title"] == "track 4"
        assert repost_feed[4]["title"] == "track 1"
        assert repost_feed[5]["title"] == "track 3"
        assert repost_feed[6]["title"] == "track 2"
        assert repost_feed[7]["title"] == "track 5"
def test_profile_completion_challenge_with_playlists(app):
    """Walks a user through every step of the profile-completion challenge
    (name, repost, favorite, 5 follows, picture, bio, cover photo) and checks
    the step count after each event, including that completion is sticky."""
    redis_conn = redis.Redis.from_url(url=REDIS_URL)

    # create user
    with app.app_context():
        db = get_db()

    block = Block(blockhash="0x1", number=BLOCK_NUMBER)
    user = User(
        blockhash="0x1",
        blocknumber=BLOCK_NUMBER,
        txhash="xyz",
        user_id=1,
        is_current=True,
        handle="TestHandle",
        handle_lc="testhandle",
        wallet="0x123",
        is_creator=False,
        is_verified=False,
        name="test_name",
        created_at=datetime.now(),
        updated_at=datetime.now(),
    )

    with db.scoped_session() as session:
        bus = ChallengeEventBus(redis_conn)

        # set challenge as active for purposes of test
        session.query(Challenge).filter(Challenge.id == "profile-completion").update(
            {"active": True, "starting_block": BLOCK_NUMBER}
        )

        # Register events with the bus
        bus.register_listener(ChallengeEvent.profile_update, profile_challenge_manager)
        bus.register_listener(ChallengeEvent.repost, profile_challenge_manager)
        bus.register_listener(ChallengeEvent.follow, profile_challenge_manager)
        bus.register_listener(ChallengeEvent.favorite, profile_challenge_manager)

        session.add(block)
        session.flush()
        session.add(user)

        # Process dummy event just to get this thing initted
        bus.dispatch(ChallengeEvent.profile_update, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(session, ["1"])[0]
        # We should have completed a single step (name)
        assert state.current_step_count == 1 and not state.is_complete

        # Do a repost
        repost = Repost(
            blockhash="0x1",
            blocknumber=BLOCK_NUMBER,
            user_id=1,
            repost_item_id=1,
            repost_type=RepostType.playlist,
            is_current=True,
            is_delete=False,
            created_at=datetime.now(),
        )
        session.add(repost)
        session.flush()
        bus.dispatch(ChallengeEvent.repost, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(session, ["1"])[0]
        assert state.current_step_count == 2 and not state.is_complete

        # Do a save
        save = Save(
            blockhash="0x1",
            blocknumber=BLOCK_NUMBER,
            user_id=1,
            save_item_id=1,
            save_type=SaveType.playlist,
            is_current=True,
            is_delete=False,
            created_at=datetime.now(),
        )
        session.add(save)
        session.flush()
        bus.dispatch(ChallengeEvent.favorite, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        session.flush()
        state = profile_challenge_manager.get_user_challenge_state(session, ["1"])[0]
        assert state.current_step_count == 3 and not state.is_complete

        # Do 1 follow, then 5 total follows
        follow = Follow(
            blockhash="0x1",
            blocknumber=BLOCK_NUMBER,
            is_current=True,
            is_delete=False,
            created_at=datetime.now(),
            follower_user_id=1,
            followee_user_id=2,
        )
        session.add(follow)
        session.flush()
        bus.dispatch(ChallengeEvent.follow, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        session.flush()
        state = profile_challenge_manager.get_user_challenge_state(session, ["1"])[0]
        # Assert 1 follow didn't do anything
        assert state.current_step_count == 3 and not state.is_complete

        follows = [
            Follow(
                blockhash="0x1",
                blocknumber=BLOCK_NUMBER,
                is_current=True,
                is_delete=False,
                created_at=datetime.now(),
                follower_user_id=1,
                followee_user_id=3,
            ),
            Follow(
                blockhash="0x1",
                blocknumber=BLOCK_NUMBER,
                is_current=True,
                is_delete=False,
                created_at=datetime.now(),
                follower_user_id=1,
                followee_user_id=4,
            ),
            Follow(
                blockhash="0x1",
                blocknumber=BLOCK_NUMBER,
                is_current=True,
                is_delete=False,
                created_at=datetime.now(),
                follower_user_id=1,
                followee_user_id=5,
            ),
            Follow(
                blockhash="0x1",
                blocknumber=BLOCK_NUMBER,
                is_current=True,
                is_delete=False,
                created_at=datetime.now(),
                follower_user_id=1,
                followee_user_id=6,
            ),
        ]
        session.add_all(follows)
        session.flush()
        bus.dispatch(ChallengeEvent.follow, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(session, ["1"])[0]
        assert state.current_step_count == 4 and not state.is_complete

        # profile_picture
        session.query(User).filter(User.user_id == 1).update(
            {"profile_picture": "profilepictureurl"}
        )
        session.flush()
        bus.dispatch(ChallengeEvent.profile_update, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(session, ["1"])[0]
        assert state.current_step_count == 5 and not state.is_complete

        # profile description
        session.query(User).filter(User.user_id == 1).update(
            {"bio": "profiledescription"}
        )
        session.flush()
        bus.dispatch(ChallengeEvent.profile_update, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(session, ["1"])[0]
        assert state.current_step_count == 6 and not state.is_complete

        # Undo it, ensure that our count goes down
        session.query(User).filter(User.user_id == 1).update({"bio": None})
        session.flush()
        bus.dispatch(ChallengeEvent.profile_update, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(session, ["1"])[0]
        assert state.current_step_count == 5 and not state.is_complete

        # profile_cover_photo
        session.query(User).filter(User.user_id == 1).update(
            {"bio": "profiledescription", "cover_photo": "test_cover_photo"}
        )
        session.flush()
        bus.dispatch(ChallengeEvent.profile_update, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(session, ["1"])[0]
        assert state.current_step_count == 7 and state.is_complete == True

        # ensure that if we lose some data now that the thing is complete, we don't change the status of the challenge
        session.query(User).filter(User.user_id == 1).update({"cover_photo": None})
        session.flush()
        bus.dispatch(ChallengeEvent.profile_update, BLOCK_NUMBER, 1)
        bus.flush()
        bus.process_events(session)
        state = profile_challenge_manager.get_user_challenge_state(session, ["1"])[0]
        assert state.current_step_count == 7 and state.is_complete == True
def test_user_replica_set_indexing_skip_tx(app, mocker):
    """Tests that URSM indexing skips cursed txs without throwing an error and are able to process other tx in block"""
    with app.app_context():
        db = get_db()
        cid_metadata_client = CIDMetadataClient({})
        web3 = Web3()
        challenge_event_bus = setup_challenge_bus()
        update_task = UpdateTask(cid_metadata_client, web3, challenge_event_bus)

    # Minimal stand-in for a web3 transaction object; only transactionHash
    # is read by the indexer.
    class TestUserReplicaSetTransaction:
        pass

    blessed_user_tx_hash = (
        "0x34004dfaf5bb7cf9998eaf387b877d72d198c6508608e309df3f89e57def4db3"
    )
    blessed_user_tx = TestUserReplicaSetTransaction()
    blessed_user_tx.transactionHash = update_task.web3.toBytes(
        hexstr=blessed_user_tx_hash
    )
    cursed_user_tx_hash = (
        "0x5fe51d735309d3044ae30055ad29101018a1a399066f6c53ea23800225e3a3be"
    )
    cursed_user_tx = TestUserReplicaSetTransaction()
    cursed_user_tx.transactionHash = update_task.web3.toBytes(
        hexstr=cursed_user_tx_hash
    )
    blessed_cnode_tx_hash = (
        "0x42c66d0542383f06e22ef6a235ddba238050d85562bcbd18667c9711c1daee72"
    )
    blessed_cnode_tx = TestUserReplicaSetTransaction()
    blessed_cnode_tx.transactionHash = update_task.web3.toBytes(
        hexstr=blessed_cnode_tx_hash
    )
    cursed_cnode_tx_hash = (
        "0xa022761e229302abc2490f8bdc7ec0e642916b0f5cbc2908ccd49498243c1806"
    )
    cursed_cnode_tx = TestUserReplicaSetTransaction()
    cursed_cnode_tx.transactionHash = update_task.web3.toBytes(
        hexstr=cursed_cnode_tx_hash
    )
    test_block_number = 25278765
    test_block_timestamp = 1
    test_block_hash = update_task.web3.toHex(block_hash)
    test_user_replica_set_mgr_txs = [
        cursed_user_tx,
        blessed_user_tx,
        cursed_cnode_tx,
        blessed_cnode_tx,
    ]
    test_timestamp = datetime.utcfromtimestamp(test_block_timestamp)
    test_wallet = "0x0birbchickemcatlet"

    blessed_user_record = User(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=blessed_user_tx_hash,
        user_id=1,
        name="tobey maguire",
        is_creator=False,
        is_current=True,
        updated_at=test_timestamp,
        created_at=test_timestamp,
    )
    # Deliberately invalid records (None in non-nullable columns) so their
    # txs fail to index and must be skipped.
    cursed_user_record = User(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=cursed_user_tx_hash,
        user_id=2,
        name="birb",
        is_current=None,
        is_creator=None,
        updated_at=test_timestamp,
        created_at=None,
    )
    blessed_content_node_record = URSMContentNode(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=blessed_cnode_tx_hash,
        is_current=True,
        cnode_sp_id=1,
        delegate_owner_wallet=test_wallet,
        owner_wallet=test_wallet,
        created_at=test_timestamp,
    )
    cursed_content_node_record = URSMContentNode(
        blockhash=test_block_hash,
        blocknumber=test_block_number,
        txhash=cursed_cnode_tx_hash,
        is_current=None,
        cnode_sp_id=2,
        delegate_owner_wallet=test_wallet,
        created_at=None,
    )

    mocker.patch(
        "src.tasks.user_replica_set.lookup_user_record",
        side_effect=[cursed_user_record, blessed_user_record],
        autospec=True,
    )
    mocker.patch(
        "src.tasks.user_replica_set.lookup_ursm_cnode",
        side_effect=[cursed_content_node_record, blessed_content_node_record],
        autospec=True,
    )
    mocker.patch(
        "src.tasks.user_replica_set.get_endpoint_string_from_sp_ids",
        return_value="http://localhost:4001,http://localhost:4002,",
        autospec=True,
    )
    mocker.patch(
        "src.tasks.user_replica_set.get_ursm_cnode_endpoint",
        return_value="http://localhost:4001,http://localhost:4002,",
        autospec=True,
    )
    mocker.patch(
        # because we do not have the real contract set up in the test
        # we mock the return value of this fn w events parsed from an imaginary tx
        "src.tasks.user_replica_set.get_user_replica_set_mgr_tx",
        side_effect=[
            [
                {
                    "args": AttrDict(
                        {
                            "_userId": cursed_user_record.user_id,
                            "_primaryId": 1,
                            "_secondaryIds": [2, 3, 4],
                            "_signer": "mobey taguire",
                        }
                    )
                },
            ],  # first tx receipt - update replica set
            [],  # first tx receipt - update content node
            [
                {
                    "args": AttrDict(
                        {
                            "_userId": blessed_user_record.user_id,
                            "_primaryId": 1,
                            "_secondaryIds": [2, 3, 4],
                            "_signer": "dirsten kunst",
                        }
                    )
                },
            ],  # second tx receipt - update replica set
            [],  # second tx receipt - update content node
            [],  # third tx receipt - update replica set
            [
                {
                    "args": AttrDict(
                        {
                            "_cnodeSpId": cursed_content_node_record.cnode_sp_id,
                            "_cnodeDelegateOwnerWallet": test_wallet,
                            "_cnodeOwnerWallet": test_wallet,
                            "_proposer1DelegateOwnerWallet": test_wallet,
                            "_proposer2DelegateOwnerWallet": test_wallet,
                            "_proposer3DelegateOwnerWallet": test_wallet,
                            "_proposerSpIds": [1, 2],
                        }
                    )
                },
            ],  # third tx receipt - update content node
            [],  # fourth tx receipt - update replica set
            [
                {
                    "args": AttrDict(
                        {
                            "_cnodeSpId": blessed_content_node_record.cnode_sp_id,
                            "_cnodeDelegateOwnerWallet": test_wallet,
                            "_cnodeOwnerWallet": test_wallet,
                            "_proposer1DelegateOwnerWallet": test_wallet,
                            "_proposer2DelegateOwnerWallet": test_wallet,
                            "_proposer3DelegateOwnerWallet": test_wallet,
                            "_proposerSpIds": [1, 2],
                        }
                    )
                },
            ],  # fourth tx receipt - update content node
        ],
        autospec=True,
    )
    test_ipfs_metadata = {}
    test_blacklisted_cids = {}

    with db.scoped_session() as session:
        # NOTE: the original wrapped this in `try: ... except Exception:
        # assert False`, which swallowed the real traceback and even caught
        # the AssertionErrors from the asserts below. Let failures propagate
        # so pytest reports the actual error.
        current_block = Block(
            blockhash=test_block_hash,
            parenthash=test_block_hash,
            number=test_block_number,
            is_current=True,
        )
        session.add(current_block)

        (total_changes, updated_user_ids_set) = user_replica_set_state_update(
            update_task,
            update_task,
            session,
            test_user_replica_set_mgr_txs,
            test_block_number,
            test_block_timestamp,
            block_hash,
            test_ipfs_metadata,
            test_blacklisted_cids,
        )

        # Only the blessed user + blessed content node should be indexed.
        assert len(updated_user_ids_set) == 1
        assert list(updated_user_ids_set)[0] == blessed_user_record.user_id
        assert total_changes == 2
        # Both cursed txs are recorded as node-level skipped transactions.
        assert (
            session.query(SkippedTransaction)
            .filter(
                SkippedTransaction.txhash == cursed_cnode_tx_hash,
                SkippedTransaction.level == SkippedTransactionLevel.node,
            )
            .first()
        )
        assert (
            session.query(SkippedTransaction)
            .filter(
                SkippedTransaction.txhash == cursed_user_tx_hash,
                SkippedTransaction.level == SkippedTransactionLevel.node,
            )
            .first()
        )
        assert (
            session.query(User)
            .filter(User.user_id == blessed_user_record.user_id)
            .first()
        )
        assert (
            session.query(User)
            .filter(User.user_id == cursed_user_record.user_id)
            .first()
        ) is None
        assert (
            session.query(URSMContentNode)
            .filter(
                URSMContentNode.cnode_sp_id == blessed_content_node_record.cnode_sp_id
            )
            .first()
        )
        assert (
            session.query(URSMContentNode)
            .filter(
                URSMContentNode.cnode_sp_id == cursed_content_node_record.cnode_sp_id
            )
            .first()
        ) is None
def test_index_aggregate_user_update_with_extra_user(app):
    """Test that the entire aggregate_user table is not truncated"""
    with app.app_context():
        db = get_db()

    entities = deepcopy(basic_entities)
    # Pre-seed aggregate rows (all 9s) plus a zero checkpoint; user 3 has no
    # underlying activity, so its stale row must survive the update untouched.
    entities.update(
        {
            "indexing_checkpoints": [
                {"tablename": AGGREGATE_USER, "last_checkpoint": 0}
            ],
            "aggregate_user": [
                {
                    "user_id": 1,
                    "track_count": 9,
                    "playlist_count": 9,
                    "album_count": 9,
                    "follower_count": 9,
                    "following_count": 9,
                    "repost_count": 9,
                    "track_save_count": 9,
                },
                {
                    "user_id": 2,
                    "track_count": 9,
                    "playlist_count": 9,
                    "album_count": 9,
                    "follower_count": 9,
                    "following_count": 9,
                    "repost_count": 9,
                    "track_save_count": 9,
                },
                {
                    "user_id": 3,
                    "track_count": 9,
                    "playlist_count": 9,
                    "album_count": 9,
                    "follower_count": 9,
                    "following_count": 9,
                    "repost_count": 9,
                    "track_save_count": 9,
                },
            ],
        }
    )

    with db.scoped_session() as session:
        results: List[AggregateUser] = (
            session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        )
        assert len(results) == 0, "Test that we start with clean tables"

    populate_mock_db(db, entities)

    with db.scoped_session() as session:
        results: List[AggregateUser] = (
            session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        )
        assert len(results) == 3, "Test that aggregate_user entities are populated"

        _update_aggregate_user(session)

    with db.scoped_session() as session:
        results: List[AggregateUser] = (
            session.query(AggregateUser).order_by(AggregateUser.user_id).all()
        )
        assert len(results) == 3

        # Users 1 and 2 are recomputed from the seeded activity...
        assert results[0].user_id == 1
        assert results[0].track_count == 2
        assert results[0].playlist_count == 1
        assert results[0].album_count == 1
        assert results[0].follower_count == 1
        assert results[0].following_count == 1
        assert results[0].repost_count == 0
        assert results[0].track_save_count == 0

        assert results[1].user_id == 2
        assert results[1].track_count == 1
        assert results[1].playlist_count == 0
        assert results[1].album_count == 0
        assert results[1].follower_count == 1
        assert results[1].following_count == 1
        assert results[1].repost_count == 2
        assert results[1].track_save_count == 1

        # ...while user 3's pre-existing row is left intact (not truncated).
        assert results[2].user_id == 3
        assert results[2].track_count == 9
        assert results[2].playlist_count == 9
        assert results[2].album_count == 9
        assert results[2].follower_count == 9
        assert results[2].following_count == 9
        assert results[2].repost_count == 9
        assert results[2].track_save_count == 9

        prev_id_checkpoint = get_last_indexed_checkpoint(session, AGGREGATE_USER)
        assert prev_id_checkpoint == 3
def test_get_user_signals(app):
    """Checks follower/following counts and profile-media flags returned by
    _get_user_signals for a variety of profile configurations."""
    with app.app_context():
        db = get_db()

    test_entities = {
        "users": [
            make_user(1, "user1", "wallet1"),
            make_user(2, "user2", "wallet2"),
            make_user(3, "user3", "wallet3"),
            make_user(4, "user4", "wallet4"),
            make_user(5, "user5", "wallet5"),
            make_user(
                6,
                "user6",
                "wallet6",
                profile_picture="Qm0123456789abcdef0123456789abcdef0123456789ab",
            ),
            make_user(
                7,
                "user7",
                "wallet7",
                profile_picture_sizes="Qm0123456789abcdef0123456789abcdef0123456789ab",
            ),
            make_user(
                8,
                "user8",
                "wallet8",
                cover_photo="Qm0123456789abcdef0123456789abcdef0123456789ab",
            ),
            make_user(
                9,
                "user9",
                "wallet9",
                cover_photo_sizes="Qm0123456789abcdef0123456789abcdef0123456789ab",
            ),
            make_user(
                10,
                "user10",
                "wallet10",
                profile_picture="Qm0123456789abcdef0123456789abcdef0123456789ab",
                cover_photo="Qm0123456789abcdef0123456789abcdef0123456789cd",
            ),
        ],
        "follows": [
            make_follow(2, 1),
            make_follow(3, 1),
            make_follow(5, 1),
            make_follow(1, 5),
            make_follow(2, 6),
            make_follow(3, 7),
            make_follow(4, 8),
            make_follow(5, 9),
            make_follow(10, 4),
        ],
    }
    populate_mock_db(db, test_entities)

    with db.scoped_session() as session:
        _update_aggregate_user(session)

        # user1: no media set, 3 followers, follows 1
        user_signals = _get_user_signals(session, "user1")
        assert user_signals["num_followers"] == 3
        assert user_signals["num_following"] == 1
        assert user_signals["has_profile_picture"] == False
        assert user_signals["has_cover_photo"] == False
        assert user_signals["wallet"] == "wallet1"

        # user6: profile_picture set
        user_signals = _get_user_signals(session, "user6")
        assert user_signals["num_followers"] == 1
        assert user_signals["num_following"] == 0
        assert user_signals["has_profile_picture"] == True
        assert user_signals["has_cover_photo"] == False
        assert user_signals["wallet"] == "wallet6"

        # user7: profile_picture_sizes also counts as a profile picture
        user_signals = _get_user_signals(session, "user7")
        assert user_signals["num_followers"] == 1
        assert user_signals["num_following"] == 0
        assert user_signals["has_profile_picture"] == True
        assert user_signals["has_cover_photo"] == False
        assert user_signals["wallet"] == "wallet7"

        # user8: cover_photo set
        user_signals = _get_user_signals(session, "user8")
        assert user_signals["num_followers"] == 1
        assert user_signals["num_following"] == 0
        assert user_signals["has_profile_picture"] == False
        assert user_signals["has_cover_photo"] == True
        assert user_signals["wallet"] == "wallet8"

        # user9: cover_photo_sizes also counts as a cover photo
        user_signals = _get_user_signals(session, "user9")
        assert user_signals["num_followers"] == 1
        assert user_signals["num_following"] == 0
        assert user_signals["has_profile_picture"] == False
        assert user_signals["has_cover_photo"] == True
        assert user_signals["wallet"] == "wallet9"

        # user10: both media set, follows 1 user, no followers
        user_signals = _get_user_signals(session, "user10")
        assert user_signals["num_followers"] == 0
        assert user_signals["num_following"] == 1
        assert user_signals["has_profile_picture"] == True
        assert user_signals["has_cover_photo"] == True
        assert user_signals["wallet"] == "wallet10"
def test_undisbursed_challenges(app):
    """Queries undisbursed challenges seeded by setup_challenges and checks
    ordering plus filtering by user_id and completed_blocknumber."""
    setup_challenges(app)

    with app.app_context():
        db = get_db()

    with db.scoped_session() as session:
        # Test that all undisbursed challenges are returned in order
        undisbursed = get_undisbursed_challenges(
            session,
            {
                "user_id": None,
                "limit": 10,
                "offset": 0,
                "completed_blocknumber": 99,
            },
        )
        expected = [
            {
                "challenge_id": "test_challenge_3",
                "user_id": 6,
                "specifier": "6",
                "amount": "5",
                "completed_blocknumber": 100,
                "handle": "TestHandle6",
                "wallet": "0x6",
            },
            {
                "challenge_id": "test_challenge_2",
                "user_id": 4,
                "specifier": "4",
                "amount": "5",
                "completed_blocknumber": 102,
                "handle": "TestHandle4",
                "wallet": "0x4",
            },
            {
                "challenge_id": "test_challenge_2",
                "user_id": 5,
                "specifier": "5",
                "amount": "5",
                "completed_blocknumber": 102,
                "handle": "TestHandle5",
                "wallet": "0x5",
            },
        ]
        assert expected == undisbursed

        # Test that it filters correctly by user_id
        undisbursed = get_undisbursed_challenges(
            session,
            {
                "user_id": 6,
                "limit": 10,
                "offset": 0,
                "completed_blocknumber": 99,
            },
        )
        expected = [
            {
                "challenge_id": "test_challenge_3",
                "user_id": 6,
                "specifier": "6",
                "amount": "5",
                "completed_blocknumber": 100,
                "handle": "TestHandle6",
                "wallet": "0x6",
            },
        ]
        assert expected == undisbursed

        # Test that it filters correctly by user_id & completed blocknumber
        undisbursed = get_undisbursed_challenges(
            session,
            {
                "user_id": 6,
                "limit": 10,
                "offset": 0,
                "completed_blocknumber": 101,
            },
        )
        expected = []
        assert expected == undisbursed
def setup_challenges(app):
    """Seeds blocks, three challenges, seven users (current and non-current
    versions), their user banks, and a mix of complete/incomplete user
    challenges for the undisbursed-challenge tests."""
    with app.app_context():
        db = get_db()
        populate_mock_db_blocks(db, 99, 110)

        challenges = [
            Challenge(
                id="test_challenge_1",
                type=ChallengeType.numeric,
                amount="5",
                step_count=3,
                active=False,
                starting_block=100,
            ),
            Challenge(
                id="test_challenge_2",
                type=ChallengeType.boolean,
                amount="5",
                active=True,
                starting_block=100,
            ),
            Challenge(
                id="test_challenge_3",
                type=ChallengeType.aggregate,
                amount="5",
                active=True,
                starting_block=100,
            ),
        ]

        # Stale (is_current=False) user rows alongside the current ones, so
        # queries must pick the is_current=True version.
        non_current_users = [
            User(
                blockhash=hex(99),
                blocknumber=99,
                txhash=f"xyz{i}",
                user_id=i,
                is_current=False,
                handle=f"TestHandle{i}",
                handle_lc=f"testhandle{i}",
                wallet=f"0x{i}",
                is_creator=False,
                is_verified=False,
                name=f"test_name{i}",
                created_at=datetime.now(),
                updated_at=datetime.now(),
            )
            for i in range(7)
        ]
        users = [
            User(
                blockhash=hex(99),
                blocknumber=99,
                txhash=f"xyz{i}",
                user_id=i,
                is_current=True,
                handle=f"TestHandle{i}",
                handle_lc=f"testhandle{i}",
                wallet=f"0x{i}",
                is_creator=False,
                is_verified=False,
                name=f"test_name{i}",
                created_at=datetime.now(),
                updated_at=datetime.now(),
            )
            for i in range(7)
        ]
        user_bank_accounts = [
            UserBankAccount(
                signature=f"0x{i}",
                ethereum_address=users[i].wallet,
                bank_account=f"0x{i}",
                created_at=datetime.now(),
            )
            for i in range(7)
        ]
        user_challenges = [
            UserChallenge(
                challenge_id="test_challenge_1",
                user_id=1,
                specifier="1",
                is_complete=False,
                current_step_count=1,
            ),
            UserChallenge(
                challenge_id="test_challenge_1",
                user_id=2,
                specifier="2",
                is_complete=True,
                current_step_count=3,
                completed_blocknumber=100,
            ),
            UserChallenge(
                challenge_id="test_challenge_2",
                user_id=3,
                specifier="3",
                is_complete=False,
            ),
            UserChallenge(
                challenge_id="test_challenge_2",
                user_id=4,
                specifier="4",
                is_complete=True,
                completed_blocknumber=102,
            ),
            UserChallenge(
                challenge_id="test_challenge_2",
                user_id=5,
                specifier="5",
                is_complete=True,
                completed_blocknumber=102,
            ),
            UserChallenge(
                challenge_id="test_challenge_3",
                user_id=6,
                specifier="6",
                is_complete=True,
                completed_blocknumber=100,
            ),
        ]

        with db.scoped_session() as session:
            # Flush challenges first so user_challenges can reference them.
            session.add_all(challenges)
            session.flush()
            session.add_all(non_current_users)
            session.add_all(users)
            session.add_all(user_bank_accounts)
            session.add_all(user_challenges)
def test_get_attestation(app):
    """Test get_attestation: happy path plus the documented failure modes."""
    with app.app_context():
        db = get_db()
        with db.scoped_session() as session:
            setup_db(session)

            # Tests:
            # - Happy path
            # - No user_challenge
            # - Challenge not finished
            # - No disbursement
            # - Invalid oracle
            oracle_address = "0x32a10e91820fd10366AC363eD0DEa40B2e598D22"
            # Register the oracle so it passes the allowed-oracle check.
            redis_handle.set(oracle_addresses_key, oracle_address)
            delegate_owner_wallet, signature = get_attestation(
                session,
                user_id=1,
                challenge_id="boolean_challenge_2",
                oracle_address=oracle_address,
                specifier="1",
            )

            # The attestation we expect the node to have signed.
            attestation = Attestation(
                amount="5",
                oracle_address=oracle_address,
                user_address="0x38C68fF3926bf4E68289672F75ee1543117dD9B3",
                challenge_id="boolean_challenge_2",
                challenge_specifier="1",
            )

            # Test happy path

            # confirm the attestation is what we think it should be
            config_owner_wallet = shared_config["delegate"]["owner_wallet"]
            config_private_key = shared_config["delegate"]["private_key"]

            # Ensure we returned the correct owner wallet
            assert delegate_owner_wallet == config_owner_wallet

            # Ensure we can derive the owner wallet from the signed stringified attestation
            attestation_bytes = attestation.get_attestation_bytes()
            to_sign_hash = Web3.keccak(attestation_bytes)
            private_key = keys.PrivateKey(HexBytes(config_private_key))
            public_key = keys.PublicKey.from_private(private_key)
            # NOTE(review): local name "signture_bytes" is misspelled (used
            # consistently below, so behavior is unaffected).
            signture_bytes = to_bytes(hexstr=signature)
            msg_signature = keys.Signature(signature_bytes=signture_bytes, vrs=None)
            recovered_pubkey = public_key.recover_from_msg_hash(
                message_hash=to_sign_hash, signature=msg_signature)
            assert (Web3.toChecksumAddress(
                recovered_pubkey.to_address()) == config_owner_wallet)

            # Test no matching user challenge
            with pytest.raises(AttestationError):
                get_attestation(
                    session,
                    user_id=1,
                    challenge_id="boolean_challenge_2",
                    oracle_address=oracle_address,
                    specifier="xyz",
                )

            # Test challenge not finished
            with pytest.raises(AttestationError):
                get_attestation(
                    session,
                    user_id=1,
                    challenge_id="boolean_challenge_3",
                    oracle_address=oracle_address,
                    specifier="1",
                )

            # Test challenge already disbursed
            with pytest.raises(AttestationError):
                get_attestation(
                    session,
                    user_id=1,
                    challenge_id="boolean_challenge_1",
                    oracle_address=oracle_address,
                    specifier="1",
                )

            # Test with bad AAO (oracle address not in the allowed set)
            with pytest.raises(AttestationError):
                get_attestation(
                    session,
                    user_id=1,
                    challenge_id="boolean_challenge_2",
                    oracle_address="wrong_oracle_address",
                    specifier="1",
                )
def test_search_user_tags(app): """Tests that search by tags works for users""" with app.app_context(): db = get_db() test_entities = { 'tracks': [{ "track_id": 1, "tags": "pop", "owner_id": 1 }, { "track_id": 2, "owner_id": 1, "tags": "pop,rock,electric" }, { "track_id": 3, "owner_id": 2 }, { "track_id": 4, "owner_id": 2, "tags": "funk,pop" }, { "track_id": 5, "owner_id": 2, "tags": "funk,pop" }, { "track_id": 6, "owner_id": 2, "tags": "funk,Funk,kpop" }, { "track_id": 7, "owner_id": 3, "tags": "pop" }, { "track_id": 8, "owner_id": 3, "tags": "kpop" }], 'users': [{ 'user_id': 1, 'handle': '1' }, { 'user_id': 2, 'handle': '2' }, { 'user_id': 3, 'handle': '3' }], 'follows': [{ "follower_user_id": 1, "followee_user_id": 2 }, { "follower_user_id": 1, "followee_user_id": 3 }, { "follower_user_id": 2, "followee_user_id": 3 }] } populate_mock_db(db, test_entities) with db.scoped_session() as session: session.execute("REFRESH MATERIALIZED VIEW tag_track_user") session.execute("REFRESH MATERIALIZED VIEW aggregate_plays") session.execute("REFRESH MATERIALIZED VIEW aggregate_user") args = { 'search_str': 'pop', 'current_user_id': None, 'user_tag_count': 2, 'limit': 10, 'offset': 0 } users = search_user_tags(session, args) assert len(users) == 2 assert users[0]['user_id'] == 2 # Fir. b/c user 2 has 1 follower assert users[1]['user_id'] == 1 # Sec. b/c user 1 has 0 followers
def test_track_tag_mat_view(app): """Tests that genre metrics can be queried""" with app.app_context(): db = get_db() test_entities = { "tracks": [ { "track_id": 1, "tags": "", "owner_id": 1 }, { "track_id": 2, "owner_id": 1, "tags": "pop,rock,electric" }, { "track_id": 3, "owner_id": 2 }, { "track_id": 4, "owner_id": 2, "tags": "funk,pop" }, { "track_id": 5, "owner_id": 2, "tags": "funk,pop" }, { "track_id": 6, "owner_id": 2, "tags": "funk,Funk,kpop" }, ] } populate_mock_db(db, test_entities) with db.scoped_session() as session: session.execute("REFRESH MATERIALIZED VIEW tag_track_user") user_1_tags = (session.query(TagTrackUserMatview).filter( TagTrackUserMatview.owner_id == 1).order_by( asc(TagTrackUserMatview.tag), asc(TagTrackUserMatview.track_id)).all()) user_2_tags = (session.query(TagTrackUserMatview).filter( TagTrackUserMatview.owner_id == 2).order_by( asc(TagTrackUserMatview.tag), asc(TagTrackUserMatview.track_id)).all()) user_4_tags = (session.query(TagTrackUserMatview).filter( TagTrackUserMatview.owner_id == 4).all()) assert len(user_1_tags) == 3 assert user_1_tags[0].tag == "electric" assert user_1_tags[0].track_id == 2 assert user_1_tags[1].tag == "pop" assert user_1_tags[1].track_id == 2 assert user_1_tags[2].tag == "rock" assert user_1_tags[2].track_id == 2 assert len(user_2_tags) == 6 assert user_2_tags[0].tag == "funk" assert user_2_tags[0].track_id == 4 assert user_2_tags[1].tag == "funk" assert user_2_tags[1].track_id == 5 assert user_2_tags[2].tag == "funk" assert user_2_tags[2].track_id == 6 assert user_2_tags[3].tag == "kpop" assert user_2_tags[3].track_id == 6 assert user_2_tags[4].tag == "pop" assert user_2_tags[4].track_id == 4 assert user_2_tags[5].tag == "pop" assert user_2_tags[5].track_id == 5 assert not user_4_tags
def test_index_user_listening_history_no_update(app):
    """Test when there are no new plays"""
    # setup
    with app.app_context():
        db = get_db()

        # run
        entities = {
            "tracks": [
                {"track_id": 1, "title": "track 1"},
                {"track_id": 2, "title": "track 2"},
                {"track_id": 3, "title": "track 3"},
                {"track_id": 4, "title": "track 3"},
            ],
            "users": [
                {"user_id": 1, "handle": "user-1"},
                {"user_id": 2, "handle": "user-2"},
                {"user_id": 3, "handle": "user-3"},
                {"user_id": 4, "handle": "user-4"},
            ],
            # Pre-existing histories (newest-first per user).
            "user_listening_history": [
                {
                    "user_id": 1,
                    "listening_history": [
                        {"timestamp": str(TIMESTAMP_1), "track_id": 1}
                    ],
                },
                {
                    "user_id": 2,
                    "listening_history": [
                        {"timestamp": str(TIMESTAMP_2), "track_id": 2},
                        {"timestamp": str(TIMESTAMP_1), "track_id": 1},
                    ],
                },
                {
                    "user_id": 3,
                    "listening_history": [
                        {"timestamp": str(TIMESTAMP_3), "track_id": 1},
                        {"timestamp": str(TIMESTAMP_2), "track_id": 3},
                        {"timestamp": str(TIMESTAMP_1), "track_id": 2},
                    ],
                },
            ],
            "plays": [
                # Current Plays (already reflected in the histories above)
                {"item_id": 1, "user_id": 1, "created_at": TIMESTAMP_1},
                {"item_id": 1, "user_id": 2, "created_at": TIMESTAMP_1},
                {"item_id": 2, "user_id": 3, "created_at": TIMESTAMP_1},
                {"item_id": 2, "user_id": 2, "created_at": TIMESTAMP_2},
                {"item_id": 3, "user_id": 3, "created_at": TIMESTAMP_2},
                {"item_id": 1, "user_id": 3, "created_at": TIMESTAMP_3},
                # New anon plays — no user_id, so they must be ignored
                {"item_id": 1, "user_id": None, "created_at": TIMESTAMP_3},
                {"item_id": 1, "user_id": None, "created_at": TIMESTAMP_4},
            ],
            # Checkpoint already covers the 6 attributed plays.
            "indexing_checkpoints": [{
                "tablename": USER_LISTENING_HISTORY_TABLE_NAME,
                "last_checkpoint": 6
            }],
        }

        populate_mock_db(db, entities)

        with db.scoped_session() as session:
            _index_user_listening_history(session)

            # Histories should be unchanged: the only new plays are anonymous.
            results: List[UserListeningHistory] = (
                session.query(UserListeningHistory).order_by(
                    UserListeningHistory.user_id).all())
            assert len(results) == 3

            assert results[0].user_id == 1
            assert len(results[0].listening_history) == 1
            assert results[0].listening_history[0]["track_id"] == 1
            assert results[0].listening_history[0]["timestamp"] == str(TIMESTAMP_1)

            assert results[1].user_id == 2
            assert len(results[1].listening_history) == 2
            assert results[1].listening_history[0]["track_id"] == 2
            assert results[1].listening_history[0]["timestamp"] == str(TIMESTAMP_2)
            assert results[1].listening_history[1]["track_id"] == 1
            assert results[1].listening_history[1]["timestamp"] == str(TIMESTAMP_1)

            assert results[2].user_id == 3
            assert len(results[2].listening_history) == 3
            assert results[2].listening_history[0]["track_id"] == 1
            assert results[2].listening_history[0]["timestamp"] == str(TIMESTAMP_3)
            assert results[2].listening_history[1]["track_id"] == 3
            assert results[2].listening_history[1]["timestamp"] == str(TIMESTAMP_2)
            assert results[2].listening_history[2]["track_id"] == 2
            assert results[2].listening_history[2]["timestamp"] == str(TIMESTAMP_1)

            # The checkpoint must not move past the anonymous plays' positions.
            new_checkpoint: IndexingCheckpoints = (session.query(
                IndexingCheckpoints.last_checkpoint).filter(
                    IndexingCheckpoints.tablename ==
                    USER_LISTENING_HISTORY_TABLE_NAME).scalar())
            assert new_checkpoint == 6
def test_index_user_listening_history_update(app):
    """Tests updating user_listening_history"""
    # setup
    with app.app_context():
        db = get_db()

        # run
        entities = {
            "tracks": [
                {"track_id": 1, "title": "track 1"},
                {"track_id": 2, "title": "track 2"},
                {"track_id": 3, "title": "track 3"},
                {"track_id": 4, "title": "track 3"},
            ],
            "users": [
                {"user_id": 1, "handle": "user-1"},
                {"user_id": 2, "handle": "user-2"},
                {"user_id": 3, "handle": "user-3"},
                {"user_id": 4, "handle": "user-4"},
            ],
            # Pre-existing histories (newest-first per user).
            "user_listening_history": [
                {
                    "user_id": 1,
                    "listening_history": [
                        {"timestamp": str(TIMESTAMP_1), "track_id": 1}
                    ],
                },
                {
                    "user_id": 2,
                    "listening_history": [
                        {"timestamp": str(TIMESTAMP_2), "track_id": 2},
                        {"timestamp": str(TIMESTAMP_1), "track_id": 1},
                    ],
                },
                {
                    "user_id": 3,
                    "listening_history": [
                        {"timestamp": str(TIMESTAMP_3), "track_id": 1},
                        {"timestamp": str(TIMESTAMP_2), "track_id": 3},
                        {"timestamp": str(TIMESTAMP_1), "track_id": 2},
                    ],
                },
            ],
            "plays": [
                # Current Plays (already indexed; checkpoint = 6 below)
                {"item_id": 1, "user_id": 1, "created_at": TIMESTAMP_1},
                {"item_id": 1, "user_id": 2, "created_at": TIMESTAMP_1},
                {"item_id": 2, "user_id": 3, "created_at": TIMESTAMP_1},
                {"item_id": 2, "user_id": 2, "created_at": TIMESTAMP_2},
                {"item_id": 3, "user_id": 3, "created_at": TIMESTAMP_2},
                {"item_id": 1, "user_id": 3, "created_at": TIMESTAMP_3},
                # New play
                {
                    "item_id": 2,
                    "user_id": 1,
                    "created_at": TIMESTAMP_3,
                },  # listen to new track
                {
                    "item_id": 1,
                    "user_id": 1,
                    "created_at": TIMESTAMP_4,
                },  # re-listen to existing track, dedupe
            ] + [
                # new user listens to many tracks (history should be capped)
                {
                    "item_id": i + 1,
                    "user_id": 4,
                    "created_at": TIMESTAMP_4 + timedelta(hours=i),
                }
                for i in range(2000)
            ],
            "indexing_checkpoints": [{
                "tablename": USER_LISTENING_HISTORY_TABLE_NAME,
                "last_checkpoint": 6
            }],
        }

        populate_mock_db(db, entities)

        with db.scoped_session() as session:
            _index_user_listening_history(session)

        with db.scoped_session() as session:
            results: List[UserListeningHistory] = (
                session.query(UserListeningHistory).order_by(
                    UserListeningHistory.user_id).all())
            assert len(results) == 4

            # User 1: re-listen to track 1 moved it to the front (deduped),
            # new listen to track 2 inserted behind it.
            assert results[0].user_id == 1
            assert len(results[0].listening_history) == 2
            assert results[0].listening_history[0]["track_id"] == 1
            assert results[0].listening_history[0]["timestamp"] == str(TIMESTAMP_4)
            assert results[0].listening_history[1]["track_id"] == 2
            assert results[0].listening_history[1]["timestamp"] == str(TIMESTAMP_3)

            # Users 2 and 3: unchanged.
            assert results[1].user_id == 2
            assert len(results[1].listening_history) == 2
            assert results[1].listening_history[0]["track_id"] == 2
            assert results[1].listening_history[0]["timestamp"] == str(TIMESTAMP_2)
            assert results[1].listening_history[1]["track_id"] == 1
            assert results[1].listening_history[1]["timestamp"] == str(TIMESTAMP_1)

            assert results[2].user_id == 3
            assert len(results[2].listening_history) == 3
            assert results[2].listening_history[0]["track_id"] == 1
            assert results[2].listening_history[0]["timestamp"] == str(TIMESTAMP_3)
            assert results[2].listening_history[1]["track_id"] == 3
            assert results[2].listening_history[1]["timestamp"] == str(TIMESTAMP_2)
            assert results[2].listening_history[2]["track_id"] == 2
            assert results[2].listening_history[2]["timestamp"] == str(TIMESTAMP_1)

            # User 4: 2000 plays, history capped at the 1000 most recent,
            # newest first.
            assert results[3].user_id == 4
            assert len(results[3].listening_history) == 1000
            for i in range(1000):
                assert results[3].listening_history[i]["track_id"] == 2000 - i
                assert results[3].listening_history[i]["timestamp"] == str(
                    datetime.fromisoformat("2014-06-26 07:00:00") -
                    timedelta(hours=i))

            # 6 previously indexed + 2 new + 2000 bulk = 2008.
            new_checkpoint: IndexingCheckpoints = (session.query(
                IndexingCheckpoints.last_checkpoint).filter(
                    IndexingCheckpoints.tablename ==
                    USER_LISTENING_HISTORY_TABLE_NAME).scalar())
            assert new_checkpoint == 2008
def test_index_aggregate_user_empty_users(app): """Test that user metadata without users table won't break""" with app.app_context(): db = get_db() entities = { "users": [], "indexing_checkpoints": [{ "tablename": AGGREGATE_USER, "last_checkpoint": 0 }], "tracks": [ { "track_id": 1, "owner_id": 1 }, { "track_id": 2, "owner_id": 1 }, { "track_id": 3, "is_unlisted": True, "owner_id": 1 }, ], "playlists": [ { "playlist_id": 1, "playlist_owner_id": 1, "playlist_name": "name", "description": "description", "playlist_contents": { "track_ids": [ { "track": 1, "time": 1 }, { "track": 2, "time": 2 }, { "track": 3, "time": 3 }, ] }, }, { "playlist_id": 2, "is_album": True, "playlist_owner_id": 1, "playlist_name": "name", "description": "description", "playlist_contents": { "track_ids": [ { "track": 1, "time": 1 }, { "track": 2, "time": 2 }, { "track": 3, "time": 3 }, ] }, }, ], "follows": [ { "follower_user_id": 1, "followee_user_id": 2, "created_at": datetime.now() - timedelta(days=8), }, { "follower_user_id": 2, "followee_user_id": 1, "created_at": datetime.now() - timedelta(days=8), }, ], "reposts": [ { "repost_item_id": 1, "repost_type": "track", "user_id": 1 }, { "repost_item_id": 1, "repost_type": "playlist", "user_id": 1 }, ], "saves": [ { "save_item_id": 1, "save_type": "track", "user_id": 1 }, { "save_item_id": 1, "save_type": "playlist", "user_id": 1 }, ], } populate_mock_db(db, entities) with db.scoped_session() as session: _update_aggregate_user(session) results: List[AggregateUser] = (session.query(AggregateUser).order_by( AggregateUser.user_id).all()) assert (len(results) == 0 ), "Test that without Users there will be no AggregateUsers" prev_id_checkpoint = get_last_indexed_checkpoint( session, AGGREGATE_USER) assert prev_id_checkpoint == 2
def search_tags():
    """Route handler: search tracks and users by a tag string.

    Query params: 'query' (required tag term), 'user_tag_count' (min number of
    a user's tracks carrying the tag, default "2"), plus standard pagination.
    Returns tracks sorted by play count and users sorted by follower count;
    when a current user is present, also returns their saved tracks and
    followed users that match the tag.
    """
    search_str = request.args.get("query", type=str)
    current_user_id = get_current_user_id(required=False)
    if not search_str:
        raise exceptions.ArgumentError("Invalid value for parameter 'query'")

    user_tag_count = request.args.get("user_tag_count", type=str)
    if not user_tag_count:
        user_tag_count = "2"

    (limit, offset) = get_pagination_vars()
    like_tags_str = str.format('%{}%', search_str)
    db = get_db()
    with db.scoped_session() as session:
        # NOTE(review): the f-string prefixes below are unnecessary — neither
        # SQL string interpolates anything; values are bound via :params.
        # Coarse LIKE prefilter, then exact tag match via tsvector.
        track_res = sqlalchemy.text(
            f"""
            select distinct(track_id) from (
                select
                    strip(to_tsvector(tracks.tags)) as tagstrip,
                    track_id
                from tracks
                where (tags like :like_tags_query)
                and (is_current is true)
                and (is_delete is false)
                order by updated_at desc
            ) as t
            where tagstrip @@ to_tsquery(:query);
            """
        )
        # Users qualify when at least :user_tag_count of their current tracks
        # carry the tag.
        user_res = sqlalchemy.text(
            f"""
            select * from (
                select
                    count(track_id),
                    owner_id
                from (
                    select
                        strip(to_tsvector(tracks.tags)) as tagstrip,
                        track_id,
                        owner_id
                    from tracks
                    where (tags like :like_tags_query)
                    and (is_current is true)
                    order by updated_at desc
                ) as t
                where tagstrip @@ to_tsquery(:query)
                group by owner_id
                order by count desc
            ) as usr
            where usr.count >= :user_tag_count;
            """
        )
        track_ids = session.execute(
            track_res,
            {
                "query": search_str,
                "like_tags_query": like_tags_str
            }
        ).fetchall()
        user_ids = session.execute(
            user_res,
            {
                "query": search_str,
                "like_tags_query": like_tags_str,
                "user_tag_count": user_tag_count
            }
        ).fetchall()

        # track_ids is list of tuples - simplify to 1-D list
        track_ids = [i[0] for i in track_ids]
        # user_ids is list of tuples - simplify to 1-D list
        # (index 1: the user query selects (count, owner_id))
        user_ids = [i[1] for i in user_ids]

        tracks = (
            session.query(Track)
            .filter(
                Track.is_current == True,
                Track.is_delete == False,
                Track.track_id.in_(track_ids),
            )
            .all()
        )
        tracks = helpers.query_result_to_list(tracks)
        track_play_counts = get_track_play_counts(track_ids)

        users = (
            session.query(User)
            .filter(
                User.is_current == True,
                User.is_ready == True,
                User.user_id.in_(user_ids)
            )
            .all()
        )
        users = helpers.query_result_to_list(users)

        # NOTE(review): scoped_session is re-entered here while the outer
        # session is still active — presumably this yields the same underlying
        # session and is redundant; confirm before restructuring.
        with db.scoped_session() as session:
            tracks = populate_track_metadata(
                session, track_ids, tracks, current_user_id)
            users = populate_user_metadata(
                session, user_ids, users, current_user_id)

            followee_sorted_users = sorted(
                users,
                key=lambda i: i[response_name_constants.follower_count],
                reverse=True)

            # Attach play counts, then rank tracks by them.
            for track in tracks:
                track_id = track["track_id"]
                track[response_name_constants.play_count] = \
                    track_play_counts.get(track_id, 0)
            play_count_sorted_tracks = sorted(
                tracks,
                key=lambda i: i[response_name_constants.play_count],
                reverse=True)

            # Add pagination parameters to track and user results
            play_count_sorted_tracks = \
                play_count_sorted_tracks[slice(offset, offset + limit, 1)]
            followee_sorted_users = \
                followee_sorted_users[slice(offset, offset + limit, 1)]

            resp = {
                'tracks': [],
                'users': [],
                'saved_tracks': [],
                'followed_users': []
            }
            resp['tracks'] = play_count_sorted_tracks
            resp['users'] = followee_sorted_users

            # Add personalized results for a given user
            if current_user_id:
                # Query saved tracks for the current user that contain this tag
                saves_query = (
                    session.query(Save.save_item_id)
                    .filter(
                        Save.is_current == True,
                        Save.is_delete == False,
                        Save.save_type == SaveType.track,
                        Save.user_id == current_user_id,
                        Save.save_item_id.in_(track_ids)
                    )
                    .all()
                )
                saved_track_ids = [i[0] for i in saves_query]
                saved_tracks = (
                    session.query(Track)
                    .filter(
                        Track.is_current == True,
                        Track.is_delete == False,
                        Track.track_id.in_(saved_track_ids),
                    )
                    .all()
                )
                saved_tracks = helpers.query_result_to_list(saved_tracks)
                for saved_track in saved_tracks:
                    saved_track_id = saved_track["track_id"]
                    saved_track[response_name_constants.play_count] = \
                        track_play_counts.get(saved_track_id, 0)

                # Query followed users that have referenced this tag
                followed_user_query = (
                    session.query(Follow.followee_user_id)
                    .filter(
                        Follow.is_current == True,
                        Follow.is_delete == False,
                        Follow.follower_user_id == current_user_id,
                        Follow.followee_user_id.in_(user_ids)
                    )
                    .all()
                )
                followed_user_ids = [i[0] for i in followed_user_query]
                followed_users = (
                    session.query(User)
                    .filter(
                        User.is_current == True,
                        User.is_ready == True,
                        User.user_id.in_(followed_user_ids)
                    )
                    .all()
                )
                followed_users = helpers.query_result_to_list(followed_users)

                # NOTE(review): a third re-entered scoped_session — see note
                # above.
                with db.scoped_session() as session:
                    saved_tracks = \
                        populate_track_metadata(
                            session, saved_track_ids, saved_tracks,
                            current_user_id)
                    followed_users = \
                        populate_user_metadata(
                            session, followed_user_ids, followed_users,
                            current_user_id
                        )

                    # Sort and paginate
                    play_count_sorted_saved_tracks = sorted(
                        saved_tracks,
                        key=lambda i: i[response_name_constants.play_count],
                        reverse=True)
                    play_count_sorted_saved_tracks = \
                        play_count_sorted_saved_tracks[
                            slice(offset, offset + limit, 1)]

                    followed_users_followee_sorted = sorted(
                        followed_users,
                        key=lambda i: i[response_name_constants.follower_count],
                        reverse=True)
                    followed_users_followee_sorted = \
                        followed_users_followee_sorted[
                            slice(offset, offset + limit, 1)]

                    resp['saved_tracks'] = play_count_sorted_saved_tracks
                    resp['followed_users'] = followed_users_followee_sorted

    return api_helpers.success_response(resp)
def test_index_aggregate_user_empty_activity(app): """Test that a populated users table without activity won't break""" with app.app_context(): db = get_db() entities = { "users": [ { "user_id": 1, "handle": "user1" }, { "user_id": 2, "handle": "user2" }, ], "indexing_checkpoints": [{ "tablename": AGGREGATE_USER, "last_checkpoint": 5 }], } # create user1 and user2 in blocknumbers 3 and 4, respectively populate_mock_db(db, entities, block_offset=3) with db.scoped_session() as session: _update_aggregate_user(session) results: List[AggregateUser] = (session.query(AggregateUser).order_by( AggregateUser.user_id).all()) assert ( len(results) == 0 ), "Test that users updated on blocks previous to '5' will not be targeted" prev_id_checkpoint = get_last_indexed_checkpoint( session, AGGREGATE_USER) assert prev_id_checkpoint == 4 entities = { "indexing_checkpoints": [{ "tablename": AGGREGATE_USER, "last_checkpoint": 1 }], } populate_mock_db(db, entities) with db.scoped_session() as session: _update_aggregate_user(session) results: List[AggregateUser] = (session.query(AggregateUser).order_by( AggregateUser.user_id).all()) assert ( len(results) == 2 ), "Test that users updated on blocks after '1' will be targeted" prev_id_checkpoint = get_last_indexed_checkpoint( session, AGGREGATE_USER) assert prev_id_checkpoint == 4
def test_index_aggregate_track_populate(app):
    """Test that we should populate tracks from empty"""
    with app.app_context():
        db = get_db()

        with db.scoped_session() as session:
            results: List[AggregateTrack] = (
                session.query(AggregateTrack).order_by(
                    AggregateTrack.track_id).all())
            assert (len(results) == 0
                    ), "Test aggregate_track is empty before populate_mock_db()"

        # create db entries based on entities
        populate_mock_db(db, basic_entities, block_offset=3)
        last_checkpoint = 12

        with db.scoped_session() as session:
            # confirm nothing exists before _update_aggregate_track()
            results: List[AggregateTrack] = (
                session.query(AggregateTrack).order_by(
                    AggregateTrack.track_id).all())
            assert (
                len(results) == 0
            ), "Test aggregate_track is empty before _update_aggregate_track()"

            # trigger celery task
            _update_aggregate_track(session)

            # run basic tests against basic_entities
            basic_tests(session, last_checkpoint=last_checkpoint)

        # delete a track
        entities = {
            "tracks": [
                {
                    "track_id": 2,
                    "owner_id": 1,
                    "is_current": True,
                    "is_delete": True,
                },
            ],
        }
        populate_mock_db(db, entities)
        last_checkpoint += 1

        # confirm track 2 no longer has a row in aggregate_track
        with db.scoped_session() as session:
            _update_aggregate_track(session)

            results: List[AggregateTrack] = (
                session.query(AggregateTrack).order_by(
                    AggregateTrack.track_id).all())
            assert len(results) == 3
            assert results[0].track_id == 1
            assert results[0].repost_count == 3
            assert results[0].save_count == 1
            assert results[1].track_id == 4
            assert results[1].repost_count == 0
            assert results[1].save_count == 4
            assert results[2].track_id == 5
            assert results[2].repost_count == 0
            assert results[2].save_count == 0

            prev_id_checkpoint = get_last_indexed_checkpoint(
                session, AGGREGATE_TRACK)
            assert prev_id_checkpoint == last_checkpoint

        # repost a deleted track
        entities = {
            "reposts": [
                {
                    "repost_item_id": 2,
                    "repost_type": "track",
                    "user_id": 2,
                    "is_current": True,
                },
            ],
        }
        populate_mock_db(db, entities)
        last_checkpoint += 1

        # confirm track 2 still no longer has a row in aggregate_track
        with db.scoped_session() as session:
            _update_aggregate_track(session)

            results: List[AggregateTrack] = (
                session.query(AggregateTrack).order_by(
                    AggregateTrack.track_id).all())
            assert len(results) == 3
            assert results[0].track_id == 1
            assert results[0].repost_count == 3
            assert results[0].save_count == 1
            assert results[1].track_id == 4
            assert results[1].repost_count == 0
            assert results[1].save_count == 4
            assert results[2].track_id == 5
            assert results[2].repost_count == 0
            assert results[2].save_count == 0

            prev_id_checkpoint = get_last_indexed_checkpoint(
                session, AGGREGATE_TRACK)
            assert prev_id_checkpoint == last_checkpoint

        # undelete a track
        entities = {
            "tracks": [
                {
                    "track_id": 2,
                    "owner_id": 1,
                    "is_current": True,
                    "is_delete": False,
                },
            ],
        }
        populate_mock_db(db, entities)
        last_checkpoint += 1

        # confirm track 2 has a row in aggregate_track again, with an additional repost
        with db.scoped_session() as session:
            _update_aggregate_track(session)

            results: List[AggregateTrack] = (
                session.query(AggregateTrack).order_by(
                    AggregateTrack.track_id).all())
            assert len(results) == 4
            assert results[0].track_id == 1
            assert results[0].repost_count == 3
            assert results[0].save_count == 1
            assert results[1].track_id == 2
            assert results[1].repost_count == 1
            assert results[1].save_count == 0
            assert results[2].track_id == 4
            assert results[2].repost_count == 0
            assert results[2].save_count == 4
            assert results[3].track_id == 5
            assert results[3].repost_count == 0
            assert results[3].save_count == 0

            prev_id_checkpoint = get_last_indexed_checkpoint(
                session, AGGREGATE_TRACK)
            assert prev_id_checkpoint == last_checkpoint
def test_index_aggregate_user_update_with_only_aggregate_user(app):
    """Test that aggregate_user will never be truncated even when no other data"""
    with app.app_context():
        db = get_db()

        # Only aggregate rows + a checkpoint; no base entities at all.
        entities = {
            "aggregate_user": [
                {
                    "user_id": 1,
                    "track_count": 9,
                    "playlist_count": 9,
                    "album_count": 9,
                    "follower_count": 9,
                    "following_count": 9,
                    "repost_count": 9,
                    "track_save_count": 9,
                },
                {
                    "user_id": 2,
                    "track_count": 9,
                    "playlist_count": 9,
                    "album_count": 9,
                    "follower_count": 9,
                    "following_count": 9,
                    "repost_count": 9,
                    "track_save_count": 9,
                },
                {
                    "user_id": 3,
                    "track_count": 9,
                    "playlist_count": 9,
                    "album_count": 9,
                    "follower_count": 9,
                    "following_count": 9,
                    "repost_count": 9,
                    "track_save_count": 9,
                },
            ],
            "indexing_checkpoints": [{
                "tablename": AGGREGATE_USER,
                "last_checkpoint": 9
            }],
        }

        populate_mock_db(db, entities)

        with db.scoped_session() as session:
            results: List[AggregateUser] = (session.query(AggregateUser).order_by(
                AggregateUser.user_id).all())
            assert len(results) == 3, "Test that entities exist as expected"

            _update_aggregate_user(session)

            results: List[AggregateUser] = (session.query(AggregateUser).order_by(
                AggregateUser.user_id).all())
            assert (
                len(results) == 3
            ), "Test zero-modifications since last_checkpoint is in the future"

            prev_id_checkpoint = get_last_indexed_checkpoint(
                session, AGGREGATE_USER)
            assert prev_id_checkpoint == 9

        # Reset the checkpoint to 0 and confirm the rows survive a re-run.
        entities = {
            "indexing_checkpoints": [{
                "tablename": AGGREGATE_USER,
                "last_checkpoint": 0
            }],
        }

        populate_mock_db(db, entities)

        with db.scoped_session() as session:
            results: List[AggregateUser] = (session.query(AggregateUser).order_by(
                AggregateUser.user_id).all())
            assert (
                len(results) == 3
            ), "Test that entities exist as expected, even though checkpoint has been reset"

            _update_aggregate_user(session)

            results: List[AggregateUser] = (session.query(AggregateUser).order_by(
                AggregateUser.user_id).all())
            assert (
                len(results) == 3
            ), "Test that aggregate_user has not been truncated due to reset checkpoint"

            prev_id_checkpoint = get_last_indexed_checkpoint(
                session, AGGREGATE_USER)
            assert prev_id_checkpoint == 0
def test_index_aggregate_track_empty_activity(app): """Test that a populated tracks table without activity won't break""" with app.app_context(): db = get_db() entities = { "tracks": [ { "track_id": 1, "owner_id": 1, "is_current": True }, { "track_id": 2, "owner_id": 1, "is_current": True }, { "track_id": 3, "owner_id": 1, "is_current": True, "is_delete": True, }, { "track_id": 4, "owner_id": 2, "is_current": True }, { "track_id": 5, "owner_id": 1, "is_current": True, "is_unlisted": True, }, ], "indexing_checkpoints": [{ "tablename": AGGREGATE_TRACK, "last_checkpoint": 10 }], } populate_mock_db(db, entities, block_offset=6) with db.scoped_session() as session: _update_aggregate_track(session) results: List[AggregateTrack] = ( session.query(AggregateTrack).order_by( AggregateTrack.track_id).all()) assert ( len(results) == 0 ), "Test that tracks updated on blocks previous to '10' will not be targeted" prev_id_checkpoint = get_last_indexed_checkpoint( session, AGGREGATE_TRACK) assert prev_id_checkpoint == 10 entities = { "indexing_checkpoints": [{ "tablename": AGGREGATE_TRACK, "last_checkpoint": 1 }], } populate_mock_db(db, entities) with db.scoped_session() as session: _update_aggregate_track(session) results: List[AggregateTrack] = ( session.query(AggregateTrack).order_by( AggregateTrack.track_id).all()) assert ( len(results) == 4 ), "Test that tracks updated on blocks after '1' will be targeted" prev_id_checkpoint = get_last_indexed_checkpoint( session, AGGREGATE_TRACK) assert prev_id_checkpoint == 10
def test_populate_playlist_metadata(app):
    """Tests that populate_playlist_metadata works after aggregate_user refresh"""
    with app.app_context():
        db = get_db()

        test_entities = {
            'playlists': [
                {"playlist_id": 1, "playlist_owner_id": 1},
                {"playlist_id": 2, "playlist_owner_id": 1},
                {"playlist_id": 3, "playlist_owner_id": 2},
                {"playlist_id": 4, "playlist_owner_id": 3}
            ],
            'users': [
                {'user_id': 1, 'handle': 'user1'},
                {'user_id': 2, 'handle': 'user2'},
                {'user_id': 3, 'handle': 'user3'},
                {'user_id': 4, 'handle': 'user4'}
            ],
            'reposts': [
                {"repost_item_id": 1, "repost_type": 'playlist', "user_id": 2},
                {"repost_item_id": 1, "repost_type": 'playlist', "user_id": 3},
                {"repost_item_id": 2, "repost_type": 'playlist', "user_id": 1}
            ],
            'saves': [
                {"save_item_id": 1, "save_type": 'playlist', "user_id": 2},
                {"save_item_id": 1, "save_type": 'playlist', "user_id": 3},
                {"save_item_id": 3, "save_type": 'playlist', "user_id": 2},
                {"save_item_id": 3, "save_type": 'playlist', "user_id": 1}
            ],
            # User 1 follows users 2 and 3 — drives the followee_* assertions.
            'follows': [
                {"follower_user_id": 1, "followee_user_id": 2},
                {"follower_user_id": 1, "followee_user_id": 3}
            ]
        }
        populate_mock_db(db, test_entities)

        with db.scoped_session() as session:
            session.execute("REFRESH MATERIALIZED VIEW aggregate_playlist")

            # Anonymous request (current_user_id=None): only counts populated.
            playlist_ids = [1, 2, 3, 4]
            playlists = [
                {"playlist_id": 1, "playlist_contents": {"track_ids": []}},
                {"playlist_id": 2, "playlist_contents": {"track_ids": []}},
                {"playlist_id": 3, "playlist_contents": {"track_ids": []}},
                {"playlist_id": 4, "playlist_contents": {"track_ids": []}}
            ]
            playlists = populate_playlist_metadata(
                session, playlist_ids, playlists,
                [RepostType.playlist, RepostType.album],
                [SaveType.playlist, SaveType.album], None)
            assert len(playlists) == 4
            assert playlists[0]['playlist_id'] == 1
            assert playlists[0][response_name_constants.repost_count] == 2
            assert playlists[0][response_name_constants.save_count] == 2
            assert playlists[0][response_name_constants.total_play_count] == 0
            assert playlists[1]['playlist_id'] == 2
            assert playlists[1][response_name_constants.repost_count] == 1
            assert playlists[1][response_name_constants.save_count] == 0
            assert playlists[1][response_name_constants.total_play_count] == 0
            assert playlists[2]['playlist_id'] == 3
            assert playlists[2][response_name_constants.repost_count] == 0
            assert playlists[2][response_name_constants.save_count] == 2
            assert playlists[2][response_name_constants.total_play_count] == 0

            # Personalized request as user 1: followee reposts/saves and
            # has_current_user_* flags should be populated.
            curr_playlist_ids = [1, 2, 3]
            curr_playlists = [
                {"playlist_id": 1, "playlist_contents": {"track_ids": []}},
                {"playlist_id": 2, "playlist_contents": {"track_ids": []}},
                {"playlist_id": 3, "playlist_contents": {"track_ids": []}}
            ]
            playlists = populate_playlist_metadata(
                session, curr_playlist_ids, curr_playlists,
                [RepostType.playlist, RepostType.album],
                [SaveType.playlist, SaveType.album], 1)
            assert len(playlists) == 3
            assert playlists[0]['playlist_id'] == 1
            repost_user_ids = [
                repost['user_id'] for repost in playlists[0][
                    response_name_constants.followee_reposts]
            ]
            repost_user_ids.sort()
            assert repost_user_ids == [2, 3]
            save_user_ids = [
                save['user_id']
                for save in playlists[0][response_name_constants.followee_saves]
            ]
            save_user_ids.sort()
            assert save_user_ids == [2, 3]
            assert playlists[0][
                response_name_constants.has_current_user_reposted] == False
            assert playlists[0][
                response_name_constants.has_current_user_saved] == False
            assert playlists[1]['playlist_id'] == 2
            assert playlists[1][response_name_constants.followee_reposts] == []
            assert playlists[1][response_name_constants.followee_saves] == []
            assert playlists[1][
                response_name_constants.has_current_user_reposted] == True
            assert playlists[1][
                response_name_constants.has_current_user_saved] == False
            assert playlists[2]['playlist_id'] == 3
            assert playlists[2][response_name_constants.followee_reposts] == []
            save_user_ids = [
                save['user_id']
                for save in playlists[2][response_name_constants.followee_saves]
            ]
            save_user_ids.sort()
            assert save_user_ids == [2]
            assert playlists[2][
                response_name_constants.has_current_user_reposted] == False
            assert playlists[2][
                response_name_constants.has_current_user_saved] == True
def test_index_aggregate_track_update_with_extra_user(app): """Test that the aggregate_track only modifies non-deleted tracks""" with app.app_context(): db = get_db() entities = deepcopy(basic_entities) entities.update({ "indexing_checkpoints": [{ "tablename": AGGREGATE_TRACK, "last_checkpoint": 0 }], "aggregate_track": [ { "track_id": 1, "repost_count": 9, "save_count": 9, }, { "track_id": 2, "repost_count": 9, "save_count": 9, }, { "track_id": 3, "repost_count": 9, "save_count": 9, }, { "track_id": 4, "repost_count": 9, "save_count": 9, }, { "track_id": 5, "repost_count": 9, "save_count": 9, }, ], }) with db.scoped_session() as session: results: List[AggregateTrack] = ( session.query(AggregateTrack).order_by( AggregateTrack.track_id).all()) assert len(results) == 0, "Test that we start with clean tables" populate_mock_db(db, entities) with db.scoped_session() as session: results: List[AggregateTrack] = ( session.query(AggregateTrack).order_by( AggregateTrack.track_id).all()) assert len( results) == 5, "Test that aggregate_track entities are populated" for result in results: assert result.repost_count == 9, "Test entities were populated correctly" assert result.save_count == 9, "Test entities were populated correctly" _update_aggregate_track(session) with db.scoped_session() as session: basic_tests(session, last_checkpoint=9)
def test_populate_user_metadata(app):
    """Tests that populate_user_metadata works after aggregate_user refresh"""
    with app.app_context():
        db = get_db()

        # NOTE(review): track_id 8 and playlist_id 6 each appear twice below —
        # presumably simulating multiple revisions of the same entity; confirm
        # against populate_mock_db's handling of duplicate ids.
        test_entities = {
            'tracks': [
                {"track_id": 1, "owner_id": 1},
                {"track_id": 2, "owner_id": 1},
                {"track_id": 3, "owner_id": 2},
                {"track_id": 4, "owner_id": 2},
                {"track_id": 5, "owner_id": 2},
                {"track_id": 6, "owner_id": 2},
                {"track_id": 7, "owner_id": 3},
                {"track_id": 8, "owner_id": 3},
                {"track_id": 8, "owner_id": 3},
                {"track_id": 9, "is_unlisted": True, "owner_id": 3},
            ],
            'playlists': [
                {"playlist_id": 1, "playlist_owner_id": 1},
                {"playlist_id": 2, "playlist_owner_id": 1},
                {"playlist_id": 3, "is_album": True, "playlist_owner_id": 1},
                {"playlist_id": 4, "playlist_owner_id": 2},
                {"playlist_id": 5, "is_delete": True, "playlist_owner_id": 2},
                {"playlist_id": 6, "is_album": True, "playlist_owner_id": 3},
                {"playlist_id": 6, "is_private": True, "playlist_owner_id": 3},
            ],
            'users': [
                {'user_id': 1, 'handle': 'user1'},
                {'user_id': 2, 'handle': 'user2'},
                {'user_id': 3, 'handle': 'user3'},
                {'user_id': 4, 'handle': 'user4'},
            ],
            'follows': [
                {"follower_user_id": 1, "followee_user_id": 2},
                {"follower_user_id": 1, "followee_user_id": 3},
                {"follower_user_id": 2, "followee_user_id": 3},
            ],
            'reposts': [
                {"repost_item_id": 1, "repost_type": 'track', "user_id": 2},
                {"repost_item_id": 1, "repost_type": 'playlist', "user_id": 2},
                {"repost_item_id": 1, "repost_type": 'track', "user_id": 3},
                {"repost_item_id": 1, "repost_type": 'playlist', "user_id": 3},
                {"repost_item_id": 4, "repost_type": 'track', "user_id": 1},
                {"repost_item_id": 5, "repost_type": 'track', "user_id": 1},
                {"repost_item_id": 6, "repost_type": 'track', "user_id": 1},
            ],
            'saves': [
                {"save_item_id": 1, "save_type": 'track', "user_id": 2},
                {"save_item_id": 1, "save_type": 'playlist', "user_id": 2},
                {"save_item_id": 1, "save_type": 'track', "user_id": 3},
                {"save_item_id": 1, "save_type": 'playlist', "user_id": 3},
                {"save_item_id": 4, "save_type": 'track', "user_id": 1},
                {"save_item_id": 5, "save_type": 'track', "user_id": 1},
                {"save_item_id": 6, "save_type": 'track', "user_id": 1},
            ]
        }
        populate_mock_db(db, test_entities)

        with db.scoped_session() as session:
            # Aggregate counts only become visible after the matview refresh.
            session.execute("REFRESH MATERIALIZED VIEW aggregate_user")

            # First pass: no current user, so only the aggregate counts are
            # attached. User 5 has no mock row and should come back all-zero.
            user_ids = [1, 2, 3, 4, 5]
            users = [
                {"user_id": 1, "is_verified": False},
                {"user_id": 2, "is_verified": False},
                {"user_id": 3, "is_verified": False},
                {"user_id": 4, "is_verified": False},
                {"user_id": 5, "is_verified": False},
            ]
            users = populate_user_metadata(session, user_ids, users, None)
            assert len(users) == 5

            assert users[0]['user_id'] == 1
            assert users[0][response_name_constants.track_count] == 2
            assert users[0][response_name_constants.playlist_count] == 2
            assert users[0][response_name_constants.album_count] == 1
            assert users[0][response_name_constants.follower_count] == 0
            assert users[0][response_name_constants.followee_count] == 2
            assert users[0][response_name_constants.repost_count] == 3

            assert users[1]['user_id'] == 2
            assert users[1][response_name_constants.track_count] == 4
            assert users[1][response_name_constants.playlist_count] == 1
            assert users[1][response_name_constants.album_count] == 0
            assert users[1][response_name_constants.follower_count] == 1
            assert users[1][response_name_constants.followee_count] == 1
            assert users[1][response_name_constants.repost_count] == 2

            assert users[2]['user_id'] == 3
            assert users[2][response_name_constants.track_count] == 3
            assert users[2][response_name_constants.playlist_count] == 0
            assert users[2][response_name_constants.album_count] == 1
            assert users[2][response_name_constants.follower_count] == 2
            assert users[2][response_name_constants.followee_count] == 0
            assert users[2][response_name_constants.repost_count] == 2

            assert users[3]['user_id'] == 4
            assert users[3][response_name_constants.track_count] == 0
            assert users[3][response_name_constants.playlist_count] == 0
            assert users[3][response_name_constants.album_count] == 0
            assert users[3][response_name_constants.follower_count] == 0
            assert users[3][response_name_constants.followee_count] == 0
            assert users[3][response_name_constants.repost_count] == 0

            assert users[4]['user_id'] == 5
            assert users[4][response_name_constants.track_count] == 0
            assert users[4][response_name_constants.playlist_count] == 0
            assert users[4][response_name_constants.album_count] == 0
            assert users[4][response_name_constants.follower_count] == 0
            assert users[4][response_name_constants.followee_count] == 0
            assert users[4][response_name_constants.repost_count] == 0

            # Second pass: current_user_id=1, which additionally attaches the
            # follow-relationship and balance fields for that viewer.
            curr_user_ids = [1, 2, 3]
            curr_users = [
                {"user_id": 1, "is_verified": False},
                {"user_id": 2, "is_verified": False},
                {"user_id": 3, "is_verified": False},
            ]
            users = populate_user_metadata(session, curr_user_ids, curr_users, 1)
            assert len(users) == 3

            assert users[0]['user_id'] == 1
            assert users[0][
                response_name_constants.does_current_user_follow] == False
            assert users[0][
                response_name_constants.current_user_followee_follow_count] == 0
            assert users[0][response_name_constants.balance] == '0'
            assert users[0][
                response_name_constants.associated_wallets_balance] == '0'

            assert users[1]['user_id'] == 2
            assert users[1][
                response_name_constants.does_current_user_follow] == True
            assert users[1][
                response_name_constants.current_user_followee_follow_count] == 0
            assert users[1][response_name_constants.balance] == '0'
            assert users[1][
                response_name_constants.associated_wallets_balance] == '0'

            assert users[2]['user_id'] == 3
            assert users[2][
                response_name_constants.does_current_user_follow] == True
            assert users[2][
                response_name_constants.current_user_followee_follow_count] == 1
            assert users[2][response_name_constants.balance] == '0'
            assert users[2][
                response_name_constants.associated_wallets_balance] == '0'
def test_index_aggregate_track_update_with_only_aggregate_track(app):
    """Test that aggregate_track will not be manipulated when there is no other data"""
    with app.app_context():
        db = get_db()

        # Only aggregate_track rows (sentinel counts of 9) and a checkpoint —
        # no tracks/reposts/saves exist, so the updater has nothing to compute.
        entities = {
            "aggregate_track": [
                {
                    "track_id": 1,
                    "repost_count": 9,
                    "save_count": 9,
                },
                {
                    "track_id": 2,
                    "repost_count": 9,
                    "save_count": 9,
                },
                {
                    "track_id": 3,
                    "repost_count": 9,
                    "save_count": 9,
                },
            ],
            "indexing_checkpoints": [{
                "tablename": AGGREGATE_TRACK,
                "last_checkpoint": 9
            }],
        }
        populate_mock_db(db, entities)

        with db.scoped_session() as session:
            results: List[AggregateTrack] = (
                session.query(AggregateTrack).order_by(
                    AggregateTrack.track_id).all())
            assert len(results) == 3, "Test that entities exist as expected"

            # Checkpoint (9) is ahead of any data, so this must be a no-op.
            _update_aggregate_track(session)

            results: List[AggregateTrack] = (
                session.query(AggregateTrack).order_by(
                    AggregateTrack.track_id).all())
            assert (
                len(results) == 3
            ), "Test zero-modifications since last_checkpoint is in the future"

            prev_id_checkpoint = get_last_indexed_checkpoint(
                session, AGGREGATE_TRACK)
            assert prev_id_checkpoint == 9

        # Reset the checkpoint to 0; still no underlying track data, so the
        # pre-seeded aggregate rows must remain untouched.
        entities = {
            "indexing_checkpoints": [{
                "tablename": AGGREGATE_TRACK,
                "last_checkpoint": 0
            }],
        }
        populate_mock_db(db, entities)

        with db.scoped_session() as session:
            results: List[AggregateTrack] = (
                session.query(AggregateTrack).order_by(
                    AggregateTrack.track_id).all())
            assert (
                len(results) == 3
            ), "Test that entities exist as expected, even though checkpoint has been reset"

            _update_aggregate_track(session)

            results: List[AggregateTrack] = (
                session.query(AggregateTrack).order_by(
                    AggregateTrack.track_id).all())
            assert (
                len(results) == 3
            ), "Test that aggregate_track has not been changed due to lack of track data"
            for result in results:
                assert (
                    result.repost_count == 9
                ), "Test that aggregate_track has not been changed due to lack of track data"
                assert (
                    result.save_count == 9
                ), "Test that aggregate_track has not been changed due to lack of track data"

            prev_id_checkpoint = get_last_indexed_checkpoint(
                session, AGGREGATE_TRACK)
            assert prev_id_checkpoint == 0
def test_index_playlist(app):
    """Tests that playlists are indexed correctly"""
    with app.app_context():
        db = get_db()
        cid_metadata_client = CIDMetadataClient({})
        web3 = Web3()
        challenge_event_bus = setup_challenge_bus()
        update_task = UpdateTask(cid_metadata_client, web3, challenge_event_bus)

        with db.scoped_session() as session:
            # ================= Test playlist_created Event =================
            event_type, entry = get_playlist_created_event()

            block_number = random.randint(1, 10000)
            block_timestamp = 1585336422

            # Some sqlalchemy playlist instance
            playlist_record = lookup_playlist_record(
                update_task,
                session,
                entry,
                block_number,
                "0x"  # txhash
            )

            parse_playlist_event(
                None,  # self - not used
                None,  # update_task - not used
                entry,
                event_type,
                playlist_record,
                block_timestamp,
                session,
            )

            assert playlist_record.playlist_owner_id == entry.args._playlistOwnerId
            assert playlist_record.is_private == entry.args._isPrivate
            assert playlist_record.is_album == entry.args._isAlbum

            # The created event stamps every track id with the (integer) block
            # timestamp, and created_at with the same timestamp as a datetime.
            block_datetime = datetime.utcfromtimestamp(block_timestamp)
            block_integer_time = int(block_timestamp)

            playlist_content_array = []
            for track_id in entry.args._trackIds:
                playlist_content_array.append({
                    "track": track_id,
                    "time": block_integer_time
                })

            assert playlist_record.playlist_contents == {
                "track_ids": playlist_content_array
            }
            assert playlist_record.created_at == block_datetime

            # ================= Test playlist_name_updated Event =================
            event_type, entry = get_playlist_name_updated_event()

            assert playlist_record.playlist_name == None
            parse_playlist_event(
                None,  # self - not used
                None,  # update_task - not used
                entry,
                event_type,
                playlist_record,
                block_timestamp,
                session,
            )
            assert playlist_record.playlist_name == entry.args._updatedPlaylistName

            # ================= Test playlist_cover_photo_updated Event =================
            event_type, entry = get_playlist_cover_photo_updated_event()
            parse_playlist_event(
                None,  # self - not used
                None,  # update_task - not used
                entry,
                event_type,
                playlist_record,
                block_timestamp,
                session,
            )
            # Only the sized-variant multihash is set; the legacy field stays None.
            assert playlist_record.playlist_image_sizes_multihash == (
                helpers.multihash_digest_to_cid(
                    entry.args._playlistImageMultihashDigest))
            assert playlist_record.playlist_image_multihash == None

            # ================= Test playlist_description_updated Event =================
            event_type, entry = get_playlist_description_updated_event()
            assert playlist_record.description == None
            parse_playlist_event(
                None,  # self - not used
                None,  # update_task - not used
                entry,
                event_type,
                playlist_record,
                block_timestamp,
                session,
            )
            assert playlist_record.description == entry.args._playlistDescription

            # ================= Test playlist_privacy_updated Event =================
            event_type, entry = get_playlist_privacy_updated_event()
            assert playlist_record.is_private == True
            parse_playlist_event(
                None,  # self - not used
                None,  # update_task - not used
                entry,
                event_type,
                playlist_record,
                block_timestamp,
                session,
            )
            assert playlist_record.is_private == entry.args._updatedIsPrivate

            # ================= Test playlist_track_added Event =================
            event_type, entry = get_playlist_track_added_event(1, 1)
            parse_playlist_event(
                None,  # self - not used
                None,  # update_task - not used
                entry,
                event_type,
                playlist_record,
                12,  # block_timestamp,
                session,
            )
            assert len(playlist_record.playlist_contents["track_ids"]) == 1
            last_playlist_content = playlist_record.playlist_contents["track_ids"][
                -1]
            assert last_playlist_content == {
                "track": entry.args._addedTrackId,
                "time": 12
            }

            # ================= Test playlist_track_added with second track Event =================
            event_type, entry = get_playlist_track_added_event(1, 2)
            parse_playlist_event(
                None,  # self - not used
                None,  # update_task - not used
                entry,
                event_type,
                playlist_record,
                13,  # block_timestamp,
                session,
            )
            assert len(playlist_record.playlist_contents["track_ids"]) == 2
            last_playlist_content = playlist_record.playlist_contents["track_ids"][
                -1]
            assert last_playlist_content == {
                "track": entry.args._addedTrackId,
                "time": 13
            }

            # ================= Test playlist_tracks_ordered Event =================
            event_type, entry = get_playlist_tracks_ordered_event()
            parse_playlist_event(
                None,  # self - not used
                None,  # update_task - not used
                entry,
                event_type,
                playlist_record,
                block_timestamp,
                session,
            )
            # Reordering keeps the original add-times attached to each track.
            assert playlist_record.playlist_contents["track_ids"] == [
                {
                    "track": 2,
                    "time": 13
                },
                {
                    "track": 1,
                    "time": 12
                },
            ]

            # ================= Test playlist_track_delete_event Event =================
            event_type, entry = get_playlist_track_delete_event(1, 1, 12)
            parse_playlist_event(
                None,  # self - not used
                None,  # update_task - not used
                entry,
                event_type,
                playlist_record,
                block_timestamp,
                session,
            )
            assert len(playlist_record.playlist_contents["track_ids"]) == 1
            last_playlist_content = playlist_record.playlist_contents["track_ids"][
                -1]
            assert playlist_record.playlist_contents["track_ids"] == [{
                "track": 2,
                "time": 13
            }]

            # ================= Test playlist_track_delete_event Event =================
            # This should be a no-op
            event_type, entry = get_playlist_track_delete_event(1, 1, 12)
            parse_playlist_event(
                None,  # self - not used
                None,  # update_task - not used
                entry,
                event_type,
                playlist_record,
                block_timestamp,
                session,
            )
            assert len(playlist_record.playlist_contents["track_ids"]) == 1
            assert playlist_record.playlist_contents["track_ids"] == [{
                "track": 2,
                "time": 13
            }]

            # ================= Test playlist_deleted Event =================
            event_type, entry = get_playlist_deleted_event()
            assert playlist_record.is_delete == False
            parse_playlist_event(
                None,  # self - not used
                None,  # update_task - not used
                entry,
                event_type,
                playlist_record,
                block_timestamp,
                session,
            )
            assert playlist_record.is_delete == True
def test_get_prev_track_entries(app):
    """Tests that prev track entries query properly returns previous tracks"""
    with app.app_context():
        db = get_db()

        # Each fixture row is one revision of a track; the trailing comment
        # records the block number populate_mock_db assigns by list position.
        test_entities = {
            "tracks": [
                {
                    "track_id": 1,
                    "is_current": False,
                    "is_unlisted": True,
                    "remix_of": None,
                },  # Block 0
                {
                    "track_id": 2,
                    "is_current": True,
                    "is_unlisted": False,
                    "remix_of": None,
                },  # Block 1
                {
                    "track_id": 3,
                    "is_current": False,
                    "is_unlisted": False,
                    "remix_of": None,
                },  # Block 2
                {
                    "track_id": 4,
                    "is_current": False,
                    "is_unlisted": False,
                    "remix_of": None,
                },  # Block 3
                {
                    "track_id": 5,
                    "is_current": False,
                    "is_unlisted": False,
                    "remix_of": None,
                },  # Block 4
                {
                    "track_id": 6,
                    "is_current": False,
                    "is_unlisted": True,
                    "remix_of": None,
                },  # Block 5
                {
                    "track_id": 1,
                    "is_current": True,
                    "is_unlisted": False,
                    "remix_of": None,
                },  # Block 6
                {
                    "track_id": 3,
                    "is_current": False,
                    "is_unlisted": False,
                    "remix_of": None,
                },  # Block 7
                {
                    "track_id": 6,
                    "is_current": True,
                    "is_unlisted": False,
                    "remix_of": None,
                },  # Block 8
                {
                    "track_id": 4,
                    "is_current": True,
                    "is_unlisted": False,
                    "remix_of": None,
                },  # Block 9
                {
                    "track_id": 3,
                    "is_current": True,
                    "is_unlisted": False,
                    "remix_of": None,
                },  # Block 10
                {
                    "track_id": 5,
                    "is_current": False,
                    "is_unlisted": False,
                    "remix_of": None,
                },  # Block 11
                {
                    "track_id": 5,
                    "is_current": True,
                    "is_unlisted": False,
                    "remix_of": None,
                },  # Block 12
            ],
        }

        populate_mock_db(db, test_entities)

        with db.scoped_session() as session:
            # Make sure it doesn't return tracks if none are passed in
            empty_entries = get_prev_track_entries(session, [])
            assert len(empty_entries) == 0

            # Make sure that it fetches all previous tracks
            # (including a duplicated entry for track 6)
            entries = [
                Track(track_id=6, blocknumber=8),
                Track(track_id=6, blocknumber=8),
                Track(track_id=3, blocknumber=10),
                Track(track_id=1, blocknumber=6),
                Track(track_id=4, blocknumber=9),
                Track(track_id=5, blocknumber=12),
            ]
            prev_entries = get_prev_track_entries(session, entries)
            assert len(prev_entries) <= len(entries)
            for prev_entry in prev_entries:
                # Find the query entry this previous revision corresponds to.
                entry = next(e for e in entries
                             if e.track_id == prev_entry.track_id)
                assert entry.track_id == prev_entry.track_id
                assert entry.blocknumber > prev_entry.blocknumber
                # previous track with id 3 should have a block number of 7, not 2
                if prev_entry.track_id == 3:
                    assert prev_entry.blocknumber == 7
                # previous track with id 5 should have a block number of 11, not 4
                if prev_entry.track_id == 5:
                    assert prev_entry.blocknumber == 11

            # Make sure that it properly fetches the track before the one passed
            single_entry = [Track(track_id=5, blocknumber=11)]
            prev_id_5_track = get_prev_track_entries(session, single_entry)[0]
            assert prev_id_5_track.track_id == 5
            assert prev_id_5_track.blocknumber < 11
def test_get_repost_feed_for_user_limit_bounds(app):
    """
    Tests that a repost feed for a user can be queried and respect a limit
    with deleted tracks.
    """
    with app.app_context():
        db = get_db()

        test_entities = {
            "reposts": [
                # Note these reposts are in chronological order in addition
                # so the repost feed should pull them "backwards" for reverse chronological
                # sort order.
                {"user_id": 1, "repost_item_id": 5, "repost_type": "track"},
                {"user_id": 1, "repost_item_id": 2, "repost_type": "track"},
                {"user_id": 1, "repost_item_id": 3, "repost_type": "track"},
                {"user_id": 1, "repost_item_id": 1, "repost_type": "track"},
                {"user_id": 1, "repost_item_id": 4, "repost_type": "track"},
                {"user_id": 1, "repost_item_id": 4, "repost_type": "playlist"},
                {"user_id": 1, "repost_item_id": 8, "repost_type": "album"},
                {"user_id": 1, "repost_item_id": 6, "repost_type": "track"},
            ],
            "tracks": [
                # Track 1 is deleted, so its repost should be filtered out.
                {"track_id": 1, "title": "track 1", "is_delete": True},
                {"track_id": 2, "title": "track 2"},
                {"track_id": 3, "title": "track 3"},
                {"track_id": 4, "title": "track 4"},
                {"track_id": 5, "title": "track 5"},
                {"track_id": 6, "title": "track 6"},
                {"track_id": 7, "title": "track 7"},
                {"track_id": 8, "title": "track 8"},
            ],
            "playlists": [
                {"playlist_id": 1, "playlist_name": "playlist 1"},
                {"playlist_id": 2, "playlist_name": "playlist 2"},
                {"playlist_id": 3, "playlist_name": "playlist 3"},
                {"playlist_id": 4, "playlist_name": "playlist 4"},
                {"playlist_id": 5, "playlist_name": "playlist 5"},
                {"playlist_id": 6, "playlist_name": "playlist 6"},
                {"playlist_id": 7, "playlist_name": "playlist 7"},
                {"playlist_id": 8, "playlist_name": "album 8"},
            ],
        }

        populate_mock_db(db, test_entities)

        with db.scoped_session() as session:
            repost_feed = _get_repost_feed_for_user(session, 1, {
                "limit": 5,
                "offset": 0
            })

        # Query for 5 reposts. The problem is the 5th one was deleted, so
        # we only return 4 here. This is broken.
        # TODO fix me.
        assert repost_feed[0]["title"] == "track 6"
        assert repost_feed[1]["playlist_name"] == "album 8"
        assert repost_feed[2]["playlist_name"] == "playlist 4"
        assert repost_feed[3]["title"] == "track 4"
        # Should skip track 1 because it is deleted
        assert repost_feed[4]["title"] == "track 3"
def test_trending_challenge_job(app):
    # End-to-end check that enqueue_trending_challenges produces trending
    # results and per-rank user challenges for tracks and playlists.
    with app.app_context():
        db = get_db()
        redis_conn = redis.Redis.from_url(url=REDIS_URL)

        test_entities = {
            # Tracks 11-15 receive the heaviest play counts below and are the
            # expected trending winners; track 9 is unlisted.
            "tracks": [
                {"track_id": 1, "owner_id": 1},
                {"track_id": 2, "owner_id": 2},
                {"track_id": 3, "owner_id": 3},
                {"track_id": 4, "owner_id": 4},
                {"track_id": 5, "owner_id": 5},
                {"track_id": 6, "owner_id": 2},
                {"track_id": 7, "owner_id": 3},
                {"track_id": 8, "owner_id": 3},
                {"track_id": 9, "is_unlisted": True, "owner_id": 3},
                {"track_id": 11, "owner_id": 1},
                {"track_id": 12, "owner_id": 2},
                {"track_id": 13, "owner_id": 3},
                {"track_id": 14, "owner_id": 4},
                {"track_id": 15, "owner_id": 5},
            ],
            "playlists": [
                {
                    "playlist_id": 1,
                    "playlist_owner_id": 1,
                    "playlist_name": "name",
                    "description": "description",
                    "playlist_contents": {
                        "track_ids": [
                            {"track": 1, "time": 1},
                            {"track": 2, "time": 2},
                            {"track": 3, "time": 3},
                        ]
                    },
                },
                {
                    "playlist_id": 2,
                    "playlist_owner_id": 2,
                    "playlist_name": "name",
                    "description": "description",
                    "playlist_contents": {
                        "track_ids": [
                            {"track": 1, "time": 1},
                            {"track": 2, "time": 2},
                            {"track": 3, "time": 3},
                        ]
                    },
                },
                {
                    "playlist_id": 3,
                    "is_album": True,
                    "playlist_owner_id": 3,
                    "playlist_name": "name",
                    "description": "description",
                    "playlist_contents": {
                        "track_ids": [
                            {"track": 1, "time": 1},
                            {"track": 2, "time": 2},
                            {"track": 3, "time": 3},
                        ]
                    },
                },
                {
                    "playlist_id": 4,
                    "playlist_owner_id": 4,
                    "playlist_name": "name",
                    "description": "description",
                    "playlist_contents": {
                        "track_ids": [
                            {"track": 1, "time": 1},
                            {"track": 2, "time": 2},
                            {"track": 3, "time": 3},
                        ]
                    },
                },
                {
                    "playlist_id": 5,
                    "playlist_owner_id": 5,
                    "playlist_name": "name",
                    "description": "description",
                    "playlist_contents": {
                        "track_ids": [
                            {"track": 1, "time": 1},
                            {"track": 2, "time": 2},
                            {"track": 3, "time": 3},
                        ]
                    },
                },
            ],
            "users": [
                {"user_id": 1, "handle": "user1"},
                {"user_id": 2, "handle": "user2"},
                {"user_id": 3, "handle": "user3"},
                {"user_id": 4, "handle": "user4"},
                {"user_id": 5, "handle": "user5"},
            ],
            # Follows are backdated past a week so they count as established.
            "follows": [
                {
                    "follower_user_id": 1,
                    "followee_user_id": 2,
                    "created_at": datetime.now() - timedelta(days=8),
                },
                {
                    "follower_user_id": 1,
                    "followee_user_id": 3,
                    "created_at": datetime.now() - timedelta(days=8),
                },
                {
                    "follower_user_id": 2,
                    "followee_user_id": 3,
                    "created_at": datetime.now() - timedelta(days=8),
                },
                {
                    "follower_user_id": 2,
                    "followee_user_id": 4,
                    "created_at": datetime.now() - timedelta(days=8),
                },
                {
                    "follower_user_id": 3,
                    "followee_user_id": 6,
                    "created_at": datetime.now() - timedelta(days=8),
                },
                {
                    "follower_user_id": 4,
                    "followee_user_id": 5,
                    "created_at": datetime.now() - timedelta(days=8),
                },
                {
                    "follower_user_id": 5,
                    "followee_user_id": 1,
                    "created_at": datetime.now() - timedelta(days=8),
                },
                {
                    "follower_user_id": 6,
                    "followee_user_id": 3,
                    "created_at": datetime.now() - timedelta(days=8),
                },
            ],
            "reposts": [
                {"repost_item_id": 1, "repost_type": "track", "user_id": 2},
                {"repost_item_id": 1, "repost_type": "playlist", "user_id": 2},
                {"repost_item_id": 3, "repost_type": "track", "user_id": 3},
                {"repost_item_id": 1, "repost_type": "playlist", "user_id": 3},
                {"repost_item_id": 4, "repost_type": "track", "user_id": 1},
                {"repost_item_id": 5, "repost_type": "track", "user_id": 1},
                {"repost_item_id": 6, "repost_type": "track", "user_id": 1},
            ],
            "saves": [
                {"save_item_id": 1, "save_type": "track", "user_id": 2},
                {"save_item_id": 1, "save_type": "track", "user_id": 3},
                {"save_item_id": 4, "save_type": "track", "user_id": 1},
                {"save_item_id": 5, "save_type": "track", "user_id": 1},
                {"save_item_id": 6, "save_type": "track", "user_id": 1},
                {"save_item_id": 1, "save_type": "playlist", "user_id": 4},
                {"save_item_id": 2, "save_type": "playlist", "user_id": 3},
                {"save_item_id": 3, "save_type": "playlist", "user_id": 2},
                {"save_item_id": 4, "save_type": "playlist", "user_id": 1},
                {"save_item_id": 5, "save_type": "playlist", "user_id": 2},
            ],
            # One play row per listen; tracks 11-15 get 200 each.
            "plays": [{"item_id": 1} for _ in range(55)] +
                     [{"item_id": 2} for _ in range(60)] +
                     [{"item_id": 3} for _ in range(70)] +
                     [{"item_id": 4} for _ in range(90)] +
                     [{"item_id": 5} for _ in range(80)] +
                     [{"item_id": 6} for _ in range(40)] +
                     [{"item_id": 11} for _ in range(200)] +
                     [{"item_id": 12} for _ in range(200)] +
                     [{"item_id": 13} for _ in range(200)] +
                     [{"item_id": 14} for _ in range(200)] +
                     [{"item_id": 15} for _ in range(200)],
        }

        populate_mock_db(db, test_entities, BLOCK_NUMBER + 1)

        bus = ChallengeEventBus(redis_conn)

        # Register events with the bus
        bus.register_listener(
            ChallengeEvent.trending_underground,
            trending_underground_track_challenge_manager,
        )
        bus.register_listener(ChallengeEvent.trending_track,
                              trending_track_challenge_manager)
        bus.register_listener(ChallengeEvent.trending_playlist,
                              trending_playlist_challenge_manager)

        trending_date = datetime.fromisoformat("2021-08-20")

        with db.scoped_session() as session:
            # Build every aggregate/matview the trending strategies read from.
            _update_aggregate_plays(session)
            _update_aggregate_track(session)
            _update_aggregate_user(session)
            session.execute("REFRESH MATERIALIZED VIEW aggregate_interval_plays")
            session.execute("REFRESH MATERIALIZED VIEW trending_params")
            trending_track_versions = trending_strategy_factory.get_versions_for_type(
                TrendingType.TRACKS).keys()

            for version in trending_track_versions:
                strategy = trending_strategy_factory.get_strategy(
                    TrendingType.TRACKS, version)
                if strategy.use_mat_view:
                    strategy.update_track_score_query(session)

            session.commit()

        enqueue_trending_challenges(db, redis_conn, bus, trending_date)

        with db.scoped_session() as session:
            # Activate the three trending challenges so the queued events
            # produce UserChallenge rows when processed.
            session.query(Challenge).filter(
                or_(
                    Challenge.id == "tp",
                    Challenge.id == "tt",
                    Challenge.id == "tut",
                )).update({
                    "active": True,
                    "starting_block": BLOCK_NUMBER
                })

            bus.process_events(session)
            session.flush()

            trending_tracks = (session.query(TrendingResult).filter(
                TrendingResult.type == str(TrendingType.TRACKS)).all())
            assert len(trending_tracks) == 5

            user_trending_tracks_challenges = (session.query(UserChallenge).filter(
                UserChallenge.challenge_id == "tt").all())
            assert len(user_trending_tracks_challenges) == 5

            # Exactly one challenge per rank 1-5 for the trending date.
            ranks = {
                "2021-08-20:1",
                "2021-08-20:2",
                "2021-08-20:3",
                "2021-08-20:4",
                "2021-08-20:5",
            }
            for challenge in user_trending_tracks_challenges:
                assert challenge.specifier in ranks
                ranks.remove(challenge.specifier)

            trending_playlists = (session.query(TrendingResult).filter(
                TrendingResult.type == str(TrendingType.PLAYLISTS)).all())
            assert len(trending_playlists) == 5
def setup_search(app_module):
    """Seed the database with search fixtures, refresh the lexeme matviews,
    and run the external es-indexer until it reports catch-up.

    The es-indexer subprocess is expected to run indefinitely; we give it a
    5-second budget and treat the timeout as the success path, checking its
    captured output for the "catchup done" marker.

    Raises:
        Exception: if the indexer exits on its own within the timeout, or if
            it times out without having logged "catchup done".
    """
    with app_module.app_context():
        db = get_db()

        # Import app so that it'll run migrations against the db
        now = datetime.now()
        blocks = [
            Block(
                blockhash=hex(1),
                number=1,
                parenthash="0x01",
                is_current=False,
            ),
            Block(
                blockhash=hex(2),
                number=2,
                parenthash="0x02",
                is_current=False,
            ),
            Block(
                blockhash=hex(3),
                number=3,
                parenthash="0x03",
                is_current=True,
            ),
        ]
        tracks = [
            Track(
                blockhash=hex(1),
                blocknumber=1,
                track_id=1,
                is_current=True,
                is_delete=False,
                owner_id=1,
                route_id="",
                track_segments=[],
                genre="",
                updated_at=now,
                created_at=now,
                is_unlisted=False,
                title="the track 1",
                download={"cid": None, "is_downloadable": False, "requires_follow": False},
            ),
            Track(
                blockhash=hex(2),
                blocknumber=2,
                track_id=2,
                is_current=True,
                is_delete=False,
                owner_id=2,
                route_id="",
                track_segments=[],
                genre="",
                updated_at=now,
                created_at=now,
                is_unlisted=False,
                title="the track 2",
                download={"cid": None, "is_downloadable": True, "requires_follow": False},
            ),
            Track(
                blockhash=hex(3),
                blocknumber=3,
                track_id=3,
                is_current=True,
                is_delete=False,
                owner_id=1,
                route_id="",
                track_segments=[],
                genre="",
                updated_at=now,
                created_at=now,
                is_unlisted=False,
                title="xyz",
                download={"cid": None, "is_downloadable": True, "requires_follow": False},
            ),
        ]

        # need users for the lexeme dict to work
        users = [
            User(
                blockhash=hex(1),
                blocknumber=1,
                user_id=1,
                is_current=True,
                handle="",
                wallet="",
                name="user 1",
                updated_at=now,
                created_at=now,
            ),
            User(
                blockhash=hex(2),
                blocknumber=2,
                user_id=2,
                is_current=True,
                handle="",
                name="user 2",
                wallet="",
                updated_at=now,
                created_at=now,
            ),
            User(
                blockhash=hex(3),
                blocknumber=3,
                user_id=3,
                is_current=True,
                handle="",
                wallet="",
                name="fdwea",
                updated_at=now,
                created_at=now,
            ),
        ]

        follows = [
            Follow(
                blockhash=hex(1),
                blocknumber=1,
                follower_user_id=2,
                followee_user_id=1,
                is_current=True,
                is_delete=False,
                created_at=now,
            )
        ]

        playlists = [
            Playlist(
                blockhash=hex(1),
                blocknumber=1,
                playlist_id=1,
                playlist_owner_id=1,
                is_album=False,
                is_private=False,
                playlist_name="playlist 1",
                playlist_contents={"track_ids": [{"track": 1, "time": 1}]},
                is_current=True,
                is_delete=False,
                updated_at=now,
                created_at=now,
            ),
            Playlist(
                blockhash=hex(2),
                blocknumber=2,
                playlist_id=2,
                playlist_owner_id=2,
                is_album=True,
                is_private=False,
                playlist_name="album 1",
                playlist_contents={"track_ids": [{"track": 2, "time": 2}]},
                is_current=True,
                is_delete=False,
                updated_at=now,
                created_at=now,
            ),
        ]

        saves = [
            Save(
                blockhash=hex(1),
                blocknumber=1,
                user_id=1,
                save_item_id=1,
                save_type=SaveType.track,
                created_at=now,
                is_current=True,
                is_delete=False,
            ),
            Save(
                blockhash=hex(1),
                blocknumber=1,
                user_id=1,
                save_item_id=1,
                save_type=SaveType.playlist,
                created_at=now,
                is_current=True,
                is_delete=False,
            ),
            Save(
                blockhash=hex(1),
                blocknumber=1,
                user_id=1,
                save_item_id=2,
                save_type=SaveType.album,
                created_at=now,
                is_current=True,
                is_delete=False,
            ),
        ]

        balances = [
            UserBalance(
                user_id=1,
                balance=0,
                associated_wallets_balance=0,
                associated_sol_wallets_balance=0,
                waudio=0,
            )
        ]

        with db.scoped_session() as session:
            # Blocks must exist (and be flushed) before the rows that
            # foreign-key onto them.
            for block in blocks:
                session.add(block)
                session.flush()
            for track in tracks:
                session.add(track)
            for user in users:
                session.add(user)
                session.flush()
            for follow in follows:
                session.add(follow)
                session.flush()
            for playlist in playlists:
                session.add(playlist)
                session.flush()
            for save in saves:
                session.add(save)
                session.flush()
            for balance in balances:
                session.add(balance)
                session.flush()

            # Refresh the lexeme matview
            _update_aggregate_track(session)
            session.execute("REFRESH MATERIALIZED VIEW track_lexeme_dict;")
            session.execute(
                UPDATE_AGGREGATE_USER_QUERY, {"prev_indexed_aggregate_block": 0}
            )
            session.execute("REFRESH MATERIALIZED VIEW user_lexeme_dict;")
            session.execute("REFRESH MATERIALIZED VIEW aggregate_playlist;")
            session.execute("REFRESH MATERIALIZED VIEW playlist_lexeme_dict;")
            session.execute("REFRESH MATERIALIZED VIEW album_lexeme_dict;")

        try:
            output = subprocess.run(
                ["npm", "run", "dev"],
                env=os.environ,
                capture_output=True,
                text=True,
                cwd="es-indexer",
                timeout=5,
            )
            raise Exception(
                f"Elasticsearch indexing stopped: {output.stderr}. With env: {os.environ}"
            )
        except subprocess.TimeoutExpired as timeout:
            # subprocess.run was invoked with text=True, so TimeoutExpired.output
            # is already a str (or None if nothing was captured); calling
            # .decode() on it would raise AttributeError and mask the real
            # failure. Check the captured text directly instead.
            captured = timeout.output or ""
            if "catchup done" not in captured:
                raise Exception("Elasticsearch failed to index")
def test_catches_exceptions_in_single_processor(app):
    """Ensure that if a single processor fails, the others still succeed"""
    with app.app_context():
        db = get_db()
        redis_conn = redis.Redis.from_url(url=REDIS_URL)
        bus = ChallengeEventBus(redis_conn)

        with db.scoped_session() as session:
            session.add_all([
                Challenge(
                    id="test_challenge_1",
                    type=ChallengeType.numeric,
                    amount="5",
                    step_count=3,
                    active=True,
                ),
                Challenge(
                    id="test_challenge_2",
                    type=ChallengeType.numeric,
                    amount="5",
                    step_count=3,
                    active=True,
                ),
            ])
            session.commit()

            # One healthy manager and one whose updater raises, listening on
            # separate event types.
            correct_manager = ChallengeManager("test_challenge_1", DefaultUpdater())
            broken_manager = ChallengeManager("test_challenge_2", BrokenUpdater())
            TEST_EVENT = "TEST_EVENT"
            TEST_EVENT_2 = "TEST_EVENT_2"
            bus.register_listener(TEST_EVENT, correct_manager)
            bus.register_listener(TEST_EVENT_2, broken_manager)

            with bus.use_scoped_dispatch_queue():
                # dispatch the broken one first
                bus.dispatch(TEST_EVENT_2, 101, 1)
                bus.dispatch(TEST_EVENT, 101, 1)
            try:
                bus.process_events(session)
            except Exception as err:
                # Catch only Exception (a bare `except:` would also swallow
                # SystemExit/KeyboardInterrupt) and chain the original error
                # so the real failure is visible in the report.
                raise Exception("Shouldn't have propagated error!") from err

            challenge_1_state = correct_manager.get_user_challenge_state(
                session, ["1"])
            # Make sure that the 'correct_manager' still executes
            assert len(challenge_1_state) == 1
            assert challenge_1_state[0].current_step_count == 1

            # Make sure broken manager didn't do anything
            challenge_2_state = broken_manager.get_user_challenge_state(
                session, ["1"])
            assert len(challenge_2_state) == 0

            # Try the other order
            with bus.use_scoped_dispatch_queue():
                # dispatch the correct one first
                bus.dispatch(TEST_EVENT, 101, 1)
                bus.dispatch(TEST_EVENT_2, 101, 1)
            try:
                bus.process_events(session)
            except Exception as err:
                raise Exception("Shouldn't have propagated error!") from err

            challenge_1_state = correct_manager.get_user_challenge_state(
                session, ["1"])
            assert len(challenge_1_state) == 1
            assert challenge_1_state[0].current_step_count == 2

            # Make sure broken manager didn't do anything
            challenge_2_state = broken_manager.get_user_challenge_state(
                session, ["1"])
            assert len(challenge_2_state) == 0