def get_repost_feed_for_user(user_id):
    feed_results = {}
    db = get_db()
    with db.scoped_session() as session:
        # query all reposts by user
        repost_query = (
            session.query(Repost)
            .filter(
                Repost.is_current == True,
                Repost.is_delete == False,
                Repost.user_id == user_id)
            .order_by(desc(Repost.created_at)))
        reposts = paginate_query(repost_query).all()

        # get track reposts from above
        track_reposts = [r for r in reposts if r.repost_type == RepostType.track]
        # get reposted track ids
        repost_track_ids = [r.repost_item_id for r in track_reposts]

        # get playlist reposts from above
        playlist_reposts = [
            r for r in reposts
            if r.repost_type == RepostType.playlist or r.repost_type == RepostType.album
        ]
        # get reposted playlist ids
        repost_playlist_ids = [r.repost_item_id for r in playlist_reposts]

        track_reposts = helpers.query_result_to_list(track_reposts)
        playlist_reposts = helpers.query_result_to_list(playlist_reposts)

        # build track/playlist id --> repost dict from repost lists
        track_repost_dict = {repost["repost_item_id"]: repost for repost in track_reposts}
        playlist_repost_dict = {repost["repost_item_id"]: repost for repost in playlist_reposts}

        # query tracks for repost_track_ids
        track_query = (
            session.query(Track)
            .filter(
                Track.is_current == True,
                Track.track_id.in_(repost_track_ids))
            .order_by(desc(Track.created_at)))
        tracks = paginate_query(track_query).all()
        tracks = helpers.query_result_to_list(tracks)

        # get track ids
        track_ids = [track["track_id"] for track in tracks]

        # query playlists for repost_playlist_ids
        playlist_query = (
            session.query(Playlist)
            .filter(
                Playlist.is_current == True,
                Playlist.is_private == False,
                Playlist.playlist_id.in_(repost_playlist_ids))
            .order_by(desc(Playlist.created_at)))
        playlists = paginate_query(playlist_query).all()
        playlists = helpers.query_result_to_list(playlists)

        # get playlist ids
        playlist_ids = [playlist["playlist_id"] for playlist in playlists]

        # get repost counts by track and playlist IDs
        repost_counts = get_repost_counts(session, False, True, track_ids + playlist_ids, None)
        track_repost_counts = {
            repost_item_id: repost_count
            for (repost_item_id, repost_count, repost_type) in repost_counts
            if repost_type == RepostType.track
        }
        playlist_repost_counts = {
            repost_item_id: repost_count
            for (repost_item_id, repost_count, repost_type) in repost_counts
            if repost_type in (RepostType.playlist, RepostType.album)
        }

        # get save counts for tracks and playlists
        save_counts = get_save_counts(session, False, True, track_ids + playlist_ids, None)
        track_save_counts = {
            save_item_id: save_count
            for (save_item_id, save_count, save_type) in save_counts
            if save_type == SaveType.track
        }
        playlist_save_counts = {
            save_item_id: save_count
            for (save_item_id, save_count, save_type) in save_counts
            if save_type in (SaveType.playlist, SaveType.album)
        }

        current_user_id = get_current_user_id(required=False)
        requested_user_is_current_user = False
        user_reposted_track_ids = {}
        user_reposted_playlist_ids = {}
        user_saved_track_dict = {}
        user_saved_playlist_dict = {}
        followees_track_repost_dict = {}
        followees_playlist_repost_dict = {}
        if current_user_id:
            # if current user == user_id, skip current_user_reposted queries and default to true
            if current_user_id == user_id:
                requested_user_is_current_user = True
            else:
                user_reposted_query = (
                    session.query(Repost.repost_item_id, Repost.repost_type)
                    .filter(
                        Repost.is_current == True,
                        Repost.is_delete == False,
                        Repost.user_id == current_user_id,
                        or_(Repost.repost_item_id.in_(track_ids),
                            Repost.repost_item_id.in_(playlist_ids)))
                    .all())

                # generate dictionary of track id --> current user reposted status
                user_reposted_track_ids = {
                    r[0]: True for r in user_reposted_query if r[1] == RepostType.track
                }
                # generate dictionary of playlist id --> current user reposted status
                user_reposted_playlist_ids = {
                    r[0]: True for r in user_reposted_query
                    if r[1] == RepostType.album or r[1] == RepostType.playlist
                }

            # build dict of tracks and playlists that current user has saved
            #   - query saves by current user from relevant tracks/playlists
            user_saved_query = (
                session.query(Save.save_item_id, Save.save_type)
                .filter(
                    Save.is_current == True,
                    Save.is_delete == False,
                    Save.user_id == current_user_id,
                    or_(Save.save_item_id.in_(track_ids),
                        Save.save_item_id.in_(playlist_ids)))
                .all())
            #   - build dict of track id --> current user save status
            user_saved_track_dict = {
                save[0]: True for save in user_saved_query if save[1] == SaveType.track
            }
            #   - build dict of playlist id --> current user save status
            user_saved_playlist_dict = {
                save[0]: True for save in user_saved_query
                if save[1] == SaveType.playlist or save[1] == SaveType.album
            }

            # query current user's followees
            followee_user_ids = (
                session.query(Follow.followee_user_id)
                .filter(
                    Follow.follower_user_id == current_user_id,
                    Follow.is_current == True,
                    Follow.is_delete == False)
                .all())
            followee_user_ids = [f[0] for f in followee_user_ids]

            # query all followees' reposts
            followee_repost_query = (
                session.query(Repost)
                .filter(
                    Repost.is_current == True,
                    Repost.is_delete == False,
                    Repost.user_id.in_(followee_user_ids),
                    or_(Repost.repost_item_id.in_(repost_track_ids),
                        Repost.repost_item_id.in_(repost_playlist_ids)))
                .order_by(desc(Repost.created_at)))
            followee_reposts = paginate_query(followee_repost_query).all()
            followee_reposts = helpers.query_result_to_list(followee_reposts)

            # build dict of track id --> reposts from followee track reposts
            for repost in followee_reposts:
                if repost["repost_type"] == RepostType.track:
                    if repost["repost_item_id"] not in followees_track_repost_dict:
                        followees_track_repost_dict[repost["repost_item_id"]] = []
                    followees_track_repost_dict[repost["repost_item_id"]].append(repost)

            # build dict of playlist id --> reposts from followee playlist reposts
            for repost in followee_reposts:
                if (repost["repost_type"] == RepostType.playlist
                        or repost["repost_type"] == RepostType.album):
                    if repost["repost_item_id"] not in followees_playlist_repost_dict:
                        followees_playlist_repost_dict[repost["repost_item_id"]] = []
                    followees_playlist_repost_dict[repost["repost_item_id"]].append(repost)

        # populate metadata for track entries
        for track in tracks:
            track[response_name_constants.repost_count] = \
                track_repost_counts.get(track["track_id"], 0)
            track[response_name_constants.save_count] = \
                track_save_counts.get(track["track_id"], 0)
            track[response_name_constants.has_current_user_reposted] = (
                True if requested_user_is_current_user
                else user_reposted_track_ids.get(track["track_id"], False))
            track[response_name_constants.has_current_user_saved] = \
                user_saved_track_dict.get(track["track_id"], False)
            track[response_name_constants.followee_reposts] = \
                followees_track_repost_dict.get(track["track_id"], [])
            track[response_name_constants.activity_timestamp] = \
                track_repost_dict[track["track_id"]]["created_at"]

        # populate metadata for playlist entries
        for playlist in playlists:
            playlist[response_name_constants.repost_count] = \
                playlist_repost_counts.get(playlist["playlist_id"], 0)
            playlist[response_name_constants.save_count] = \
                playlist_save_counts.get(playlist["playlist_id"], 0)
            playlist[response_name_constants.has_current_user_reposted] = (
                True if requested_user_is_current_user
                else user_reposted_playlist_ids.get(playlist["playlist_id"], False))
            playlist[response_name_constants.has_current_user_saved] = \
                user_saved_playlist_dict.get(playlist["playlist_id"], False)
            playlist[response_name_constants.followee_reposts] = \
                followees_playlist_repost_dict.get(playlist["playlist_id"], [])
            playlist[response_name_constants.activity_timestamp] = \
                playlist_repost_dict[playlist["playlist_id"]]["created_at"]

        unsorted_feed = tracks + playlists
        # sort feed by repost timestamp desc
        feed_results = sorted(
            unsorted_feed,
            key=lambda entry: entry[response_name_constants.activity_timestamp],
            reverse=True)

    return api_helpers.success_response(feed_results)
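
# Illustrative sketch (hypothetical data, not used by the functions above): the repost
# feed interleaves reposted tracks and playlists purely by `activity_timestamp`
# (the reposting user's repost time), newest first, using the same sort key as above.
def _example_repost_feed_sort():
    entries = [
        {"track_id": 1, "activity_timestamp": "2021-03-02T10:00:00"},
        {"playlist_id": 7, "activity_timestamp": "2021-03-03T09:30:00"},
        {"track_id": 2, "activity_timestamp": "2021-03-01T12:00:00"},
    ]
    # ISO-8601 strings sort lexicographically in chronological order
    feed = sorted(entries, key=lambda e: e["activity_timestamp"], reverse=True)
    return feed  # playlist 7, then track 1, then track 2
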
def generate_trending(session, time, genre, limit, offset, strategy):
    score_params = strategy.get_score_params()
    xf = score_params["xf"]
    pt = score_params["pt"]
    nm = score_params["nm"] if "nm" in score_params else 1

    # Get listen counts
    listen_counts = get_listen_counts(session, time, genre, limit, offset, nm)

    track_ids = [track[response_name_constants.track_id] for track in listen_counts]

    # Generate track id -> created_at date
    track_created_at_dict = {
        record["track_id"]: record["created_at"] for record in listen_counts
    }

    # Query repost counts
    repost_counts = get_repost_counts(session, False, True, track_ids, None)
    # Generate track_id --> repost_count mapping
    track_repost_counts = {
        repost_item_id: repost_count
        for (repost_item_id, repost_count, repost_type) in repost_counts
        if repost_type == RepostType.track
    }

    # Query repost count with respect to rolling time frame in URL
    # (e.g. /trending/week -> window = rolling week)
    track_repost_counts_for_time = get_repost_counts(
        session, False, True, track_ids, None, None, time)
    # Generate track_id --> windowed_repost_count mapping
    track_repost_counts_for_time = {
        repost_item_id: repost_count
        for (repost_item_id, repost_count, repost_type) in track_repost_counts_for_time
        if repost_type == RepostType.track
    }

    # Query follower info for each track owner
    # Query each track owner
    track_owners_query = (
        session.query(Track.track_id, Track.owner_id)
        .filter(
            Track.is_current == True,
            Track.is_unlisted == False,
            Track.stem_of == None,
            Track.track_id.in_(track_ids),
        )).all()

    # Generate track_id <-> owner_id mapping
    track_owner_dict = dict(track_owners_query)
    # Generate list of owner ids
    track_owner_list = [owner_id for (track_id, owner_id) in track_owners_query]

    # build dict of owner_id --> follower_count
    follower_counts = (
        session.query(Follow.followee_user_id, func.count(Follow.followee_user_id))
        .filter(
            Follow.is_current == True,
            Follow.is_delete == False,
            Follow.followee_user_id.in_(track_owner_list),
        )
        .group_by(Follow.followee_user_id)
        .all())
    follower_count_dict = {
        user_id: follower_count
        for (user_id, follower_count) in follower_counts
        if follower_count > pt
    }

    # Query save counts
    save_counts = get_save_counts(session, False, True, track_ids, None)
    # Generate track_id --> save_count mapping
    track_save_counts = {
        save_item_id: save_count
        for (save_item_id, save_count, save_type) in save_counts
        if save_type == SaveType.track
    }

    # Query save counts with respect to rolling time frame in URL
    # (e.g. /trending/week -> window = rolling week)
    save_counts_for_time = get_save_counts(
        session, False, True, track_ids, None, None, time)
    # Generate track_id --> windowed_save_count mapping
    track_save_counts_for_time = {
        save_item_id: save_count
        for (save_item_id, save_count, save_type) in save_counts_for_time
        if save_type == SaveType.track
    }

    karma_query = get_karma(session, tuple(track_ids), strategy, None, False, xf, strategy)
    karma_counts_for_id = dict(karma_query)

    trending_tracks = []
    for track_entry in listen_counts:
        track_id = track_entry[response_name_constants.track_id]

        # Populate repost counts
        track_entry[response_name_constants.repost_count] = \
            track_repost_counts.get(track_id, 0)
        # Populate repost counts with respect to time
        track_entry[response_name_constants.windowed_repost_count] = \
            track_repost_counts_for_time.get(track_id, 0)
        # Populate save counts
        track_entry[response_name_constants.save_count] = \
            track_save_counts.get(track_id, 0)
        # Populate save counts with respect to time
        track_entry[response_name_constants.windowed_save_count] = \
            track_save_counts_for_time.get(track_id, 0)

        # Populate owner follower count
        owner_id = track_owner_dict[track_id]
        owner_follow_count = follower_count_dict.get(owner_id, 0)
        track_entry[response_name_constants.track_owner_id] = owner_id
        track_entry[response_name_constants.owner_follower_count] = owner_follow_count

        # Populate created at timestamps
        if track_id in track_created_at_dict:
            # datetime needs to be in isoformat for json.dumps() in `update_trending_cache()` to
            # properly process the dp response and add to redis cache
            # timespec specifies additional components of the time to include
            track_entry[response_name_constants.created_at] = \
                track_created_at_dict[track_id].isoformat(timespec="seconds")
        else:
            track_entry[response_name_constants.created_at] = None

        track_entry["karma"] = karma_counts_for_id.get(track_id, 0)

        trending_tracks.append(track_entry)

    final_resp = {}
    final_resp["listen_counts"] = trending_tracks
    return final_resp
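
# Illustrative sketch (hypothetical values, not used by the functions above): why
# `created_at` is converted with isoformat(timespec="seconds") before caching. A raw
# datetime is not JSON-serializable, while the ISO string is, which is what the
# downstream json.dumps()-based trending cache relies on.
def _example_created_at_serialization():
    import json
    from datetime import datetime

    created_at = datetime(2021, 3, 1, 12, 30, 45)
    try:
        json.dumps({"created_at": created_at})
    except TypeError:
        pass  # datetime objects cannot be dumped directly
    return json.dumps({"created_at": created_at.isoformat(timespec="seconds")})
    # '{"created_at": "2021-03-01T12:30:45"}'
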
def get_scorable_playlist_data(session, time_range, strategy):
    """Gets data about playlists to be scored.

    Returns:
        Array<{
            "playlist_id": number
            "created_at": string
            "owner_id": string
            "windowed_save_count": number
            "save_count": number
            "repost_count": number
            "windowed_repost_count": number
            "listens": number (always 1)
        }>
    """
    score_params = strategy.get_score_params()
    zq = score_params['zq']
    xf = score_params['xf']
    pt = score_params['pt']
    mt = score_params['mt']

    delta = time_delta_map.get(time_range) or time_delta_map.get('week')

    # Get all playlists saved within time range (windowed_save_count):
    # Queries Playlists joined with Saves,
    # where a given playlist was saved at least once in the past `time_delta`.
    # Limits to `TRENDING_LIMIT` and sorts by saves for later scoring.
    playlists = (
        session.query(
            Save.save_item_id,
            Playlist.created_at,
            Playlist.playlist_owner_id,
            func.count(Save.save_item_id))
        .join(Playlist, Playlist.playlist_id == Save.save_item_id)
        .join(AggregateUser, AggregateUser.user_id == Playlist.playlist_owner_id)
        .filter(
            Save.is_current == True,
            Save.is_delete == False,
            Save.save_type == SaveType.playlist,  # Albums are filtered out
            Save.created_at > datetime.now() - delta,
            Playlist.is_current == True,
            Playlist.is_delete == False,
            Playlist.is_private == False,
            jsonb_array_length(Playlist.playlist_contents['track_ids']) >= mt,
            AggregateUser.following_count < zq)
        .group_by(Save.save_item_id, Playlist.created_at, Playlist.playlist_owner_id)
        .order_by(desc(func.count(Save.save_item_id)))
        .limit(TRENDING_LIMIT)).all()

    # Build up a map of playlist data
    # playlist_id -> data
    # Some fields initialized at zero
    playlist_map = {
        record[0]: {
            response_name_constants.playlist_id: record[0],
            response_name_constants.created_at: record[1].isoformat(timespec='seconds'),
            response_name_constants.owner_id: record[2],
            response_name_constants.windowed_save_count: record[3],
            response_name_constants.save_count: 0,
            response_name_constants.repost_count: 0,
            response_name_constants.windowed_repost_count: 0,
            response_name_constants.owner_follower_count: 0,
            "karma": 1,
            "listens": 1,
        }
        for record in playlists
    }

    playlist_ids = [record[0] for record in playlists]

    # map owner_id -> [playlist_id], accounting for an owner having multiple
    # trending playlists; used when associating follower counts below
    playlist_owner_id_map = {}
    for (playlist_id, _, owner_id, _) in playlists:
        if owner_id not in playlist_owner_id_map:
            playlist_owner_id_map[owner_id] = [playlist_id]
        else:
            playlist_owner_id_map[owner_id].append(playlist_id)

    # Add repost counts
    repost_counts = get_repost_counts(
        session, False, False, playlist_ids, [RepostType.playlist])
    for (playlist_id, repost_count) in repost_counts:
        playlist_map[playlist_id][response_name_constants.repost_count] = repost_count

    # Add windowed repost counts
    repost_counts_for_time = get_repost_counts(
        session, False, False, playlist_ids, [RepostType.playlist], None, time_range)
    for (playlist_id, repost_count) in repost_counts_for_time:
        playlist_map[playlist_id][response_name_constants.windowed_repost_count] = repost_count

    # Add save counts
    save_counts = get_save_counts(session, False, False, playlist_ids, [SaveType.playlist])
    for (playlist_id, save_count) in save_counts:
        playlist_map[playlist_id][response_name_constants.save_count] = save_count

    # Add follower counts
    follower_counts = (
        session.query(Follow.followee_user_id, func.count(Follow.followee_user_id))
        .filter(
            Follow.is_current == True,
            Follow.is_delete == False,
            Follow.followee_user_id.in_(list(playlist_owner_id_map.keys())))
        .group_by(Follow.followee_user_id)
        .all())
    for (followee_user_id, follower_count) in follower_counts:
        if follower_count >= pt:
            owned_playlist_ids = playlist_owner_id_map[followee_user_id]
            for playlist_id in owned_playlist_ids:
                playlist_map[playlist_id][
                    response_name_constants.owner_follower_count] = follower_count

    # Add karma
    karma_scores = get_karma(session, tuple(playlist_ids), None, True, xf)
    for (playlist_id, karma) in karma_scores:
        playlist_map[playlist_id]["karma"] = karma

    return playlist_map.values()
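
# Illustrative sketch (hypothetical rows, not used by the functions above): the shape of
# `playlist_owner_id_map`. One owner can appear for several trending playlists, so the
# map groups playlist ids per owner and lets a single follower-count row fan out to all
# of that owner's playlists.
def _example_playlist_owner_grouping():
    # (save_item_id, created_at, playlist_owner_id, windowed_save_count) rows
    rows = [(10, None, 1, 5), (11, None, 2, 4), (12, None, 1, 3)]
    owner_map = {}
    for (playlist_id, _, owner_id, _) in rows:
        owner_map.setdefault(owner_id, []).append(playlist_id)
    return owner_map  # {1: [10, 12], 2: [11]}
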
def generate_trending(db, time, genre, limit, offset):
    identity_url = shared_config['discprov']['identity_service_url']
    identity_trending_endpoint = urljoin(identity_url, f"/tracks/trending/{time}")

    post_body = {}
    post_body["limit"] = limit
    post_body["offset"] = offset

    # If a genre is provided, query the relevant track ids up front
    if genre is not None:
        # Parse encoded characters, such as Hip-Hop%252FRap -> Hip-Hop/Rap
        genre = unquote(genre)
        with db.scoped_session() as session:
            genre_list = get_genre_list(genre)
            genre_track_ids = (
                session.query(Track.track_id)
                .filter(
                    Track.genre.in_(genre_list),
                    Track.is_current == True,
                    Track.is_delete == False,
                    Track.is_unlisted == False,
                    Track.stem_of == None
                )
                .all()
            )
            genre_specific_track_ids = [record[0] for record in genre_track_ids]
            post_body["track_ids"] = genre_specific_track_ids

    # Query trending information from identity service
    resp = None
    try:
        resp = requests.post(identity_trending_endpoint, json=post_body)
    except Exception as e:  # pylint: disable=W0703
        logger.error(
            f'Error retrieving trending info - {identity_trending_endpoint}, {post_body}'
        )
        return api_helpers.error_response(e, 500)

    json_resp = resp.json()
    if "error" in json_resp:
        return api_helpers.error_response(json_resp["error"], 500)

    listen_counts = json_resp["listenCounts"]
    # Convert the camelCase 'trackId' key to the snake_case 'track_id'
    for track_entry in listen_counts:
        track_entry[response_name_constants.track_id] = track_entry['trackId']
        del track_entry['trackId']

    track_ids = [track[response_name_constants.track_id] for track in listen_counts]

    with db.scoped_session() as session:
        # Filter tracks to not-deleted ones so trending order is preserved
        not_deleted_track_ids = (
            session.query(Track.track_id, Track.created_at)
            .filter(
                Track.track_id.in_(track_ids),
                Track.is_current == True,
                Track.is_delete == False,
                Track.is_unlisted == False,
                Track.stem_of == None
            )
            .all()
        )
        # Generate track -> created_at date
        track_created_at_dict = {record[0]: record[1] for record in not_deleted_track_ids}

        not_deleted_track_ids = set(
            [record[0] for record in not_deleted_track_ids])  # pylint: disable=R1718

        # Query repost counts
        repost_counts = get_repost_counts(session, False, True, not_deleted_track_ids, None)
        # Generate track_id --> repost_count mapping
        track_repost_counts = {
            repost_item_id: repost_count
            for (repost_item_id, repost_count, repost_type) in repost_counts
            if repost_type == RepostType.track
        }

        # Query repost count with respect to rolling time frame in URL
        # (e.g. /trending/week -> window = rolling week)
        track_repost_counts_for_time = get_repost_counts(
            session, False, True, not_deleted_track_ids, None, None, time)
        # Generate track_id --> windowed_repost_count mapping
        track_repost_counts_for_time = {
            repost_item_id: repost_count
            for (repost_item_id, repost_count, repost_type) in track_repost_counts_for_time
            if repost_type == RepostType.track
        }

        # Query follower info for each track owner
        # Query each track owner
        track_owners_query = (
            session.query(Track.track_id, Track.owner_id)
            .filter(
                Track.is_current == True,
                Track.is_unlisted == False,
                Track.stem_of == None,
                Track.track_id.in_(not_deleted_track_ids)
            )
            .all()
        )

        # Generate track_id <-> owner_id mapping
        track_owner_dict = {track_id: owner_id for (track_id, owner_id) in track_owners_query}
        # Generate list of owner ids
        track_owner_list = [owner_id for (track_id, owner_id) in track_owners_query]

        # build dict of owner_id --> follower_count
        follower_counts = (
            session.query(Follow.followee_user_id, func.count(Follow.followee_user_id))
            .filter(
                Follow.is_current == True,
                Follow.is_delete == False,
                Follow.followee_user_id.in_(track_owner_list)
            )
            .group_by(Follow.followee_user_id)
            .all()
        )
        follower_count_dict = \
            {user_id: follower_count for (user_id, follower_count) in follower_counts}

        # Query save counts
        save_counts = get_save_counts(session, False, True, not_deleted_track_ids, None)
        # Generate track_id --> save_count mapping
        track_save_counts = {
            save_item_id: save_count
            for (save_item_id, save_count, save_type) in save_counts
            if save_type == SaveType.track
        }

        # Query save counts with respect to rolling time frame in URL
        # (e.g. /trending/week -> window = rolling week)
        save_counts_for_time = get_save_counts(
            session, False, True, not_deleted_track_ids, None, None, time)
        # Generate track_id --> windowed_save_count mapping
        track_save_counts_for_time = {
            save_item_id: save_count
            for (save_item_id, save_count, save_type) in save_counts_for_time
            if save_type == SaveType.track
        }

        karma_query = get_karma(session, tuple(not_deleted_track_ids))
        karma_counts_for_id = {track_id: karma for (track_id, karma) in karma_query}

        trending_tracks = []
        for track_entry in listen_counts:
            # Skip over deleted tracks
            if track_entry[response_name_constants.track_id] not in not_deleted_track_ids:
                continue

            # Populate repost counts
            if track_entry[response_name_constants.track_id] in track_repost_counts:
                track_entry[response_name_constants.repost_count] = \
                    track_repost_counts[track_entry[response_name_constants.track_id]]
            else:
                track_entry[response_name_constants.repost_count] = 0

            # Populate repost counts with respect to time
            if track_entry[response_name_constants.track_id] in track_repost_counts_for_time:
                track_entry[response_name_constants.windowed_repost_count] = \
                    track_repost_counts_for_time[track_entry[response_name_constants.track_id]]
            else:
                track_entry[response_name_constants.windowed_repost_count] = 0

            # Populate save counts
            if track_entry[response_name_constants.track_id] in track_save_counts:
                track_entry[response_name_constants.save_count] = \
                    track_save_counts[track_entry[response_name_constants.track_id]]
            else:
                track_entry[response_name_constants.save_count] = 0

            # Populate save counts with respect to time
            if track_entry[response_name_constants.track_id] in track_save_counts_for_time:
                track_entry[response_name_constants.windowed_save_count] = \
                    track_save_counts_for_time[track_entry[response_name_constants.track_id]]
            else:
                track_entry[response_name_constants.windowed_save_count] = 0

            # Populate owner id and follower count
            owner_id = track_owner_dict[track_entry[response_name_constants.track_id]]
            owner_follow_count = 0
            if owner_id in follower_count_dict:
                owner_follow_count = follower_count_dict[owner_id]
            track_entry[response_name_constants.track_owner_id] = owner_id
            track_entry[response_name_constants.track_owner_follower_count] = owner_follow_count

            # Populate created at timestamps
            if track_entry[response_name_constants.track_id] in track_created_at_dict:
                # datetime needs to be in isoformat for json.dumps() in `update_trending_cache()` to
                # properly process the dp response and add to redis cache
                # timespec specifies additional components of the time to include
                track_entry[response_name_constants.created_at] = \
                    track_created_at_dict[track_entry[response_name_constants.track_id]] \
                    .isoformat(timespec='seconds')
            else:
                track_entry[response_name_constants.created_at] = None

            track_entry["karma"] = \
                karma_counts_for_id[track_entry[response_name_constants.track_id]] \
                if track_entry[response_name_constants.track_id] in karma_counts_for_id else 0

            trending_tracks.append(track_entry)

    final_resp = {}
    final_resp['listen_counts'] = trending_tracks
    return final_resp
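
# Illustrative sketch (hypothetical payload, not used by the functions above): mirrors the
# trackId -> track_id renaming performed on the identity service response above.
def _example_listen_count_key_rename():
    listen_counts = [{"trackId": 42, "listens": 100}, {"trackId": 7, "listens": 55}]
    for entry in listen_counts:
        entry["track_id"] = entry["trackId"]
        del entry["trackId"]
    return listen_counts  # [{"listens": 100, "track_id": 42}, {"listens": 55, "track_id": 7}]
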
def notifications():
    """
    Fetches the notification events that occurred between the given block numbers

    URL Params:
        min_block_number: (int) The start block number for querying for notifications
        max_block_number?: (int) The end block number for querying for notifications
        track_id?: (Array<int>) Array of track ids for fetching each track's owner id and
            adding the track id -> owner user id mapping to the `owners` response field
            NOTE: this is added for listen count notifications

    Response - JSON object w/ the following fields
        notifications: Array of notifications of shape:
            type: 'Follow' | 'Favorite' | 'Repost' | 'Create' | 'RemixCreate' | 'RemixCosign'
            blocknumber: (int) blocknumber of notification
            timestamp: (string) timestamp of notification
            initiator: (int) the user id that caused this notification
            metadata?: (any) additional information about the notification
                entity_id?: (int) the id of the target entity (i.e. playlist id of a playlist that is reposted)
                entity_type?: (string) the type of the target entity
                entity_owner_id?: (int) the id of the target entity's owner (if applicable)

        info: Dictionary of metadata w/ min_block_number & max_block_number fields

        milestones: Dictionary mapping of follows/reposts/favorites (processed within the block params)
            Root fields:
                follower_counts: Contains a dictionary of user id => follower count (up to the max_block_number)
                repost_counts: Contains a dictionary of tracks/albums/playlists of id to repost count
                favorite_counts: Contains a dictionary of tracks/albums/playlists of id to favorite count

        owners: Dictionary containing the mapping for track id / playlist id / album -> owner user id
            The root keys are 'tracks', 'playlists', 'albums' and each contains the id to owner id mapping
    """
    db = get_db_read_replica()
    min_block_number = request.args.get("min_block_number", type=int)
    max_block_number = request.args.get("max_block_number", type=int)

    track_ids_to_owner = []
    try:
        track_ids_str_list = request.args.getlist("track_id")
        track_ids_to_owner = [int(y) for y in track_ids_str_list]
    except Exception as e:
        logger.error(f'Failed to retrieve track list {e}')

    # Max block number is not explicitly required (yet)
    if not min_block_number and min_block_number != 0:
        return api_helpers.error_response({'msg': 'Missing min block number'}, 500)

    if not max_block_number:
        max_block_number = min_block_number + max_block_diff
    elif (max_block_number - min_block_number) > max_block_diff:
        max_block_number = min_block_number + max_block_diff

    with db.scoped_session() as session:
        current_block_query = session.query(Block).filter_by(is_current=True)
        current_block_query_results = current_block_query.all()
        current_block = current_block_query_results[0]
        current_max_block_num = current_block.number
        if current_max_block_num < max_block_number:
            max_block_number = current_max_block_num

    notification_metadata = {
        'min_block_number': min_block_number,
        'max_block_number': max_block_number
    }

    # Retrieve milestones statistics
    milestone_info = {}

    # Cache owner info for network entities and pass in w/results
    owner_info = {const.tracks: {}, const.albums: {}, const.playlists: {}}

    # List of notifications generated from current protocol state
    notifications_unsorted = []
    with db.scoped_session() as session:
        # Query relevant follow information
        follow_query = session.query(Follow)

        # Impose min block number restriction
        follow_query = follow_query.filter(
            Follow.is_current == True,
            Follow.is_delete == False,
            Follow.blocknumber > min_block_number,
            Follow.blocknumber <= max_block_number)
        follow_results = follow_query.all()

        # Used to retrieve follower counts for this window
        followed_users = []
        # Represents all follow notifications
        follow_notifications = []
        for entry in follow_results:
            follow_notif = {
                const.notification_type: const.notification_type_follow,
                const.notification_blocknumber: entry.blocknumber,
                const.notification_timestamp: entry.created_at,
                const.notification_initiator: entry.follower_user_id,
                const.notification_metadata: {
                    const.notification_follower_id: entry.follower_user_id,
                    const.notification_followee_id: entry.followee_user_id
                }
            }
            follow_notifications.append(follow_notif)
            # Add every user who gained a new follower
            followed_users.append(entry.followee_user_id)

        # Query count for any user w/new followers
        follower_counts = get_follower_count_dict(session, followed_users, max_block_number)
        milestone_info['follower_counts'] = follower_counts

        notifications_unsorted.extend(follow_notifications)

        # Query relevant favorite information
        favorites_query = session.query(Save)
        favorites_query = favorites_query.filter(
            Save.is_current == True,
            Save.is_delete == False,
            Save.blocknumber > min_block_number,
            Save.blocknumber <= max_block_number)
        favorite_results = favorites_query.all()

        # ID lists to query count aggregates
        favorited_track_ids = []
        favorited_album_ids = []
        favorited_playlist_ids = []

        # List of favorite notifications
        favorite_notifications = []
        favorite_remix_tracks = []

        for entry in favorite_results:
            favorite_notif = {
                const.notification_type: const.notification_type_favorite,
                const.notification_blocknumber: entry.blocknumber,
                const.notification_timestamp: entry.created_at,
                const.notification_initiator: entry.user_id
            }
            save_type = entry.save_type
            save_item_id = entry.save_item_id
            metadata = {
                const.notification_entity_type: save_type,
                const.notification_entity_id: save_item_id
            }

            # NOTE if deleted, the favorite can still exist
            # TODO: Can we aggregate all owner queries and perform at once...?
            if save_type == SaveType.track:
                owner_id = get_owner_id(session, 'track', save_item_id)
                if not owner_id:
                    continue
                metadata[const.notification_entity_owner_id] = owner_id
                favorited_track_ids.append(save_item_id)
                owner_info[const.tracks][save_item_id] = owner_id
                favorite_remix_tracks.append({
                    const.notification_blocknumber: entry.blocknumber,
                    const.notification_timestamp: entry.created_at,
                    'user_id': entry.user_id,
                    'item_owner_id': owner_id,
                    'item_id': save_item_id
                })

            elif save_type == SaveType.album:
                owner_id = get_owner_id(session, 'album', save_item_id)
                if not owner_id:
                    continue
                metadata[const.notification_entity_owner_id] = owner_id
                favorited_album_ids.append(save_item_id)
                owner_info[const.albums][save_item_id] = owner_id

            elif save_type == SaveType.playlist:
                owner_id = get_owner_id(session, 'playlist', save_item_id)
                if not owner_id:
                    continue
                metadata[const.notification_entity_owner_id] = owner_id
                favorited_playlist_ids.append(save_item_id)
                owner_info[const.playlists][save_item_id] = owner_id

            favorite_notif[const.notification_metadata] = metadata
            favorite_notifications.append(favorite_notif)
        notifications_unsorted.extend(favorite_notifications)

        track_favorite_dict = {}
        album_favorite_dict = {}
        playlist_favorite_dict = {}

        if favorited_track_ids:
            track_favorite_counts = get_save_counts(
                session, False, False, favorited_track_ids, [SaveType.track], max_block_number)
            track_favorite_dict = \
                {track_id: fave_count for (track_id, fave_count) in track_favorite_counts}

            favorite_remix_notifications = get_cosign_remix_notifications(
                session, max_block_number, favorite_remix_tracks)
            notifications_unsorted.extend(favorite_remix_notifications)

        if favorited_album_ids:
            album_favorite_counts = get_save_counts(
                session, False, False, favorited_album_ids, [SaveType.album], max_block_number)
            album_favorite_dict = \
                {album_id: fave_count for (album_id, fave_count) in album_favorite_counts}

        if favorited_playlist_ids:
            playlist_favorite_counts = get_save_counts(
                session, False, False, favorited_playlist_ids, [SaveType.playlist],
                max_block_number)
            playlist_favorite_dict = \
                {playlist_id: fave_count for (playlist_id, fave_count) in playlist_favorite_counts}

        milestone_info[const.notification_favorite_counts] = {}
        milestone_info[const.notification_favorite_counts][const.tracks] = track_favorite_dict
        milestone_info[const.notification_favorite_counts][const.albums] = album_favorite_dict
        milestone_info[const.notification_favorite_counts][const.playlists] = playlist_favorite_dict

        #
        # Query relevant repost information
        #
        repost_query = session.query(Repost)
        repost_query = repost_query.filter(
            Repost.is_current == True,
            Repost.is_delete == False,
            Repost.blocknumber > min_block_number,
            Repost.blocknumber <= max_block_number)
        repost_results = repost_query.all()

        # ID lists to query counts
        reposted_track_ids = []
        reposted_album_ids = []
        reposted_playlist_ids = []

        # List of repost notifications
        repost_notifications = []

        # List of repost remix cosign notifications
        repost_remix_notifications = []
        repost_remix_tracks = []

        for entry in repost_results:
            repost_notif = {
                const.notification_type: const.notification_type_repost,
                const.notification_blocknumber: entry.blocknumber,
                const.notification_timestamp: entry.created_at,
                const.notification_initiator: entry.user_id
            }
            repost_type = entry.repost_type
            repost_item_id = entry.repost_item_id
            metadata = {
                const.notification_entity_type: repost_type,
                const.notification_entity_id: repost_item_id
            }
            if repost_type == RepostType.track:
                owner_id = get_owner_id(session, 'track', repost_item_id)
                if not owner_id:
                    continue
                metadata[const.notification_entity_owner_id] = owner_id
                reposted_track_ids.append(repost_item_id)
                owner_info[const.tracks][repost_item_id] = owner_id
                repost_remix_tracks.append({
                    const.notification_blocknumber: entry.blocknumber,
                    const.notification_timestamp: entry.created_at,
                    'user_id': entry.user_id,
                    'item_owner_id': owner_id,
                    'item_id': repost_item_id
                })

            elif repost_type == RepostType.album:
                owner_id = get_owner_id(session, 'album', repost_item_id)
                if not owner_id:
                    continue
                metadata[const.notification_entity_owner_id] = owner_id
                reposted_album_ids.append(repost_item_id)
                owner_info[const.albums][repost_item_id] = owner_id

            elif repost_type == RepostType.playlist:
                owner_id = get_owner_id(session, 'playlist', repost_item_id)
                if not owner_id:
                    continue
                metadata[const.notification_entity_owner_id] = owner_id
                reposted_playlist_ids.append(repost_item_id)
                owner_info[const.playlists][repost_item_id] = owner_id

            repost_notif[const.notification_metadata] = metadata
            repost_notifications.append(repost_notif)

        # Append repost notifications
        notifications_unsorted.extend(repost_notifications)

        track_repost_count_dict = {}
        album_repost_count_dict = {}
        playlist_repost_count_dict = {}

        # Aggregate repost counts for relevant fields
        # Used to notify users of entity-specific milestones
        if reposted_track_ids:
            track_repost_counts = get_repost_counts(
                session, False, False, reposted_track_ids, [RepostType.track], max_block_number)
            track_repost_count_dict = \
                {track_id: repost_count for (track_id, repost_count) in track_repost_counts}

            repost_remix_notifications = get_cosign_remix_notifications(
                session, max_block_number, repost_remix_tracks)
            notifications_unsorted.extend(repost_remix_notifications)

        if reposted_album_ids:
            album_repost_counts = get_repost_counts(
                session, False, False, reposted_album_ids, [RepostType.album], max_block_number)
            album_repost_count_dict = \
                {album_id: repost_count for (album_id, repost_count) in album_repost_counts}

        if reposted_playlist_ids:
            playlist_repost_counts = get_repost_counts(
                session, False, False, reposted_playlist_ids, [RepostType.playlist],
                max_block_number)
            playlist_repost_count_dict = \
                {playlist_id: repost_count for (playlist_id, repost_count) in playlist_repost_counts}

        milestone_info[const.notification_repost_counts] = {}
        milestone_info[const.notification_repost_counts][const.tracks] = track_repost_count_dict
        milestone_info[const.notification_repost_counts][const.albums] = album_repost_count_dict
        milestone_info[const.notification_repost_counts][const.playlists] = playlist_repost_count_dict

        # Query relevant created entity notifications - tracks/albums/playlists
        created_notifications = []

        # Query relevant created tracks for remix information
        remix_created_notifications = []

        # Aggregate track notifs
        tracks_query = session.query(Track)
        # TODO: Is it valid to use Track.is_current here? Might not be the right info...
        tracks_query = tracks_query.filter(
            Track.is_unlisted == False,
            Track.is_delete == False,
            Track.stem_of == None,
            Track.blocknumber > min_block_number,
            Track.blocknumber <= max_block_number)
        tracks_query = tracks_query.filter(Track.created_at == Track.updated_at)
        track_results = tracks_query.all()
        for entry in track_results:
            track_notif = {
                const.notification_type: const.notification_type_create,
                const.notification_blocknumber: entry.blocknumber,
                const.notification_timestamp: entry.created_at,
                const.notification_initiator: entry.owner_id,
                # TODO: is entity owner id necessary for tracks?
                const.notification_metadata: {
                    const.notification_entity_type: 'track',
                    const.notification_entity_id: entry.track_id,
                    const.notification_entity_owner_id: entry.owner_id
                }
            }
            created_notifications.append(track_notif)

            if entry.remix_of:
                # Add notification to remix track owner
                parent_remix_tracks = [t['parent_track_id'] for t in entry.remix_of['tracks']]
                remix_track_parents = (
                    session.query(Track.owner_id, Track.track_id)
                    .filter(
                        Track.track_id.in_(parent_remix_tracks),
                        Track.is_unlisted == False,
                        Track.is_delete == False,
                        Track.is_current == True)
                    .all())
                for remix_track_parent in remix_track_parents:
                    [remix_track_parent_owner, remix_track_parent_id] = remix_track_parent
                    remix_notif = {
                        const.notification_type: const.notification_type_remix_create,
                        const.notification_blocknumber: entry.blocknumber,
                        const.notification_timestamp: entry.created_at,
                        const.notification_initiator: entry.owner_id,
                        # TODO: is entity owner id necessary for tracks?
                        const.notification_metadata: {
                            const.notification_entity_type: 'track',
                            const.notification_entity_id: entry.track_id,
                            const.notification_entity_owner_id: entry.owner_id,
                            const.notification_remix_parent_track_user_id: remix_track_parent_owner,
                            const.notification_remix_parent_track_id: remix_track_parent_id
                        }
                    }
                    remix_created_notifications.append(remix_notif)

        # Handle track update notifications
        # TODO: Consider switching blocknumber for updated_at?
        updated_tracks_query = session.query(Track)
        updated_tracks_query = updated_tracks_query.filter(
            Track.is_unlisted == False,
            Track.stem_of == None,
            Track.created_at != Track.updated_at,
            Track.blocknumber > min_block_number,
            Track.blocknumber <= max_block_number)
        updated_tracks = updated_tracks_query.all()
        for entry in updated_tracks:
            prev_entry_query = (
                session.query(Track)
                .filter(
                    Track.track_id == entry.track_id,
                    Track.blocknumber < entry.blocknumber)
                .order_by(desc(Track.blocknumber)))
            # Previous unlisted entry indicates transition to public, triggering a notification
            prev_entry = prev_entry_query.first()

            # Tracks that were unlisted and turned public
            if prev_entry.is_unlisted == True:
                track_notif = {
                    const.notification_type: const.notification_type_create,
                    const.notification_blocknumber: entry.blocknumber,
                    const.notification_timestamp: entry.created_at,
                    const.notification_initiator: entry.owner_id,
                    # TODO: is entity owner id necessary for tracks?
                    const.notification_metadata: {
                        const.notification_entity_type: 'track',
                        const.notification_entity_id: entry.track_id,
                        const.notification_entity_owner_id: entry.owner_id
                    }
                }
                created_notifications.append(track_notif)

            # Tracks that were not remixes and turned into remixes
            if not prev_entry.remix_of and entry.remix_of:
                # Add notification to remix track owner
                parent_remix_tracks = [t['parent_track_id'] for t in entry.remix_of['tracks']]
                remix_track_parents = (
                    session.query(Track.owner_id, Track.track_id)
                    .filter(
                        Track.track_id.in_(parent_remix_tracks),
                        Track.is_unlisted == False,
                        Track.is_delete == False,
                        Track.is_current == True)
                    .all())
                for remix_track_parent in remix_track_parents:
                    [remix_track_parent_owner, remix_track_parent_id] = remix_track_parent
                    remix_notif = {
                        const.notification_type: const.notification_type_remix_create,
                        const.notification_blocknumber: entry.blocknumber,
                        const.notification_timestamp: entry.created_at,
                        const.notification_initiator: entry.owner_id,
                        # TODO: is entity owner id necessary for tracks?
                        const.notification_metadata: {
                            const.notification_entity_type: 'track',
                            const.notification_entity_id: entry.track_id,
                            const.notification_entity_owner_id: entry.owner_id,
                            const.notification_remix_parent_track_user_id: remix_track_parent_owner,
                            const.notification_remix_parent_track_id: remix_track_parent_id
                        }
                    }
                    remix_created_notifications.append(remix_notif)

        notifications_unsorted.extend(remix_created_notifications)

        # Aggregate playlist/album notifs
        collection_query = session.query(Playlist)
        # TODO: Is it valid to use is_current here? Might not be the right info...
        collection_query = collection_query.filter(
            Playlist.is_delete == False,
            Playlist.is_private == False,
            Playlist.blocknumber > min_block_number,
            Playlist.blocknumber <= max_block_number)
        collection_query = collection_query.filter(Playlist.created_at == Playlist.updated_at)
        collection_results = collection_query.all()

        for entry in collection_results:
            collection_notif = {
                const.notification_type: const.notification_type_create,
                const.notification_blocknumber: entry.blocknumber,
                const.notification_timestamp: entry.created_at,
                const.notification_initiator: entry.playlist_owner_id
            }
            metadata = {
                const.notification_entity_id: entry.playlist_id,
                const.notification_entity_owner_id: entry.playlist_owner_id,
                const.notification_collection_content: entry.playlist_contents
            }
            if entry.is_album:
                metadata[const.notification_entity_type] = 'album'
            else:
                metadata[const.notification_entity_type] = 'playlist'
            collection_notif[const.notification_metadata] = metadata
            created_notifications.append(collection_notif)

        # Playlists that were private and turned to public aka 'published'
        # TODO: Consider switching blocknumber for updated_at?
        publish_playlists_query = session.query(Playlist)
        publish_playlists_query = publish_playlists_query.filter(
            Playlist.is_private == False,
            Playlist.created_at != Playlist.updated_at,
            Playlist.blocknumber > min_block_number,
            Playlist.blocknumber <= max_block_number)
        publish_playlist_results = publish_playlists_query.all()

        for entry in publish_playlist_results:
            prev_entry_query = (
                session.query(Playlist)
                .filter(
                    Playlist.playlist_id == entry.playlist_id,
                    Playlist.blocknumber < entry.blocknumber)
                .order_by(desc(Playlist.blocknumber)))
            # Previous private entry indicates transition to public, triggering a notification
            prev_entry = prev_entry_query.first()

            if prev_entry.is_private == True:
                publish_playlist_notif = {
                    const.notification_type: const.notification_type_create,
                    const.notification_blocknumber: entry.blocknumber,
                    const.notification_timestamp: entry.created_at,
                    const.notification_initiator: entry.playlist_owner_id
                }
                metadata = {
                    const.notification_entity_id: entry.playlist_id,
                    const.notification_entity_owner_id: entry.playlist_owner_id,
                    const.notification_collection_content: entry.playlist_contents,
                    const.notification_entity_type: 'playlist'
                }
                publish_playlist_notif[const.notification_metadata] = metadata
                created_notifications.append(publish_playlist_notif)

        notifications_unsorted.extend(created_notifications)

        # Get additional owner info as requested for listen counts
        tracks_owner_query = (
            session.query(Track)
            .filter(
                Track.is_current == True,
                Track.track_id.in_(track_ids_to_owner)))
        track_owner_results = tracks_owner_query.all()
        for entry in track_owner_results:
            owner = entry.owner_id
            track_id = entry.track_id
            owner_info[const.tracks][track_id] = owner

    # Final sort - TODO: can we sort by timestamp?
    sorted_notifications = sorted(
        notifications_unsorted,
        key=lambda i: i[const.notification_blocknumber],
        reverse=False)

    return api_helpers.success_response({
        'notifications': sorted_notifications,
        'info': notification_metadata,
        'milestones': milestone_info,
        'owners': owner_info
    })
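
# Illustrative sketch (hypothetical values, not used by the functions above): the overall
# shape of the success_response payload assembled above. See the docstring for the full
# field descriptions; key names inside "metadata" are illustrative only.
def _example_notifications_response_shape():
    return {
        "notifications": [
            {
                "type": "Follow",
                "blocknumber": 1001,
                "timestamp": "2021-03-01T12:00:00",
                "initiator": 7,
                "metadata": {"follower_user_id": 7, "followee_user_id": 3},
            }
        ],
        "info": {"min_block_number": 1000, "max_block_number": 1050},
        "milestones": {
            "follower_counts": {3: 25},
            "repost_counts": {"tracks": {}, "albums": {}, "playlists": {}},
            "favorite_counts": {"tracks": {}, "albums": {}, "playlists": {}},
        },
        "owners": {"tracks": {42: 3}, "albums": {}, "playlists": {}},
    }
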
def get_scorable_track_data(session, redis_instance, strategy):
    """
    Returns a map:
    {
        "track_id": string
        "created_at": string
        "owner_id": number
        "windowed_save_count": number
        "save_count": number
        "repost_count": number
        "windowed_repost_count": number
        "owner_follower_count": number
        "karma": number
        "listens": number
        "owner_verified": boolean
    }
    """

    score_params = strategy.get_score_params()
    S = score_params['S']
    r = score_params['r']
    q = score_params['q']
    o = score_params['o']
    f = score_params['f']
    qr = score_params['qr']
    xf = score_params['xf']
    pt = score_params['pt']
    trending_key = make_trending_cache_key("week", None, strategy.version)
    track_ids = []
    old_trending = get_pickled_key(redis_instance, trending_key)
    if old_trending:
        track_ids = old_trending[1]
    exclude_track_ids = track_ids[:qr]

    # Get followers
    follower_query = (
        session.query(
            Follow.followee_user_id.label('user_id'),
            User.is_verified.label('is_verified'),
            func.count(Follow.followee_user_id).label('follower_count'))
        .join(User, User.user_id == Follow.followee_user_id)
        .filter(
            Follow.is_current == True,
            Follow.is_delete == False,
            User.is_current == True,
            Follow.created_at < (datetime.now() - timedelta(days=f)))
        .group_by(Follow.followee_user_id, User.is_verified)).subquery()

    base_query = (
        session.query(
            AggregatePlays.play_item_id.label('track_id'),
            follower_query.c.user_id,
            follower_query.c.follower_count,
            AggregatePlays.count,
            Track.created_at,
            follower_query.c.is_verified)
        .join(Track, Track.track_id == AggregatePlays.play_item_id)
        .join(follower_query, follower_query.c.user_id == Track.owner_id)
        .join(AggregateUser, AggregateUser.user_id == Track.owner_id)
        .filter(
            Track.is_current == True,
            Track.is_delete == False,
            Track.is_unlisted == False,
            Track.stem_of == None,
            Track.track_id.notin_(exclude_track_ids),
            Track.created_at >= (datetime.now() - timedelta(days=o)),
            follower_query.c.follower_count < S,
            follower_query.c.follower_count >= pt,
            AggregateUser.following_count < r,
            AggregatePlays.count >= q)).all()

    tracks_map = {
        record[0]: {
            "track_id": record[0],
            "created_at": record[4].isoformat(timespec='seconds'),
            "owner_id": record[1],
            "windowed_save_count": 0,
            "save_count": 0,
            "repost_count": 0,
            "windowed_repost_count": 0,
            "owner_follower_count": record[2],
            "karma": 1,
            "listens": record[3],
            "owner_verified": record[5]
        }
        for record in base_query
    }

    track_ids = [record[0] for record in base_query]

    # Get all the extra values
    repost_counts = get_repost_counts(session, False, False, track_ids, [RepostType.track])
    windowed_repost_counts = get_repost_counts(
        session, False, False, track_ids, [RepostType.track], None, "week")
    save_counts = get_save_counts(session, False, False, track_ids, [SaveType.track])
    windowed_save_counts = get_save_counts(
        session, False, False, track_ids, [SaveType.track], None, "week")
    karma_scores = get_karma(session, tuple(track_ids), None, False, xf)

    # Associate all the extra data
    for (track_id, repost_count) in repost_counts:
        tracks_map[track_id]["repost_count"] = repost_count
    for (track_id, repost_count) in windowed_repost_counts:
        tracks_map[track_id]["windowed_repost_count"] = repost_count
    for (track_id, save_count) in save_counts:
        tracks_map[track_id]["save_count"] = save_count
    for (track_id, save_count) in windowed_save_counts:
        tracks_map[track_id]["windowed_save_count"] = save_count
    for (track_id, karma) in karma_scores:
        tracks_map[track_id]["karma"] = karma

    return list(tracks_map.values())
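
# Illustrative sketch (hypothetical ids, not used by the functions above): the effect of
# the `qr` score parameter. The first `qr` track ids from the previously cached "week"
# trending result are excluded from the candidate query via Track.track_id.notin_(...).
def _example_trending_exclusion():
    previous_week_trending = [101, 102, 103, 104, 105]  # hypothetical cached ids
    qr = 3
    exclude_track_ids = previous_week_trending[:qr]
    return exclude_track_ids  # [101, 102, 103]
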
def notifications(): """ Fetches the notifications events that occurred between the given block numbers URL Params: min_block_number: (int) The start block number for querying for notifications max_block_number?: (int) The end block number for querying for notifications track_id?: (Array<int>) Array of track id for fetching the track's owner id and adding the track id to owner user id mapping to the `owners` response field NOTE: this is added for notification for listen counts Response - Json object w/ the following fields notifications: Array of notifications of shape: type: 'Follow' | 'Favorite' | 'Repost' | 'Create' | 'RemixCreate' | 'RemixCosign' | 'PlaylistUpdate' blocknumber: (int) blocknumber of notification timestamp: (string) timestamp of notification initiator: (int) the user id that caused this notification metadata?: (any) additional information about the notification entity_id?: (int) the id of the target entity (ie. playlist id of a playlist that is reposted) entity_type?: (string) the type of the target entity entity_owner_id?: (int) the id of the target entity's owner (if applicable) playlist_update_timestamp?: (string) timestamp of last update of a given playlist playlist_update_users?: (array<int>) user ids which favorited a given playlist info: Dictionary of metadata w/ min_block_number & max_block_number fields milestones: Dictionary mapping of follows/reposts/favorites (processed within the blocks params) Root fields: follower_counts: Contains a dictionary of user id => follower count (up to the max_block_number) repost_counts: Contains a dictionary tracks/albums/playlists of id to repost count favorite_counts: Contains a dictionary tracks/albums/playlists of id to favorite count owners: Dictionary containing the mapping for track id / playlist id / album -> owner user id The root keys are 'tracks', 'playlists', 'albums' and each contains the id to owner id mapping """ db = get_db_read_replica() min_block_number = request.args.get("min_block_number", type=int) max_block_number = request.args.get("max_block_number", type=int) track_ids_to_owner = [] try: track_ids_str_list = request.args.getlist("track_id") track_ids_to_owner = [int(y) for y in track_ids_str_list] except Exception as e: logger.error(f"Failed to retrieve track list {e}") # Max block number is not explicitly required (yet) if not min_block_number and min_block_number != 0: return api_helpers.error_response({"msg": "Missing min block number"}, 400) if not max_block_number: max_block_number = min_block_number + max_block_diff elif (max_block_number - min_block_number) > max_block_diff: max_block_number = min_block_number + max_block_diff with db.scoped_session() as session: current_block_query = session.query(Block).filter_by(is_current=True) current_block_query_results = current_block_query.all() current_block = current_block_query_results[0] current_max_block_num = current_block.number if current_max_block_num < max_block_number: max_block_number = current_max_block_num notification_metadata = { "min_block_number": min_block_number, "max_block_number": max_block_number, } # Retrieve milestones statistics milestone_info = {} # Cache owner info for network entities and pass in w/results owner_info = {const.tracks: {}, const.albums: {}, const.playlists: {}} start_time = datetime.now() logger.info(f"notifications.py | start_time ${start_time}") # List of notifications generated from current protocol state notifications_unsorted = [] with db.scoped_session() as session: # # Query relevant follow information # 
follow_query = session.query(Follow) # Impose min block number restriction follow_query = follow_query.filter( Follow.is_current == True, Follow.is_delete == False, Follow.blocknumber > min_block_number, Follow.blocknumber <= max_block_number, ) follow_results = follow_query.all() # Used to retrieve follower counts for this window followed_users = [] # Represents all follow notifications follow_notifications = [] for entry in follow_results: follow_notif = { const.notification_type: const.notification_type_follow, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.follower_user_id, const.notification_metadata: { const.notification_follower_id: entry.follower_user_id, const.notification_followee_id: entry.followee_user_id, }, } follow_notifications.append(follow_notif) # Add every user who gained a new follower followed_users.append(entry.followee_user_id) # Query count for any user w/new followers follower_counts = get_follower_count_dict( session, followed_users, max_block_number ) milestone_info["follower_counts"] = follower_counts notifications_unsorted.extend(follow_notifications) logger.info(f"notifications.py | followers at {datetime.now() - start_time}") # # Query relevant favorite information # favorites_query = session.query(Save) favorites_query = favorites_query.filter( Save.is_current == True, Save.is_delete == False, Save.blocknumber > min_block_number, Save.blocknumber <= max_block_number, ) favorite_results = favorites_query.all() # ID lists to query count aggregates favorited_track_ids = [] favorited_album_ids = [] favorited_playlist_ids = [] # List of favorite notifications favorite_notifications = [] favorite_remix_tracks = [] for entry in favorite_results: favorite_notif = { const.notification_type: const.notification_type_favorite, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.user_id, } save_type = entry.save_type save_item_id = entry.save_item_id metadata = { const.notification_entity_type: save_type, const.notification_entity_id: save_item_id, } # NOTE if deleted, the favorite can still exist # TODO: Can we aggregate all owner queries and perform at once...? 
if save_type == SaveType.track: owner_id = get_owner_id(session, "track", save_item_id) if not owner_id: continue metadata[const.notification_entity_owner_id] = owner_id favorited_track_ids.append(save_item_id) owner_info[const.tracks][save_item_id] = owner_id favorite_remix_tracks.append( { const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, "user_id": entry.user_id, "item_owner_id": owner_id, "item_id": save_item_id, } ) elif save_type == SaveType.album: owner_id = get_owner_id(session, "album", save_item_id) if not owner_id: continue metadata[const.notification_entity_owner_id] = owner_id favorited_album_ids.append(save_item_id) owner_info[const.albums][save_item_id] = owner_id elif save_type == SaveType.playlist: owner_id = get_owner_id(session, "playlist", save_item_id) if not owner_id: continue metadata[const.notification_entity_owner_id] = owner_id favorited_playlist_ids.append(save_item_id) owner_info[const.playlists][save_item_id] = owner_id favorite_notif[const.notification_metadata] = metadata favorite_notifications.append(favorite_notif) notifications_unsorted.extend(favorite_notifications) track_favorite_dict = {} album_favorite_dict = {} playlist_favorite_dict = {} if favorited_track_ids: track_favorite_counts = get_save_counts( session, False, False, favorited_track_ids, [SaveType.track], max_block_number, ) track_favorite_dict = dict(track_favorite_counts) favorite_remix_notifications = get_cosign_remix_notifications( session, max_block_number, favorite_remix_tracks ) notifications_unsorted.extend(favorite_remix_notifications) if favorited_album_ids: album_favorite_counts = get_save_counts( session, False, False, favorited_album_ids, [SaveType.album], max_block_number, ) album_favorite_dict = dict(album_favorite_counts) if favorited_playlist_ids: playlist_favorite_counts = get_save_counts( session, False, False, favorited_playlist_ids, [SaveType.playlist], max_block_number, ) playlist_favorite_dict = dict(playlist_favorite_counts) milestone_info[const.notification_favorite_counts] = {} milestone_info[const.notification_favorite_counts][ const.tracks ] = track_favorite_dict milestone_info[const.notification_favorite_counts][ const.albums ] = album_favorite_dict milestone_info[const.notification_favorite_counts][ const.playlists ] = playlist_favorite_dict logger.info(f"notifications.py | favorites at {datetime.now() - start_time}") # # Query relevant tier change information # balance_change_query = session.query(UserBalanceChange) # Impose min block number restriction balance_change_query = balance_change_query.filter( UserBalanceChange.blocknumber > min_block_number, UserBalanceChange.blocknumber <= max_block_number, ) balance_change_results = balance_change_query.all() tier_change_notifications = [] for entry in balance_change_results: prev = int(entry.previous_balance) current = int(entry.current_balance) # Check for a tier change and add to tier_change_notification tier = None if prev < 100000 <= current: tier = "platinum" elif prev < 10000 <= current: tier = "gold" elif prev < 100 <= current: tier = "silver" elif prev < 10 <= current: tier = "bronze" if tier is not None: tier_change_notif = { const.notification_type: const.notification_type_tier_change, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: datetime.now(), const.notification_initiator: entry.user_id, const.notification_metadata: { const.notification_tier: tier, }, } tier_change_notifications.append(tier_change_notif) 
notifications_unsorted.extend(tier_change_notifications) logger.info( f"notifications.py | balance change at {datetime.now() - start_time}" ) # # Query relevant repost information # repost_query = session.query(Repost) repost_query = repost_query.filter( Repost.is_current == True, Repost.is_delete == False, Repost.blocknumber > min_block_number, Repost.blocknumber <= max_block_number, ) repost_results = repost_query.all() # ID lists to query counts reposted_track_ids = [] reposted_album_ids = [] reposted_playlist_ids = [] # List of repost notifications repost_notifications = [] # List of repost notifications repost_remix_notifications = [] repost_remix_tracks = [] for entry in repost_results: repost_notif = { const.notification_type: const.notification_type_repost, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.user_id, } repost_type = entry.repost_type repost_item_id = entry.repost_item_id metadata = { const.notification_entity_type: repost_type, const.notification_entity_id: repost_item_id, } if repost_type == RepostType.track: owner_id = get_owner_id(session, "track", repost_item_id) if not owner_id: continue metadata[const.notification_entity_owner_id] = owner_id reposted_track_ids.append(repost_item_id) owner_info[const.tracks][repost_item_id] = owner_id repost_remix_tracks.append( { const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, "user_id": entry.user_id, "item_owner_id": owner_id, "item_id": repost_item_id, } ) elif repost_type == RepostType.album: owner_id = get_owner_id(session, "album", repost_item_id) if not owner_id: continue metadata[const.notification_entity_owner_id] = owner_id reposted_album_ids.append(repost_item_id) owner_info[const.albums][repost_item_id] = owner_id elif repost_type == RepostType.playlist: owner_id = get_owner_id(session, "playlist", repost_item_id) if not owner_id: continue metadata[const.notification_entity_owner_id] = owner_id reposted_playlist_ids.append(repost_item_id) owner_info[const.playlists][repost_item_id] = owner_id repost_notif[const.notification_metadata] = metadata repost_notifications.append(repost_notif) # Append repost notifications notifications_unsorted.extend(repost_notifications) track_repost_count_dict = {} album_repost_count_dict = {} playlist_repost_count_dict = {} # Aggregate repost counts for relevant fields # Used to notify users of entity-specific milestones if reposted_track_ids: track_repost_counts = get_repost_counts( session, False, False, reposted_track_ids, [RepostType.track], max_block_number, ) track_repost_count_dict = dict(track_repost_counts) repost_remix_notifications = get_cosign_remix_notifications( session, max_block_number, repost_remix_tracks ) notifications_unsorted.extend(repost_remix_notifications) if reposted_album_ids: album_repost_counts = get_repost_counts( session, False, False, reposted_album_ids, [RepostType.album], max_block_number, ) album_repost_count_dict = dict(album_repost_counts) if reposted_playlist_ids: playlist_repost_counts = get_repost_counts( session, False, False, reposted_playlist_ids, [RepostType.playlist], max_block_number, ) playlist_repost_count_dict = dict(playlist_repost_counts) milestone_info[const.notification_repost_counts] = {} milestone_info[const.notification_repost_counts][ const.tracks ] = track_repost_count_dict milestone_info[const.notification_repost_counts][ const.albums ] = album_repost_count_dict 
milestone_info[const.notification_repost_counts][ const.playlists ] = playlist_repost_count_dict # Query relevant created entity notification - tracks/albums/playlists created_notifications = [] logger.info(f"notifications.py | reposts at {datetime.now() - start_time}") # # Query relevant created tracks for remix information # remix_created_notifications = [] # Aggregate track notifs tracks_query = session.query(Track) # TODO: Is it valid to use Track.is_current here? Might not be the right info... tracks_query = tracks_query.filter( Track.is_unlisted == False, Track.is_delete == False, Track.stem_of == None, Track.blocknumber > min_block_number, Track.blocknumber <= max_block_number, ) tracks_query = tracks_query.filter(Track.created_at == Track.updated_at) track_results = tracks_query.all() for entry in track_results: track_notif = { const.notification_type: const.notification_type_create, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.owner_id, # TODO: is entity owner id necessary for tracks? const.notification_metadata: { const.notification_entity_type: "track", const.notification_entity_id: entry.track_id, const.notification_entity_owner_id: entry.owner_id, }, } created_notifications.append(track_notif) if entry.remix_of: # Add notification to remix track owner parent_remix_tracks = [ t["parent_track_id"] for t in entry.remix_of["tracks"] ] remix_track_parents = ( session.query(Track.owner_id, Track.track_id) .filter( Track.track_id.in_(parent_remix_tracks), Track.is_unlisted == False, Track.is_delete == False, Track.is_current == True, ) .all() ) for remix_track_parent in remix_track_parents: [ remix_track_parent_owner, remix_track_parent_id, ] = remix_track_parent remix_notif = { const.notification_type: const.notification_type_remix_create, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.owner_id, # TODO: is entity owner id necessary for tracks? const.notification_metadata: { const.notification_entity_type: "track", const.notification_entity_id: entry.track_id, const.notification_entity_owner_id: entry.owner_id, const.notification_remix_parent_track_user_id: remix_track_parent_owner, const.notification_remix_parent_track_id: remix_track_parent_id, }, } remix_created_notifications.append(remix_notif) logger.info(f"notifications.py | remixes at {datetime.now() - start_time}") # Handle track update notifications # TODO: Consider switching blocknumber for updated at? 
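# Note on the remix handling above and in the track-update pass below: the code
# assumes `Track.remix_of` is JSON shaped roughly like
#     {"tracks": [{"parent_track_id": <track_id>}, ...]}
# Each listed parent track that is current, public, and not deleted yields a
# notification_type_remix_create entry referencing the parent track and its owner.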
updated_tracks_query = session.query(Track) updated_tracks_query = updated_tracks_query.filter( Track.is_unlisted == False, Track.stem_of == None, Track.created_at != Track.updated_at, Track.blocknumber > min_block_number, Track.blocknumber <= max_block_number, ) updated_tracks = updated_tracks_query.all() prev_tracks = get_prev_track_entries(session, updated_tracks) for prev_entry in prev_tracks: entry = next(t for t in updated_tracks if t.track_id == prev_entry.track_id) logger.info( f"notifications.py | single track update {entry.track_id} {entry.blocknumber} {datetime.now() - start_time}" ) # Tracks that were unlisted and turned to public if prev_entry.is_unlisted == True: logger.info( f"notifications.py | single track update to public {datetime.now() - start_time}" ) track_notif = { const.notification_type: const.notification_type_create, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.owner_id, # TODO: is entity owner id necessary for tracks? const.notification_metadata: { const.notification_entity_type: "track", const.notification_entity_id: entry.track_id, const.notification_entity_owner_id: entry.owner_id, }, } created_notifications.append(track_notif) # Tracks that were not remixes and turned into remixes if not prev_entry.remix_of and entry.remix_of: # Add notification to remix track owner parent_remix_tracks = [ t["parent_track_id"] for t in entry.remix_of["tracks"] ] remix_track_parents = ( session.query(Track.owner_id, Track.track_id) .filter( Track.track_id.in_(parent_remix_tracks), Track.is_unlisted == False, Track.is_delete == False, Track.is_current == True, ) .all() ) logger.info( f"notifications.py | single track update parents {remix_track_parents} {datetime.now() - start_time}" ) for remix_track_parent in remix_track_parents: [ remix_track_parent_owner, remix_track_parent_id, ] = remix_track_parent remix_notif = { const.notification_type: const.notification_type_remix_create, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.owner_id, # TODO: is entity owner id necessary for tracks? const.notification_metadata: { const.notification_entity_type: "track", const.notification_entity_id: entry.track_id, const.notification_entity_owner_id: entry.owner_id, const.notification_remix_parent_track_user_id: remix_track_parent_owner, const.notification_remix_parent_track_id: remix_track_parent_id, }, } remix_created_notifications.append(remix_notif) notifications_unsorted.extend(remix_created_notifications) logger.info( f"notifications.py | track updates at {datetime.now() - start_time}" ) # Aggregate playlist/album notifs collection_query = session.query(Playlist) # TODO: Is it valid to use is_current here? Might not be the right info... 
collection_query = collection_query.filter( Playlist.is_delete == False, Playlist.is_private == False, Playlist.blocknumber > min_block_number, Playlist.blocknumber <= max_block_number, ) collection_query = collection_query.filter( Playlist.created_at == Playlist.updated_at ) collection_results = collection_query.all() for entry in collection_results: collection_notif = { const.notification_type: const.notification_type_create, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.playlist_owner_id, } metadata = { const.notification_entity_id: entry.playlist_id, const.notification_entity_owner_id: entry.playlist_owner_id, const.notification_collection_content: entry.playlist_contents, } if entry.is_album: metadata[const.notification_entity_type] = "album" else: metadata[const.notification_entity_type] = "playlist" collection_notif[const.notification_metadata] = metadata created_notifications.append(collection_notif) # Playlists that were private and turned to public aka 'published' # TODO: Consider switching blocknumber for updated at? publish_playlists_query = session.query(Playlist) publish_playlists_query = publish_playlists_query.filter( Playlist.is_private == False, Playlist.created_at != Playlist.updated_at, Playlist.blocknumber > min_block_number, Playlist.blocknumber <= max_block_number, ) publish_playlist_results = publish_playlists_query.all() for entry in publish_playlist_results: prev_entry_query = ( session.query(Playlist) .filter( Playlist.playlist_id == entry.playlist_id, Playlist.blocknumber < entry.blocknumber, ) .order_by(desc(Playlist.blocknumber)) ) # Previous private entry indicates transition to public, triggering a notification prev_entry = prev_entry_query.first() if prev_entry.is_private == True: publish_playlist_notif = { const.notification_type: const.notification_type_create, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.playlist_owner_id, } metadata = { const.notification_entity_id: entry.playlist_id, const.notification_entity_owner_id: entry.playlist_owner_id, const.notification_collection_content: entry.playlist_contents, const.notification_entity_type: "playlist", } publish_playlist_notif[const.notification_metadata] = metadata created_notifications.append(publish_playlist_notif) notifications_unsorted.extend(created_notifications) logger.info(f"notifications.py | playlists at {datetime.now() - start_time}") # Get additional owner info as requested for listen counts tracks_owner_query = session.query(Track).filter( Track.is_current == True, Track.track_id.in_(track_ids_to_owner) ) track_owner_results = tracks_owner_query.all() for entry in track_owner_results: owner = entry.owner_id track_id = entry.track_id owner_info[const.tracks][track_id] = owner logger.info( f"notifications.py | owner info at {datetime.now() - start_time}, owners {len(track_owner_results)}" ) # Get playlist updates today = date.today() thirty_days_ago = today - timedelta(days=30) thirty_days_ago_time = datetime( thirty_days_ago.year, thirty_days_ago.month, thirty_days_ago.day, 0, 0, 0 ) playlist_update_query = session.query(Playlist) playlist_update_query = playlist_update_query.filter( Playlist.is_current == True, Playlist.is_delete == False, Playlist.last_added_to >= thirty_days_ago_time, Playlist.blocknumber > min_block_number, Playlist.blocknumber <= max_block_number, ) playlist_update_results = 
playlist_update_query.all() logger.info( f"notifications.py | get playlist updates at {datetime.now() - start_time}, playlist updates {len(playlist_update_results)}" ) # Represents all playlist update notifications playlist_update_notifications = [] playlist_update_notifs_by_playlist_id = {} for entry in playlist_update_results: playlist_update_notifs_by_playlist_id[entry.playlist_id] = { const.notification_type: const.notification_type_playlist_update, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.playlist_owner_id, const.notification_metadata: { const.notification_entity_id: entry.playlist_id, const.notification_entity_type: "playlist", const.notification_playlist_update_timestamp: entry.last_added_to, }, } # get all favorited playlists # playlists may have been favorited outside the blocknumber bounds # e.g. before the min_block_number playlist_favorites_query = session.query(Save) playlist_favorites_query = playlist_favorites_query.filter( Save.is_current == True, Save.is_delete == False, Save.save_type == SaveType.playlist, Save.save_item_id.in_(playlist_update_notifs_by_playlist_id.keys()), ) playlist_favorites_results = playlist_favorites_query.all() logger.info( f"notifications.py | get playlist favorites {datetime.now() - start_time}, playlist favorites {len(playlist_favorites_results)}" ) # dictionary of playlist id => users that favorited said playlist # e.g. { playlist1: [user1, user2, ...], ... } # we need this dictionary to know which users need to be notified of a playlist update users_that_favorited_playlists_dict = {} for result in playlist_favorites_results: if result.save_item_id in users_that_favorited_playlists_dict: users_that_favorited_playlists_dict[result.save_item_id].append( result.user_id ) else: users_that_favorited_playlists_dict[result.save_item_id] = [ result.user_id ] logger.info( f"notifications.py | computed users that favorited dict {datetime.now() - start_time}" ) for playlist_id in users_that_favorited_playlists_dict: # TODO: We probably do not need this check because we are filtering # playlist_favorites_query to only matching ids if playlist_id not in playlist_update_notifs_by_playlist_id: continue playlist_update_notif = playlist_update_notifs_by_playlist_id[playlist_id] playlist_update_notif[const.notification_metadata].update( { const.notification_playlist_update_users: users_that_favorited_playlists_dict[ playlist_id ] } ) playlist_update_notifications.append(playlist_update_notif) notifications_unsorted.extend(playlist_update_notifications) logger.info( f"notifications.py | all playlist updates at {datetime.now() - start_time}" ) # Final sort - TODO: can we sort by timestamp? sorted_notifications = sorted( notifications_unsorted, key=lambda i: i[const.notification_blocknumber], reverse=False, ) logger.info( f"notifications.py | sorted notifications {datetime.now() - start_time}" ) return api_helpers.success_response( { "notifications": sorted_notifications, "info": notification_metadata, "milestones": milestone_info, "owners": owner_info, } )
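# Shape of the success payload returned above ("notifications" is sorted
# ascending by blocknumber; milestone and owner sub-keys use the corresponding
# `const.*` constant values):
#   {
#     "notifications": [...],
#     "info": notification_metadata,   # the min/max block window queried
#     "milestones": milestone_info,    # favorite / repost (and follower) counts
#     "owners": owner_info,            # {tracks: {id: owner}, albums: {...}, playlists: {...}}
#   }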
def notifications(): db = get_db_read_replica() min_block_number = request.args.get("min_block_number", type=int) max_block_number = request.args.get("max_block_number", type=int) track_ids_to_owner = [] try: track_ids_str_list = request.args.getlist("track_id") track_ids_to_owner = [int(y) for y in track_ids_str_list] except Exception as e: logger.error(f'Failed to retrieve track list {e}') # Max block number is not explicitly required (yet) if not min_block_number and min_block_number != 0: return api_helpers.error_response({'msg': 'Missing min block number'}, 500) if not max_block_number: max_block_number = min_block_number + max_block_diff elif (max_block_number - min_block_number) > max_block_diff: max_block_number = (min_block_number + max_block_diff) with db.scoped_session() as session: current_block_query = session.query(Block).filter_by(is_current=True) current_block_query_results = current_block_query.all() current_block = current_block_query_results[0] current_max_block_num = current_block.number if current_max_block_num < max_block_number: max_block_number = current_max_block_num notification_metadata = { 'min_block_number': min_block_number, 'max_block_number': max_block_number } # Retrieve milestones statistics milestone_info = {} # Cache owner info for network entities and pass in w/results owner_info = {const.tracks: {}, const.albums: {}, const.playlists: {}} # List of notifications generated from current protocol state notifications_unsorted = [] with db.scoped_session() as session: # Query relevant follow information follow_query = session.query(Follow) # Impose min block number restriction follow_query = follow_query.filter( Follow.is_current == True, Follow.is_delete == False, Follow.blocknumber > min_block_number, Follow.blocknumber <= max_block_number) follow_results = follow_query.all() # Used to retrieve follower counts for this window followed_users = [] # Represents all follow notifications follow_notifications = [] for entry in follow_results: follow_notif = { const.notification_type: \ const.notification_type_follow, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.follower_user_id, const.notification_metadata: { const.notification_follower_id: entry.follower_user_id, const.notification_followee_id: entry.followee_user_id } } follow_notifications.append(follow_notif) # Add every user who gained a new follower followed_users.append(entry.followee_user_id) # Query count for any user w/new followers follower_counts = get_follower_count_dict(session, followed_users, max_block_number) milestone_info['follower_counts'] = follower_counts notifications_unsorted.extend(follow_notifications) # Query relevant favorite information favorites_query = session.query(Save) favorites_query = favorites_query.filter( Save.is_current == True, Save.is_delete == False, Save.blocknumber > min_block_number, Save.blocknumber <= max_block_number) favorite_results = favorites_query.all() # ID lists to query count aggregates favorited_track_ids = [] favorited_album_ids = [] favorited_playlist_ids = [] # List of favorite notifications favorite_notifications = [] for entry in favorite_results: favorite_notif = { const.notification_type: \ const.notification_type_favorite, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.user_id } save_type = entry.save_type save_item_id = entry.save_item_id metadata = { 
const.notification_entity_type: save_type, const.notification_entity_id: save_item_id } # NOTE if deleted, the favorite can still exist # TODO: Can we aggregate all owner queries and perform at once...? if save_type == SaveType.track: owner_id = get_owner_id(session, 'track', save_item_id) if not owner_id: continue metadata[const.notification_entity_owner_id] = owner_id favorited_track_ids.append(save_item_id) owner_info[const.tracks][save_item_id] = owner_id elif save_type == SaveType.album: owner_id = get_owner_id(session, 'album', save_item_id) if not owner_id: continue metadata[const.notification_entity_owner_id] = owner_id favorited_album_ids.append(save_item_id) owner_info[const.albums][save_item_id] = owner_id elif save_type == SaveType.playlist: owner_id = get_owner_id(session, 'playlist', save_item_id) if not owner_id: continue metadata[const.notification_entity_owner_id] = owner_id favorited_playlist_ids.append(save_item_id) owner_info[const.playlists][save_item_id] = owner_id favorite_notif[const.notification_metadata] = metadata favorite_notifications.append(favorite_notif) notifications_unsorted.extend(favorite_notifications) track_favorite_dict = {} album_favorite_dict = {} playlist_favorite_dict = {} if favorited_track_ids: track_favorite_counts = \ get_save_counts(session, False, False, favorited_track_ids, [SaveType.track], max_block_number) track_favorite_dict = \ {track_id: fave_count for (track_id, fave_count) in track_favorite_counts} if favorited_album_ids: album_favorite_counts = \ get_save_counts(session, False, False, favorited_album_ids, [SaveType.album], max_block_number) album_favorite_dict = \ {album_id: fave_count for (album_id, fave_count) in album_favorite_counts} if favorited_playlist_ids: playlist_favorite_counts = \ get_save_counts(session, False, False, favorited_playlist_ids, [SaveType.playlist], max_block_number) playlist_favorite_dict = \ {playlist_id: fave_count for (playlist_id, fave_count) in playlist_favorite_counts} milestone_info[const.notification_favorite_counts] = {} milestone_info[const.notification_favorite_counts][ const.tracks] = track_favorite_dict milestone_info[const.notification_favorite_counts][ const.albums] = album_favorite_dict milestone_info[const.notification_favorite_counts][ const.playlists] = playlist_favorite_dict # # Query relevant repost information # repost_query = session.query(Repost) repost_query = repost_query.filter( Repost.is_current == True, Repost.is_delete == False, Repost.blocknumber > min_block_number, Repost.blocknumber <= max_block_number) repost_results = repost_query.all() # ID lists to query counts reposted_track_ids = [] reposted_album_ids = [] reposted_playlist_ids = [] # List of repost notifications repost_notifications = [] for entry in repost_results: repost_notif = { const.notification_type: \ const.notification_type_repost, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.user_id } repost_type = entry.repost_type repost_item_id = entry.repost_item_id metadata = { const.notification_entity_type: repost_type, const.notification_entity_id: repost_item_id } if repost_type == RepostType.track: owner_id = get_owner_id(session, 'track', repost_item_id) if not owner_id: continue metadata[const.notification_entity_owner_id] = owner_id reposted_track_ids.append(repost_item_id) owner_info[const.tracks][repost_item_id] = owner_id elif repost_type == RepostType.album: owner_id = get_owner_id(session, 'album', repost_item_id) if 
not owner_id: continue metadata[const.notification_entity_owner_id] = owner_id reposted_album_ids.append(repost_item_id) owner_info[const.albums][repost_item_id] = owner_id elif repost_type == RepostType.playlist: owner_id = get_owner_id(session, 'playlist', repost_item_id) if not owner_id: continue metadata[const.notification_entity_owner_id] = owner_id reposted_playlist_ids.append(repost_item_id) owner_info[const.playlists][repost_item_id] = owner_id repost_notif[const.notification_metadata] = metadata repost_notifications.append(repost_notif) # Append repost notifications notifications_unsorted.extend(repost_notifications) track_repost_count_dict = {} album_repost_count_dict = {} playlist_repost_count_dict = {} # Aggregate repost counts for relevant fields # Used to notify users of entity-specific milestones if reposted_track_ids: track_repost_counts = \ get_repost_counts( session, False, False, reposted_track_ids, [RepostType.track], max_block_number) track_repost_count_dict = \ {track_id: repost_count \ for (track_id, repost_count) in track_repost_counts} if reposted_album_ids: album_repost_counts = \ get_repost_counts( session, False, False, reposted_album_ids, [RepostType.album], max_block_number) album_repost_count_dict = \ {album_id: repost_count \ for (album_id, repost_count) in album_repost_counts} if reposted_playlist_ids: playlist_repost_counts = \ get_repost_counts( session, False, False, reposted_playlist_ids, [RepostType.playlist], max_block_number) playlist_repost_count_dict = \ {playlist_id: repost_count \ for (playlist_id, repost_count) in playlist_repost_counts} milestone_info[const.notification_repost_counts] = {} milestone_info[const.notification_repost_counts][ const.tracks] = track_repost_count_dict milestone_info[const.notification_repost_counts][ const.albums] = album_repost_count_dict milestone_info[const.notification_repost_counts][ const.playlists] = playlist_repost_count_dict # Query relevant created entity notification - tracks/albums/playlists created_notifications = [] # Aggregate track notifs tracks_query = session.query(Track) # TODO: Is it valid to use Track.is_current here? Might not be the right info... tracks_query = tracks_query.filter( Track.is_unlisted == False, Track.is_delete == False, Track.blocknumber > min_block_number, Track.blocknumber <= max_block_number) tracks_query = tracks_query.filter( Track.created_at == Track.updated_at) track_results = tracks_query.all() for entry in track_results: track_notif = { const.notification_type: \ const.notification_type_create, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.owner_id, # TODO: is entity owner id necessary for tracks? const.notification_metadata: { const.notification_entity_type: 'track', const.notification_entity_id: entry.track_id, const.notification_entity_owner_id: entry.owner_id } } created_notifications.append(track_notif) # Aggregate playlist/album notifs collection_query = session.query(Playlist) # TODO: Is it valid to use is_current here? Might not be the right info... 
collection_query = collection_query.filter( Playlist.is_delete == False, Playlist.is_private == False, Playlist.blocknumber > min_block_number, Playlist.blocknumber <= max_block_number) collection_query = collection_query.filter( Playlist.created_at == Playlist.updated_at) collection_results = collection_query.all() for entry in collection_results: collection_notif = { const.notification_type: \ const.notification_type_create, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.playlist_owner_id } metadata = { const.notification_entity_id: entry.playlist_id, const.notification_entity_owner_id: entry.playlist_owner_id, const.notification_collection_content: entry.playlist_contents } if entry.is_album: metadata[const.notification_entity_type] = 'album' else: metadata[const.notification_entity_type] = 'playlist' collection_notif[const.notification_metadata] = metadata created_notifications.append(collection_notif) # Playlists that were private and turned to public aka 'published' # TODO: Consider switching blocknumber for updated at? publish_playlists_query = session.query(Playlist) publish_playlists_query = publish_playlists_query.filter( Playlist.is_private == False, Playlist.created_at != Playlist.updated_at, Playlist.blocknumber > min_block_number, Playlist.blocknumber <= max_block_number) publish_playlist_results = publish_playlists_query.all() for entry in publish_playlist_results: prev_entry_query = (session.query(Playlist).filter( Playlist.playlist_id == entry.playlist_id, Playlist.blocknumber < entry.blocknumber).order_by( desc(Playlist.blocknumber))) # Previous private entry indicates transition to public, triggering a notification prev_entry = prev_entry_query.first() if prev_entry.is_private == True: publish_playlist_notif = { const.notification_type: \ const.notification_type_create, const.notification_blocknumber: entry.blocknumber, const.notification_timestamp: entry.created_at, const.notification_initiator: entry.playlist_owner_id } metadata = { const.notification_entity_id: entry.playlist_id, const.notification_entity_owner_id: entry.playlist_owner_id, const.notification_collection_content: entry.playlist_contents, const.notification_entity_type: 'playlist' } publish_playlist_notif[const.notification_metadata] = metadata created_notifications.append(publish_playlist_notif) notifications_unsorted.extend(created_notifications) # Get additional owner info as requested for listen counts tracks_owner_query = (session.query(Track).filter( Track.is_current == True, Track.track_id.in_(track_ids_to_owner))) track_owner_results = tracks_owner_query.all() for entry in track_owner_results: owner = entry.owner_id track_id = entry.track_id owner_info[const.tracks][track_id] = owner # Final sort - TODO: can we sort by timestamp? sorted_notifications = \ sorted(notifications_unsorted, key=lambda i: i[const.notification_blocknumber], reverse=False) return api_helpers.success_response({ 'notifications': sorted_notifications, 'info': notification_metadata, 'milestones': milestone_info, 'owners': owner_info })
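# A minimal request sketch for the notifications handler above (the route path
# and host are assumptions; route registration is not shown in this file):
#
#   GET <discovery-node>/notifications?min_block_number=100&track_id=7&track_id=9
#
# `min_block_number` is required (0 is accepted); `max_block_number` defaults to
# min_block_number + max_block_diff and is clamped both to that window and to
# the current indexed block. Repeated `track_id` params request owner info for
# those tracks, surfaced under "owners" in the response.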
def trending(time): identity_url = shared_config['discprov']['identity_service_url'] identity_trending_endpoint = urljoin(identity_url, f"/tracks/trending/{time}") (limit, offset) = get_pagination_vars() queryparams = {} queryparams["limit"] = limit queryparams["offset"] = offset resp = None try: resp = requests.get(identity_trending_endpoint, params=queryparams) except Exception as e: logger.error( f'Error retrieving trending info - {identity_trending_endpoint}, {queryparams}' ) raise e json_resp = resp.json() if "error" in json_resp: return api_helpers.error_response(json_resp["error"], 500) listen_counts = json_resp["listenCounts"] # Convert trackId to snakeCase for track_entry in listen_counts: track_entry[response_name_constants.track_id] = track_entry['trackId'] del track_entry['trackId'] track_ids = [ track[response_name_constants.track_id] for track in listen_counts ] db = get_db() with db.scoped_session() as session: # Filter tracks to not-deleted ones so trending order is preserved not_deleted_track_ids = (session.query(Track.track_id).filter( Track.track_id.in_(track_ids), Track.is_current == True, Track.is_delete == False).all()) not_deleted_track_ids = set( [record[0] for record in not_deleted_track_ids]) # Query repost counts repost_counts = get_repost_counts(session, False, True, not_deleted_track_ids, None) track_repost_counts = { repost_item_id: repost_count for (repost_item_id, repost_count, repost_type) in repost_counts if repost_type == RepostType.track } # Query follower info for each track owner # Query each track owner track_owners_query = (session.query( Track.track_id, Track.owner_id).filter( Track.is_current == True, Track.track_id.in_(not_deleted_track_ids)).all()) # Generate track_id <-> owner_id mapping track_owner_dict = { track_id: owner_id for (track_id, owner_id) in track_owners_query } # Generate list of owner ids track_owner_list = [ owner_id for (track_id, owner_id) in track_owners_query ] # build dict of owner_id --> follower_count follower_counts = (session.query( Follow.followee_user_id, func.count(Follow.followee_user_id)).filter( Follow.is_current == True, Follow.is_delete == False, Follow.followee_user_id.in_(track_owner_list)).group_by( Follow.followee_user_id).all()) follower_count_dict = \ {user_id: follower_count for (user_id, follower_count) in follower_counts} save_counts = get_save_counts(session, False, True, not_deleted_track_ids, None) track_save_counts = { save_item_id: save_count for (save_item_id, save_count, save_type) in save_counts if save_type == SaveType.track } trending = [] for track_entry in listen_counts: # Skip over deleted tracks if (track_entry[response_name_constants.track_id] not in not_deleted_track_ids): continue # Populate repost counts if track_entry[ response_name_constants.track_id] in track_repost_counts: track_entry[response_name_constants.repost_count] = \ track_repost_counts[track_entry[response_name_constants.track_id]] else: track_entry[response_name_constants.repost_count] = 0 # Populate save counts if track_entry[ response_name_constants.track_id] in track_save_counts: track_entry[response_name_constants.save_count] = \ track_save_counts[track_entry[response_name_constants.track_id]] else: track_entry[response_name_constants.save_count] = 0 # Populate listen counts owner_id = track_owner_dict[track_entry[ response_name_constants.track_id]] owner_follow_count = 0 if owner_id in follower_count_dict: owner_follow_count = follower_count_dict[owner_id] track_entry[response_name_constants.track_owner_id] = 
owner_id
            track_entry[response_name_constants.track_owner_follower_count] = owner_follow_count
            trending.append(track_entry)

        final_resp = {}
        final_resp['listen_counts'] = trending
        return api_helpers.success_response(final_resp)
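# A minimal request sketch for the trending handler above (route path is an
# assumption; the handler proxies the identity service's /tracks/trending/<time>
# endpoint and enriches each entry):
#
#   GET <discovery-node>/trending/week?limit=10&offset=0
#
# Each entry in "listen_counts" carries the identity service's listen data
# (with `trackId` converted to the snake_case track_id key) plus repost_count,
# save_count, the track owner id, and the owner's follower count; deleted or
# non-current tracks are dropped while preserving the trending order.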