def _get_underground_trending_with_session(
    session: Session,
    args: GetUndergroundTrendingTrackcArgs,
    strategy,
    use_request_context=True,
):
    current_user_id = args.get("current_user_id", None)
    limit, offset = args.get("limit"), args.get("offset")
    key = make_underground_trending_cache_key(strategy.version)

    (tracks, track_ids) = use_redis_cache(
        key, None, make_get_unpopulated_tracks(session, redis_conn, strategy)
    )

    # Apply limit + offset early to reduce the amount of
    # population work we have to do
    if limit is not None and offset is not None:
        track_ids = track_ids[offset : limit + offset]

    tracks = populate_track_metadata(session, track_ids, tracks, current_user_id)

    tracks_map = {track["track_id"]: track for track in tracks}

    # Re-sort the populated tracks because the SQL query loses the sort order
    sorted_tracks = [tracks_map[track_id] for track_id in track_ids]
    user_id_list = get_users_ids(sorted_tracks)
    users = get_users_by_id(session, user_id_list, current_user_id, use_request_context)
    for track in sorted_tracks:
        user = users[track["owner_id"]]
        if user:
            track["user"] = user
    sorted_tracks = list(map(extend_track, sorted_tracks))
    return sorted_tracks
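# Note: `use_redis_cache(key, ttl, fn)` is not defined in these examples. The
# sketch below shows one way such a cache-or-compute helper could work; it is
# an assumption, not the actual implementation, and `redis_conn` plus the JSON
# serialization are placeholders.
import json

import redis

redis_conn = redis.Redis()  # assumed module-level connection


def use_redis_cache(key, ttl_sec, work_fn):
    """Return the cached value for `key` if present; otherwise compute it with
    `work_fn`, store it (with no expiry when `ttl_sec` is None), and return it."""
    cached = redis_conn.get(key)
    if cached is not None:
        return json.loads(cached)
    result = work_fn()
    serialized = json.dumps(result)
    if ttl_sec is None:
        redis_conn.set(key, serialized)
    else:
        redis_conn.set(key, serialized, ex=ttl_sec)
    return result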
Example No. 2
def get_trending_tracks(args, strategy):
    """Gets trending by getting the currently cached tracks and then populating them."""
    db = get_db_read_replica()
    with db.scoped_session() as session:
        current_user_id, genre, time = (
            args.get("current_user_id"),
            args.get("genre"),
            args.get("time", "week"),
        )
        time_range = "week" if time not in ["week", "month", "year"] else time
        key = make_trending_cache_key(time_range, genre, strategy.version)

        # Try to hit trending cached by the background task, falling back
        # to generating it here if necessary and storing it with no TTL
        (tracks, track_ids) = use_redis_cache(
            key, None,
            make_generate_unpopulated_trending(session, genre, time_range,
                                               strategy))

        # populate track metadata
        tracks = populate_track_metadata(session, track_ids, tracks,
                                         current_user_id)
        tracks_map = {track['track_id']: track for track in tracks}

        # Re-sort the populated tracks because the SQL query loses the sort order
        sorted_tracks = [tracks_map[track_id] for track_id in track_ids]

        if args.get("with_users", False):
            user_id_list = get_users_ids(sorted_tracks)
            users = get_users_by_id(session, user_id_list, current_user_id)
            for track in sorted_tracks:
                user = users[track['owner_id']]
                if user:
                    track['user'] = user
        return sorted_tracks
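# A hedged usage sketch for get_trending_tracks above: the arg keys mirror the
# ones read in its body, the "Electronic" genre value is hypothetical, and
# `strategy` is whatever trending strategy object the caller already holds.
def example_monthly_trending(strategy, current_user_id=None):
    args = {
        "current_user_id": current_user_id,
        "genre": "Electronic",
        "time": "month",
        "with_users": True,
    }
    return get_trending_tracks(args, strategy)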
def _get_underground_trending(args, strategy):
    db = get_db_read_replica()
    with db.scoped_session() as session:
        current_user_id = args.get("current_user_id", None)
        limit, offset = args.get("limit"), args.get("offset")
        key = make_underground_trending_cache_key(strategy.version)

        (tracks, track_ids) = use_redis_cache(
            key, None, make_get_unpopulated_tracks(session, redis, strategy))

        # Apply limit + offset early to reduce the amount of
        # population work we have to do
        if limit is not None and offset is not None:
            track_ids = track_ids[offset:limit + offset]

        tracks = populate_track_metadata(session, track_ids, tracks,
                                         current_user_id)

        tracks_map = {track['track_id']: track for track in tracks}

        # Re-sort the populated tracks because the SQL query loses the sort order
        sorted_tracks = [tracks_map[track_id] for track_id in track_ids]
        user_id_list = get_users_ids(sorted_tracks)
        users = get_users_by_id(session, user_id_list, current_user_id)
        for track in sorted_tracks:
            user = users[track['owner_id']]
            if user:
                track['user'] = user
        sorted_tracks = list(map(extend_track, sorted_tracks))
        return sorted_tracks
Example No. 4
def get_full_trending_playlists(request, args, strategy):
    offset, limit = format_offset(args), format_limit(args, TRENDING_LIMIT)
    current_user_id, time = args.get("user_id"), args.get("time", "week")
    time = "week" if time not in ["week", "month", "year"] else time

    # If we have a user_id, call into `get_trending_playlists`,
    # which fetches the cached unpopulated playlists and then
    # populates metadata. Otherwise, just retrieve the last cached value.
    #
    # If we have a current_user_id, limit + offset are applied inside the
    # cached calculation. Otherwise, apply them here.
    if current_user_id:
        args = {
            'time': time,
            'with_tracks': True,
            'limit': limit,
            'offset': offset
        }
        decoded = decode_string_id(current_user_id)
        args["current_user_id"] = decoded
        playlists = get_trending_playlists(args, strategy)
    else:
        args = {
            'time': time,
            'with_tracks': True,
        }
        key = get_trending_cache_key(to_dict(request.args), request.path)
        playlists = use_redis_cache(
            key, TRENDING_TTL_SEC,
            lambda: get_trending_playlists(args, strategy))
        playlists = playlists[offset:limit + offset]

    return playlists
Example No. 5
def get_time_trending(cache_args, time, limit):
    time_params = {**cache_args, 'time': time}
    time_cache_key = extract_key(request_cache_path, time_params.items())
    time_trending = use_redis_cache(time_cache_key, TRENDING_TTL_SEC, lambda: get_trending(time_params))
    time_trending_track_ids = [{"track_id": track['track_id']} for track in time_trending]
    time_trending_track_ids = time_trending_track_ids[:limit]
    return time_trending_track_ids
def get_full_recommended_tracks(request, args, strategy):
    # Attempt to use the cached tracks list
    if args["user_id"] is not None:
        full_recommended = get_recommended_tracks(args, strategy)
    else:
        key = get_trending_cache_key(to_dict(request.args), request.path)
        full_recommended = use_redis_cache(
            key, TRENDING_TTL_SEC,
            lambda: get_recommended_tracks(args, strategy))
    return full_recommended
Example No. 7
def get_trending_tracks(args):
    limit, offset, current_user_id = (
        args.get("limit"),
        args.get("offset"),
        args.get("current_user_id"),
    )

    db = get_db_read_replica()

    time = args.get('time')
    query_time = None if time not in ["day", "week", "month", "year"] else time

    with db.scoped_session() as session:

        def get_unpopulated_trending():
            trending_tracks = generate_trending(session, query_time,
                                                args.get('genre', None), limit,
                                                offset)

            track_scores = [
                z(time, track) for track in trending_tracks['listen_counts']
            ]
            sorted_track_scores = sorted(track_scores,
                                         key=lambda k: k['score'],
                                         reverse=True)

            track_ids = [track['track_id'] for track in sorted_track_scores]

            tracks = get_unpopulated_tracks(session, track_ids)
            return (tracks, track_ids)

        # get scored trending tracks, either
        # through cached redis value, or through `get_unpopulated_trending`
        cache_keys = {"genre": args.get("genre"), "time": args.get("time")}
        key = extract_key(f"generated-trending:{request.path}",
                          cache_keys.items())
        (tracks, track_ids) = use_redis_cache(key, SCORES_CACHE_DURATION_SEC,
                                              get_unpopulated_trending)

        # populate track metadata
        tracks = populate_track_metadata(session, track_ids, tracks,
                                         current_user_id)
        tracks_map = {track['track_id']: track for track in tracks}

        # Re-sort the populated tracks because the SQL query loses the sort order
        sorted_tracks = [tracks_map[track_id] for track_id in track_ids]

        if args.get("with_users", False):
            user_id_list = get_users_ids(sorted_tracks)
            users = get_users_by_id(session, user_id_list, current_user_id)
            for track in sorted_tracks:
                user = users[track['owner_id']]
                if user:
                    track['user'] = user
        return sorted_tracks
Example No. 8
def get_full_trending(request, args, strategy):
    offset = format_offset(args)
    limit = format_limit(args, TRENDING_LIMIT)
    key = get_trending_cache_key(to_dict(request.args), request.path)

    # Attempt to use the cached tracks list
    if args['user_id'] is not None:
        full_trending = get_trending(args, strategy)
    else:
        full_trending = use_redis_cache(key, TRENDING_TTL_SEC,
                                        lambda: get_trending(args, strategy))
    trending_tracks = full_trending[offset:limit + offset]
    return trending_tracks
Example No. 9
    def get(self):
        args = full_random_track_parser.parse_args()
        limit = format_limit(args, default_limit=DEFAULT_RANDOM_LIMIT)
        args['limit'] = max(TRENDING_LIMIT, limit)
        key = self.get_cache_key()

        # Attempt to use the cached tracks list
        if args['user_id'] is not None:
            full_random = get_random_tracks(args)
        else:
            full_random = use_redis_cache(key, TRENDING_TTL_SEC,
                                          lambda: get_random_tracks(args))
        random = full_random[:limit]
        return success_response(random)
Example No. 10
    def get(self):
        args = full_trending_parser.parse_args()
        offset = format_offset(args)
        limit = format_limit(args, TRENDING_LIMIT)
        key = self.get_cache_key()

        # Attempt to use the cached tracks list
        if args['user_id'] is not None:
            full_trending = get_trending(args)
        else:
            full_trending = use_redis_cache(key, TRENDING_TTL_SEC,
                                            lambda: get_trending(args))
        trending = full_trending[offset:limit + offset]
        return success_response(trending)
Example No. 11
def get_remix_track_parents(args):
    """Fetch remix parents for a given track.

    Args:
        args:dict
        args.track_id: track id
        args.limit: limit
        args.offset: offset
        args.with_users: with users
        args.current_user_id: current user ID
    """
    track_id = args.get("track_id")
    current_user_id = args.get("current_user_id")
    limit = args.get("limit")
    offset = args.get("offset")
    db = get_db_read_replica()

    with db.scoped_session() as session:

        def get_unpopulated_remix_parents():
            base_query = (
                session.query(Track)
                .join(
                    Remix,
                    and_(
                        Remix.parent_track_id == Track.track_id,
                        Remix.child_track_id == track_id,
                    ),
                )
                .filter(Track.is_current == True, Track.is_unlisted == False)
                .order_by(desc(Track.created_at), desc(Track.track_id))
            )

            tracks = add_query_pagination(base_query, limit, offset).all()
            tracks = helpers.query_result_to_list(tracks)
            track_ids = list(map(lambda track: track["track_id"], tracks))
            return (tracks, track_ids)

        key = make_cache_key(args)
        (tracks, track_ids) = use_redis_cache(
            key,
            UNPOPULATED_REMIX_PARENTS_CACHE_DURATION_SEC,
            get_unpopulated_remix_parents,
        )

        tracks = populate_track_metadata(session, track_ids, tracks, current_user_id)
        if args.get("with_users", False):
            add_users_to_tracks(session, tracks, current_user_id)

    return tracks
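# A hedged usage sketch for get_remix_track_parents above, following the arg
# shape described in its docstring (the concrete values are hypothetical):
def example_remix_parents(track_id, current_user_id=None):
    args = {
        "track_id": track_id,
        "limit": 10,
        "offset": 0,
        "with_users": True,
        "current_user_id": current_user_id,
    }
    return get_remix_track_parents(args)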
def get_time_trending(cache_args, time, limit, strategy):
    time_params = {**cache_args, "time": time}

    path = request_cache_path
    if strategy.version != DEFAULT_TRENDING_VERSIONS[TrendingType.TRACKS]:
        path += f"/{strategy.version.value}"

    time_cache_key = extract_key(path, time_params.items())
    time_trending = use_redis_cache(
        time_cache_key, TRENDING_TTL_SEC,
        lambda: get_trending(time_params, strategy))
    time_trending_track_ids = [{
        "track_id": track["track_id"]
    } for track in time_trending]
    time_trending_track_ids = time_trending_track_ids[:limit]
    return time_trending_track_ids
def get_underground_trending(request, args, strategy):
    offset, limit = format_offset(args), format_limit(args, TRENDING_LIMIT)
    current_user_id = args.get("user_id")
    args = {'limit': limit, 'offset': offset}

    # If user ID, let _get_underground_trending
    # handle caching + limit + offset
    if current_user_id:
        decoded = decode_string_id(current_user_id)
        args["current_user_id"] = decoded
        trending = _get_underground_trending(args, strategy)
    else:
        # If no user ID, fetch all cached tracks
        # and perform pagination here, passing
        # no args so we get the full list of tracks.
        key = get_trending_cache_key(to_dict(request.args), request.path)
        trending = use_redis_cache(
            key, TRENDING_TTL_SEC,
            lambda: _get_underground_trending({}, strategy))
        trending = trending[offset:limit + offset]
    return trending
def _get_trending_tracks_with_session(session: Session,
                                      args: GetTrendingTracksArgs,
                                      strategy: BaseTrendingStrategy):
    current_user_id, genre, time = (
        args.get("current_user_id"),
        args.get("genre"),
        args.get("time", "week"),
    )
    time_range = "week" if time not in ["week", "month", "year", "allTime"
                                        ] else time
    key = make_trending_cache_key(time_range, genre, strategy.version)

    # Try to hit trending cached by the background task, falling back
    # to generating it here if necessary and storing it with no TTL
    (tracks, track_ids) = use_redis_cache(
        key,
        None,
        make_generate_unpopulated_trending(session, genre, time_range,
                                           strategy),
    )

    # populate track metadata
    tracks = populate_track_metadata(session, track_ids, tracks,
                                     current_user_id)
    tracks_map = {track["track_id"]: track for track in tracks}

    # Re-sort the populated tracks because the SQL query loses the sort order
    sorted_tracks = [tracks_map[track_id] for track_id in track_ids]

    if args.get("with_users", False):
        user_id_list = get_users_ids(sorted_tracks)
        users = get_users_by_id(session, user_id_list, current_user_id)
        for track in sorted_tracks:
            user = users[track["owner_id"]]
            if user:
                track["user"] = user
    return sorted_tracks
def get_remixes_of(args):
    track_id = args.get("track_id")
    current_user_id = args.get("current_user_id")
    limit, offset = args.get("limit"), args.get("offset")
    db = get_db_read_replica()

    with db.scoped_session() as session:

        def get_unpopulated_remixes():

            # Fetch the parent track to get the track's owner id
            parent_track_res = get_unpopulated_tracks(session, [track_id],
                                                      False, False)

            if not parent_track_res or parent_track_res[0] is None:
                raise exceptions.ArgumentError("Invalid track_id provided")

            parent_track = parent_track_res[0]
            track_owner_id = parent_track['owner_id']

            # Create subquery for save counts for sorting
            save_count_subquery = create_save_count_subquery(
                session, SaveType.track)

            # Create subquery for repost counts for sorting
            repost_count_subquery = create_repost_count_subquery(
                session, RepostType.track)

            # Get the 'children' remix tracks.
            # Use the track owner id so tracks reposted/saved by the owner are returned first
            base_query = (
                session.query(
                    Track
                )
                .join(
                    Remix,
                    and_(
                        Remix.child_track_id == Track.track_id,
                        Remix.parent_track_id == track_id
                    )
                ).outerjoin(
                    Save,
                    and_(
                        Save.save_item_id == Track.track_id,
                        Save.save_type == SaveType.track,
                        Save.is_current == True,
                        Save.is_delete == False,
                        Save.user_id == track_owner_id
                    )
                ).outerjoin(
                    Repost,
                    and_(
                        Repost.repost_item_id == Track.track_id,
                        Repost.user_id == track_owner_id,
                        Repost.repost_type == RepostType.track,
                        Repost.is_current == True,
                        Repost.is_delete == False
                    )
                ).outerjoin(
                    repost_count_subquery,
                    repost_count_subquery.c['id'] == Track.track_id
                ).outerjoin(
                    save_count_subquery,
                    save_count_subquery.c['id'] == Track.track_id
                )
                .filter(
                    Track.is_current == True,
                    Track.is_delete == False,
                    Track.is_unlisted == False
                )
                # 1. Co-signed tracks ordered by save + repost count
                # 2. Other tracks ordered by save + repost count
                .order_by(
                    desc(
                        # If there is no "co-sign" for the track (no repost or save from the parent owner),
                        # defer to secondary sort
                        case(
                            [
                                (and_(Repost.created_at == None,
                                      Save.created_at == None), 0),
                            ],
                            else_=(
                                func.coalesce(repost_count_subquery.c.repost_count, 0) + \
                                func.coalesce(save_count_subquery.c.save_count, 0)
                            )
                        )
                    ),
                    # Order by saves + reposts
                    desc(
                        func.coalesce(repost_count_subquery.c.repost_count, 0) + \
                        func.coalesce(save_count_subquery.c.save_count, 0)
                    ),
                    # Ties, pick latest track id
                    desc(Track.track_id)
                )
            )

            (tracks, count) = add_query_pagination(base_query, limit, offset,
                                                   True, True)
            tracks = tracks.all()
            tracks = helpers.query_result_to_list(tracks)
            track_ids = list(map(lambda track: track["track_id"], tracks))
            return (tracks, track_ids, count)

        key = make_cache_key(args)
        (tracks, track_ids,
         count) = use_redis_cache(key, UNPOPULATED_REMIXES_CACHE_DURATION_SEC,
                                  get_unpopulated_remixes)

        tracks = populate_track_metadata(session, track_ids, tracks,
                                         current_user_id)
        if args.get("with_users", False):
            add_users_to_tracks(session, tracks, current_user_id)

    return {'tracks': tracks, 'count': count}
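# A hedged usage sketch for get_remixes_of above; it returns a dict holding the
# populated remix tracks and the total count (keys as in the return statement):
def example_remixes_of(track_id, current_user_id=None):
    args = {
        "track_id": track_id,
        "limit": 10,
        "offset": 0,
        "with_users": True,
        "current_user_id": current_user_id,
    }
    result = get_remixes_of(args)
    return result["tracks"], result["count"]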
Example No. 16
def get_tracks_including_unlisted(args):
    """Fetch a track, allowing unlisted.

    Args:
        args: dict
        args.identifiers: array of { handle, id, url_title} dicts
        args.current_user_id: optional current user ID
        args.filter_deleted: filter deleted tracks
        args.with_users: include users in unlisted tracks
    """
    tracks = []
    identifiers = args["identifiers"]
    for i in identifiers:
        helpers.validate_arguments(i, ["handle", "id", "url_title"])

    current_user_id = args.get("current_user_id")
    db = get_db_read_replica()
    with db.scoped_session() as session:

        def get_unpopulated_track():
            base_query = session.query(Track)
            filter_cond = []

            # Create filter conditions as a list of `and` clauses
            for i in identifiers:
                filter_cond.append(
                    and_(Track.is_current == True, Track.track_id == i["id"]))

            # Pass array of `and` clauses into an `or` clause as destructured *args
            base_query = base_query.filter(or_(*filter_cond))

            # Allow filtering of deletes
            # Note: There is no standard for boolean url parameters, and any value (including 'false')
            # will be evaluated as true, so an explicit check is made for true
            if "filter_deleted" in args:
                filter_deleted = args.get("filter_deleted")
                if filter_deleted:
                    base_query = base_query.filter(Track.is_delete == False)

            # Perform the query
            # TODO: pagination is broken with unlisted tracks
            query_results = paginate_query(base_query).all()
            tracks = helpers.query_result_to_list(query_results)

            # Mapping of track_id -> track object from request;
            # used to check route_id when iterating through identifiers
            identifiers_map = {track["id"]: track for track in identifiers}

            # If the track is unlisted and the generated route_id does not match the route_id in db,
            # filter track out from response
            def filter_fn(track):
                input_track = identifiers_map[track["track_id"]]
                route_id = helpers.create_track_route_id(
                    input_track["url_title"], input_track["handle"])

                return not track["is_unlisted"] or track["route_id"] == route_id

            tracks = list(filter(filter_fn, tracks))

            track_ids = list(map(lambda track: track["track_id"], tracks))
            return (tracks, track_ids)

        key = make_cache_key(args)
        (tracks,
         track_ids) = use_redis_cache(key,
                                      UNPOPULATED_TRACK_CACHE_DURATION_SEC,
                                      get_unpopulated_track)

        # Add users
        if args.get("with_users", False):
            user_id_list = get_users_ids(tracks)
            users = get_users_by_id(session, user_id_list, current_user_id)
            for track in tracks:
                user = users[track["owner_id"]]
                if user:
                    track["user"] = user
        # Populate metadata
        tracks = populate_track_metadata(session, track_ids, tracks,
                                         current_user_id)

    return tracks
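# A hedged usage sketch for get_tracks_including_unlisted above, following the
# identifier shape described in its docstring (handle/id/url_title values are
# hypothetical):
def example_tracks_including_unlisted(current_user_id=None):
    args = {
        "identifiers": [
            {"handle": "some_artist", "id": 123, "url_title": "some-track"},
        ],
        "filter_deleted": True,
        "with_users": True,
        "current_user_id": current_user_id,
    }
    return get_tracks_including_unlisted(args)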
Example No. 17
def get_trending_playlists(args, strategy):
    """Returns Trending Playlists. Checks Redis cache for unpopulated playlists."""
    db = get_db_read_replica()
    with db.scoped_session() as session:
        current_user_id = args.get("current_user_id", None)
        with_tracks = args.get("with_tracks", False)
        time = args.get("time")
        limit, offset = args.get("limit"), args.get("offset")
        key = make_trending_cache_key(time, strategy.version)

        # Get unpopulated playlists, from the cache if available.
        (playlists, playlist_ids) = use_redis_cache(
            key, None, make_get_unpopulated_playlists(session, time, strategy))

        # Apply limit + offset early to reduce the amount of
        # population work we have to do
        if limit is not None and offset is not None:
            playlists = playlists[offset:limit + offset]
            playlist_ids = playlist_ids[offset:limit + offset]

        # Populate playlist metadata
        playlists = populate_playlist_metadata(
            session, playlist_ids, playlists,
            [RepostType.playlist, RepostType.album],
            [SaveType.playlist, SaveType.album], current_user_id)

        trimmed_track_ids = None
        for playlist in playlists:
            playlist["track_count"] = len(playlist["tracks"])
            playlist["tracks"] = playlist["tracks"][:PLAYLIST_TRACKS_LIMIT]
            # Trim track_ids, which ultimately become added_timestamps
            # and need to match the tracks.
            trimmed_track_ids = {
                track["track_id"]
                for track in playlist["tracks"]
            }
            playlist_track_ids = playlist["playlist_contents"]["track_ids"]
            playlist_track_ids = list(
                filter(lambda track_id: track_id["track"] in trimmed_track_ids,
                       playlist_track_ids))
            playlist["playlist_contents"]["track_ids"] = playlist_track_ids

        playlists_map = {
            playlist['playlist_id']: playlist
            for playlist in playlists
        }

        if with_tracks:
            # populate track metadata
            tracks = []
            for playlist in playlists:
                playlist_tracks = playlist["tracks"]
                tracks.extend(playlist_tracks)
            track_ids = [track["track_id"] for track in tracks]
            populated_tracks = populate_track_metadata(session, track_ids,
                                                       tracks, current_user_id)

            # Add users if necessary
            add_users_to_tracks(session, populated_tracks, current_user_id)

            # Re-associate tracks with playlists
            # track_id -> populated_track
            populated_track_map = {
                track["track_id"]: track
                for track in populated_tracks
            }
            for playlist in playlists_map.values():
                for i in range(len(playlist["tracks"])):
                    track_id = playlist["tracks"][i]["track_id"]
                    populated = populated_track_map[track_id]
                    playlist["tracks"][i] = populated
                playlist["tracks"] = list(map(extend_track,
                                              playlist["tracks"]))

        # Re-sort playlists to their original order, since
        # populate_playlist_metadata does not preserve ordering.
        sorted_playlists = [
            playlists_map[playlist_id] for playlist_id in playlist_ids
        ]

        # Add users to playlists
        user_id_list = get_users_ids(sorted_playlists)
        users = get_users_by_id(session, user_id_list, current_user_id)
        for playlist in sorted_playlists:
            user = users[playlist['playlist_owner_id']]
            if user:
                playlist['user'] = user

        # Extend the playlists that are returned
        sorted_playlists = list(map(extend_playlist, sorted_playlists))
        return sorted_playlists
Example No. 18
def get_playlists(args):
    playlists = []
    current_user_id = args.get("current_user_id")

    db = get_db_read_replica()
    with db.scoped_session() as session:

        def get_unpopulated_playlists():
            playlist_query = (session.query(Playlist).filter(
                Playlist.is_current == True))

            # playlist ids filter if the optional query param is passed in
            if "playlist_id" in args:
                playlist_id_list = args.get("playlist_id")
                try:
                    playlist_query = playlist_query.filter(
                        Playlist.playlist_id.in_(playlist_id_list))
                except ValueError as e:
                    raise exceptions.ArgumentError(
                        "Invalid value found in playlist id list", e)

            if "user_id" in args:
                user_id = args.get("user_id")
                # user id filter if the optional query param is passed in
                playlist_query = playlist_query.filter(
                    Playlist.playlist_owner_id == user_id)

            # If no current_user_id, never show hidden playlists
            if not current_user_id:
                playlist_query = playlist_query.filter(
                    Playlist.is_private == False)

            # Filter out deletes unless we're fetching explicitly by id
            if "playlist_id" not in args:
                playlist_query = playlist_query.filter(
                    Playlist.is_delete == False)

            playlist_query = playlist_query.order_by(desc(Playlist.created_at))
            playlists = paginate_query(playlist_query).all()
            playlists = helpers.query_result_to_list(playlists)

            # If we passed in a current_user_id, filter out all private playlists
            # whose owner doesn't match the current_user_id
            if current_user_id:
                playlists = list(
                    filter(
                        lambda playlist: (not playlist["is_private"]) or
                        playlist["playlist_owner_id"] == current_user_id,
                        playlists))

            # retrieve playlist ids list
            playlist_ids = list(
                map(lambda playlist: playlist["playlist_id"], playlists))

            return (playlists, playlist_ids)

        try:
            # Get unpopulated playlists, either via
            # redis cache or via get_unpopulated_playlists
            key = make_cache_key(args)

            (playlists, playlist_ids) = use_redis_cache(
                key, UNPOPULATED_PLAYLIST_CACHE_DURATION_SEC,
                get_unpopulated_playlists)

            # bundle peripheral info into playlist results
            playlists = populate_playlist_metadata(
                session, playlist_ids, playlists,
                [RepostType.playlist, RepostType.album],
                [SaveType.playlist, SaveType.album], current_user_id)

            if args.get("with_users", False):
                user_id_list = get_users_ids(playlists)
                users = get_users_by_id(session, user_id_list, current_user_id)
                for playlist in playlists:
                    user = users[playlist['playlist_owner_id']]
                    if user:
                        playlist['user'] = user

        except sqlalchemy.orm.exc.NoResultFound:
            pass
    return playlists
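# A hedged usage sketch for get_playlists above; the optional filters mirror
# the keys checked inside get_unpopulated_playlists (ids are hypothetical):
def example_playlists_for_user(user_id, current_user_id=None):
    args = {
        "user_id": user_id,
        "with_users": True,
        "current_user_id": current_user_id,
    }
    return get_playlists(args)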
Example No. 19
def refresh_cnodes_from_identity(self):
    registered_cnodes = use_redis_cache(
        'registered_cnodes_from_identity', 30,
        lambda: fetch_cnode_endpoints_from_identity(self))
    return registered_cnodes