Esempio n. 1
0
def validate_arguments(req_args, expected_args):
    """Validate that every expected argument is present in the request args.

    Args:
        req_args: mapping/container of received request arguments.
        expected_args: iterable of argument names that must be present.

    Raises:
        exceptions.ArgumentError: if `req_args` is None or any expected
            argument is missing.
    """
    if req_args is None:
        raise exceptions.ArgumentError("No arguments present.")
    # all() short-circuits on the first missing argument, unlike the
    # previous reduce() which always scanned the entire list.
    if not all(arg in req_args for arg in expected_args):
        raise exceptions.ArgumentError("Not all required arguments exist.")
Esempio n. 2
0
def parse_sort_param(base_query, model, whitelist_sort_params):
    """Apply a `?sort=field:asc,other:desc` query param to `base_query`.

    Each comma-separated entry must be `<field>:<direction>`; fields are
    checked against `whitelist_sort_params` and resolved as attributes of
    `model`. Returns `base_query` unchanged when no sort param is present.
    """
    sort = request.args.get("sort")
    if not sort:
        return base_query

    raw_pairs = [piece.split(':') for piece in sort.split(',')]
    try:
        # pair[1] raises IndexError when an entry has no ':direction' part
        directions = {pair[0]: pair[1] for pair in raw_pairs}
    except IndexError:
        raise exceptions.ArgumentError(
            "Need to specify :asc or :desc on all parameters")

    order_bys = []
    for field_name, direction in directions.items():
        if field_name not in whitelist_sort_params:
            raise exceptions.ArgumentError('Parameter %s is invalid in sort' %
                                           field_name)
        column = getattr(model, field_name)
        order_bys.append(
            column.desc() if direction == 'desc' else column.asc())

    return base_query.order_by(*order_bys)
def get_top_followee_windowed(type, window, args):
    """Return top "track" entities recently created by users the current user follows.

    Tracks are ranked by aggregate repost + save count, limited to tracks
    created within the trailing `window`.

    Args:
        type: entity type; only "track" is supported.
        window: trailing time window — one of "week", "month", "year".
        args: dict with optional keys:
            limit: max tracks to return (default 25).
            user_id: current user id whose followees are queried.
            with_users: when truthy, attach each track's owner under "user".

    Returns:
        list of track dicts with peripheral metadata populated.

    Raises:
        exceptions.ArgumentError: for an unsupported type or window.
    """
    if type != "track":
        raise exceptions.ArgumentError(
            "Invalid type provided, must be one of 'track'")

    valid_windows = ["week", "month", "year"]
    if not window or window not in valid_windows:
        raise exceptions.ArgumentError(
            f"Invalid window provided, must be one of {valid_windows}")

    limit = args.get("limit", 25)

    current_user_id = args.get("user_id")
    db = get_db_read_replica()
    with db.scoped_session() as session:

        # Subquery of all users the current user actively follows
        followee_user_ids = session.query(Follow.followee_user_id).filter(
            Follow.follower_user_id == current_user_id,
            Follow.is_current == True,
            Follow.is_delete == False,
        )
        followee_user_ids_subquery = followee_user_ids.subquery()

        # Queries for tracks joined against followed users and counts
        tracks_query = (
            session.query(Track, ).join(
                followee_user_ids_subquery,
                Track.owner_id ==
                followee_user_ids_subquery.c.followee_user_id,
            ).join(AggregateTrack, Track.track_id == AggregateTrack.track_id).
            filter(
                Track.is_current == True,
                Track.is_delete == False,
                Track.is_unlisted == False,
                Track.stem_of == None,
                # Query only tracks created `window` time ago (week, month, etc.)
                Track.created_at >= text(f"NOW() - interval '1 {window}'"),
            ).order_by(
                desc(AggregateTrack.repost_count + AggregateTrack.save_count),
                desc(Track.track_id),
            ).limit(limit))

        tracks_query_results = tracks_query.all()
        tracks = helpers.query_result_to_list(tracks_query_results)
        track_ids = list(map(lambda track: track["track_id"], tracks))

        # Bundle peripheral info into track results
        tracks = populate_track_metadata(session, track_ids, tracks,
                                         current_user_id)

        if args.get("with_users", False):
            user_id_list = get_users_ids(tracks)
            users = get_users_by_id(session, user_id_list)
            for track in tracks:
                user = users[track["owner_id"]]
                if user:
                    track["user"] = user

    return tracks
Esempio n. 4
0
def get_current_user_id(required=True):
    """Read the current user id from the 'X-User-ID' request header.

    Args:
        required: when True, a missing/empty header raises.

    Returns:
        the user id as an int, or the falsy header value when not required.

    Raises:
        exceptions.ArgumentError: if the header is not an integer, or is
            absent while `required` is True.
    """
    uid = request.headers.get('X-User-ID')
    if uid:
        try:
            uid = int(uid)
        except ValueError:
            raise exceptions.ArgumentError("must be valid integer")
    if required and not uid:
        raise exceptions.ArgumentError(
            "Need to include valid X-User-ID header")

    return uid
Esempio n. 5
0
def _get_trailing_app_metrics(session, args):
    """Get trailing app-name usage metrics for a given time range.

    Args:
        session: SQLAlchemy session.
        args: dict with "limit" (max rows) and "time_range"
            ("week", "month", or "all_time").

    Returns:
        list of {"name", "count"} dicts sorted by count descending, with an
        "unknown" entry covering route counts not attributed to any app.

    Raises:
        exceptions.ArgumentError: on an unrecognized time_range.
    """
    limit, time_range = args.get("limit"), args.get("time_range")

    if time_range == "week":
        query = session.query(AppMetricsTrailingWeek)
        route_query = session.query(RouteMetricsTrailingWeek)
    elif time_range == "month":
        query = session.query(AppMetricsTrailingMonth)
        route_query = session.query(RouteMetricsTrailingMonth)
    elif time_range == "all_time":
        query = session.query(AppMetricsAllTime)
        route_query = session.query(RouteMetricsAllTime)
    else:
        raise exceptions.ArgumentError("Invalid time_range")

    query = (query.order_by(desc('count')).limit(limit).all())

    route_query = route_query.first()

    metrics = [{"name": m.name, "count": m.count} for m in query]

    # Attribute route counts not covered by any named app to "unknown",
    # inserted so the list stays sorted by count (descending).
    existing_count = sum(m["count"] for m in metrics)
    unknown_count = route_query.count - existing_count
    if unknown_count > 0:
        for i, metric in enumerate(metrics):
            if unknown_count > metric['count']:
                metrics.insert(i, {
                    'name': 'unknown',
                    'count': unknown_count,
                })
                break
        else:
            # Bug fix: the old `i == len(metrics)` condition was unreachable
            # under enumerate(), so an "unknown" bucket smaller than every
            # app count (or an empty metrics list) was silently dropped.
            metrics.append({
                'name': 'unknown',
                'count': unknown_count,
            })

    return metrics
def get_previously_private_playlists(args):
    """Find playlists that flipped from private to public around a date.

    A playlist qualifies when it has a private row updated before `date`
    and a public row updated at/after `date`.

    Args:
        args: dict that must contain "date".

    Returns:
        dict of {'ids': [playlist_id, ...]}.

    Raises:
        exceptions.ArgumentError: if "date" is missing.
    """
    # Validate before acquiring a DB session so malformed requests don't
    # consume a connection from the pool.
    if "date" not in args:
        raise exceptions.ArgumentError(
            "'date' required to query for retrieving previously private playlists"
        )

    date = args.get("date")

    db = get_db_read_replica()
    with db.scoped_session() as session:
        playlist_after_date = (session.query(
            Playlist.playlist_id,
            Playlist.updated_at).distinct(Playlist.playlist_id).filter(
                Playlist.is_private == False,
                Playlist.updated_at >= date).subquery())

        playlist_before_date = (session.query(
            Playlist.playlist_id,
            Playlist.updated_at).distinct(Playlist.playlist_id).filter(
                Playlist.is_private == True,
                Playlist.updated_at < date).subquery())

        # Intersect: same playlist id present on both sides of the date.
        previously_private_results = session.query(
            playlist_before_date.c['playlist_id']).join(
                playlist_after_date,
                playlist_after_date.c['playlist_id'] ==
                playlist_before_date.c['playlist_id'],
            ).all()

        playlist_ids = [result[0] for result in previously_private_results]

    return {'ids': playlist_ids}
def get_previously_unlisted_tracks(args):
    """Find tracks that flipped from unlisted to listed around a date.

    A track qualifies when it has an unlisted row updated before `date`
    and a listed row updated at/after `date`.

    Args:
        args: dict that must contain "date".

    Returns:
        dict of {"ids": [track_id, ...]}.

    Raises:
        exceptions.ArgumentError: if "date" is missing.
    """
    # Validate before acquiring a DB session so malformed requests don't
    # consume a connection from the pool.
    if "date" not in args:
        raise exceptions.ArgumentError(
            "'date' required to query for retrieving previously unlisted tracks"
        )

    date = args.get("date")

    db = get_db_read_replica()
    with db.scoped_session() as session:
        tracks_after_date = (session.query(
            Track.track_id, Track.updated_at).distinct(Track.track_id).filter(
                Track.is_unlisted == False,
                Track.updated_at >= date).subquery())

        tracks_before_date = (session.query(
            Track.track_id, Track.updated_at).distinct(Track.track_id).filter(
                Track.is_unlisted == True, Track.updated_at < date).subquery())

        # Intersect: same track id present on both sides of the date.
        previously_unlisted_results = (session.query(
            tracks_before_date.c["track_id"]).join(
                tracks_after_date,
                tracks_after_date.c["track_id"] ==
                tracks_before_date.c["track_id"],
            ).all())

        track_ids = [result[0] for result in previously_unlisted_results]

    return {"ids": track_ids}
Esempio n. 8
0
def get_playlists():
    """Fetch playlists, optionally filtered by playlist ids and/or owner id.

    Private playlists are excluded unless the requesting user is asking for
    their own playlists.

    Returns:
        success response wrapping a list of playlist dicts with metadata.

    Raises:
        exceptions.ArgumentError: if a playlist_id query param is not an int.
    """
    playlists = []
    current_user_id = get_current_user_id(required=False)
    filter_out_private_playlists = True

    db = get_db()
    with db.scoped_session() as session:
        try:
            playlist_query = (session.query(Playlist).filter(
                Playlist.is_current == True))

            # playlist ids filter if the optional query param is passed in
            if "playlist_id" in request.args:
                playlist_id_str_list = request.args.getlist("playlist_id")
                try:
                    playlist_id_list = [int(y) for y in playlist_id_str_list]
                    playlist_query = playlist_query.filter(
                        Playlist.playlist_id.in_(playlist_id_list))
                except ValueError as e:
                    raise exceptions.ArgumentError(
                        "Invalid value found in playlist id list", e)

            if "user_id" in request.args:
                user_id = request.args.get("user_id", type=int)
                # user id filter if the optional query param is passed in
                playlist_query = playlist_query.filter(
                    Playlist.playlist_owner_id == user_id)

                # A user requesting their own playlists may also see their
                # private playlists.
                if current_user_id and user_id and (int(current_user_id)
                                                    == int(user_id)):
                    filter_out_private_playlists = False

            if filter_out_private_playlists:
                playlist_query = playlist_query.filter(
                    Playlist.is_private == False)

            playlist_query = playlist_query.order_by(desc(Playlist.created_at))
            playlists = paginate_query(playlist_query).all()
            playlists = helpers.query_result_to_list(playlists)

            # retrieve playlist ids list
            playlist_ids = list(
                map(lambda playlist: playlist["playlist_id"], playlists))

            # Fix: current_user_id was redundantly re-fetched here; the
            # value resolved at the top of the function is reused instead.
            # bundle peripheral info into playlist results
            playlists = populate_playlist_metadata(
                session, playlist_ids, playlists,
                [RepostType.playlist, RepostType.album],
                [SaveType.playlist, SaveType.album], current_user_id)

        except sqlalchemy.orm.exc.NoResultFound:
            pass

    return api_helpers.success_response(playlists)
Esempio n. 9
0
def search(isAutocomplete):
    """Run a full search across tracks, users, playlists, and albums.

    Args:
        isAutocomplete: when truthy, run the autocomplete variant of each
            underlying search query.

    Returns:
        success response wrapping a dict of result lists, including the
        current user's saved/followed subsets.

    Raises:
        exceptions.ArgumentError: if the 'query' param is missing or empty.
    """
    searchStr = request.args.get("query", type=str)
    if not searchStr:
        raise exceptions.ArgumentError("Invalid value for parameter 'query'")
    searchStr = searchStr.replace('&', 'and')  # when creating query table, we substitute this too

    (limit, offset) = get_pagination_vars()

    results = {
        'tracks': [],
        'users': [],
        'playlists': [],
        'albums': [],
        'saved_tracks': [],
        'followed_users': [],
        'saved_playlists': [],
        'saved_albums': [],
    }

    # searchStr is guaranteed non-empty here (the guard above raises), so
    # the previous redundant `if searchStr:` wrapper was removed.
    db = get_db()
    with db.scoped_session() as session:
        results['tracks'] = track_search_query(session, searchStr, limit, offset, False, isAutocomplete)
        results['users'] = user_search_query(session, searchStr, limit, offset, False, isAutocomplete)
        results['playlists'] = playlist_search_query(
            session,
            searchStr,
            limit,
            offset,
            False,
            False,
            isAutocomplete
        )
        results['albums'] = playlist_search_query(session, searchStr, limit, offset, True, False, isAutocomplete)

        results['saved_tracks'] = track_search_query(session, searchStr, limit, offset, True, isAutocomplete)
        results['followed_users'] = user_search_query(session, searchStr, limit, offset, True, isAutocomplete)
        results['saved_playlists'] = playlist_search_query(
            session,
            searchStr,
            limit,
            offset,
            False,
            True,
            isAutocomplete
        )
        results['saved_albums'] = playlist_search_query(
            session,
            searchStr,
            limit,
            offset,
            True,
            True,
            isAutocomplete
        )

    return api_helpers.success_response(results)
Esempio n. 10
0
def get_creator_node_users():
    """Return users whose content is hosted on the given creator node.

    Reads the required 'creator_node_endpoint' query param; responds with
    a 400 error response when it is missing.
    """
    try:
        if "creator_node_endpoint" not in request.args:
            raise exceptions.ArgumentError("Missing creator_node_endpoint")
        endpoint = request.args.get("creator_node_endpoint")
        return api_helpers.success_response(get_users_cnode(endpoint))
    except exceptions.ArgumentError as e:
        return api_helpers.error_response(str(e), 400)
def get_top_followee_saves(saveType, args):
    """Return "track" entities most saved by users the current user follows.

    Args:
        saveType: entity type; only 'track' is supported.
        args: dict with optional keys:
            limit: max tracks to return (default 25).
            with_users: when truthy, attach each track's owner under 'user'.

    Returns:
        list of track dicts with peripheral metadata populated.

    Raises:
        exceptions.ArgumentError: for an unsupported saveType.
    """
    if saveType != 'track':
        raise exceptions.ArgumentError(
            "Invalid type provided, must be one of 'track'")

    limit = args.get('limit', 25)

    current_user_id = get_current_user_id()
    db = get_db_read_replica()
    with db.scoped_session() as session:
        # Construct a subquery of all followees
        followee_user_ids = (session.query(Follow.followee_user_id).filter(
            Follow.follower_user_id == current_user_id,
            Follow.is_current == True, Follow.is_delete == False))
        followee_user_ids_subquery = followee_user_ids.subquery()

        # Construct a subquery of all saves from followees aggregated by id
        save_count = (session.query(
            Save.save_item_id,
            func.count(Save.save_item_id).label(
                response_name_constants.save_count)).join(
                    followee_user_ids_subquery, Save.user_id ==
                    followee_user_ids_subquery.c.followee_user_id).filter(
                        Save.is_current == True,
                        Save.is_delete == False,
                        Save.save_type == saveType,
                    ).group_by(Save.save_item_id).order_by(
                        desc(response_name_constants.save_count)).limit(limit))
        save_count_subquery = save_count.subquery()

        # Query for tracks joined against followee save counts
        tracks_query = (session.query(Track, ).join(
            save_count_subquery,
            Track.track_id == save_count_subquery.c.save_item_id).filter(
                Track.is_current == True,
                Track.is_delete == False,
                Track.is_unlisted == False,
                Track.stem_of == None,
            ))

        tracks_query_results = tracks_query.all()
        tracks = helpers.query_result_to_list(tracks_query_results)
        track_ids = list(map(lambda track: track['track_id'], tracks))

        # bundle peripheral info into track results
        tracks = populate_track_metadata(session, track_ids, tracks,
                                         current_user_id)

        if args.get('with_users', False):
            user_id_list = get_users_ids(tracks)
            users = get_users_by_id(session, user_id_list)
            for track in tracks:
                user = users[track['owner_id']]
                if user:
                    track['user'] = user

    return tracks
Esempio n. 12
0
def get_saves(save_type):
    """Return the current user's saves of the requested type.

    Args:
        save_type: "albums", "playlists", or "tracks".

    Returns:
        success response wrapping a paginated list of save dicts.

    Raises:
        exceptions.ArgumentError: for an unrecognized save_type.
    """
    type_lookup = {
        'albums': SaveType.album,
        'playlists': SaveType.playlist,
        'tracks': SaveType.track,
    }
    if save_type not in type_lookup:
        raise exceptions.ArgumentError("Invalid save type provided")
    save_query_type = type_lookup[save_type]

    save_results = []
    current_user_id = get_current_user_id()
    db = get_db()
    with db.scoped_session() as session:
        query = session.query(Save).filter(
            Save.user_id == current_user_id,
            Save.is_current == True,
            Save.is_delete == False,
            Save.save_type == save_query_type)

        # Filter out saves that point at deleted / non-current entries.
        if save_type == 'tracks':
            live_ids = session.query(Track.track_id).filter(
                Track.is_current == True)
        else:
            live_ids = session.query(Playlist.playlist_id).filter(
                Playlist.is_album == (save_type == 'albums'),
                Playlist.is_current == True)
        query = query.filter(Save.save_item_id.in_(live_ids))

        query_results = paginate_query(query).all()
        save_results = helpers.query_result_to_list(query_results)
    return api_helpers.success_response(save_results)
Esempio n. 13
0
        def get_users_and_ids():
            """Fetch users matching the enclosing `args` filters.

            Closure over `session` and `args` from the enclosing scope.

            Returns:
                (users, user_ids): list of user dicts and their ids.

            Raises:
                exceptions.ArgumentError: if the id list has a bad value.
            """

            # The shared cache is only usable for a pure id lookup — any
            # other filter would change the result set.
            can_use_shared_cache = (
                "id" in args and
                "is_creator" not in args and
                "wallet" not in args and
                "min_block_number" not in args and
                "handle" not in args
            )

            if can_use_shared_cache:
                users = get_unpopulated_users(session, args.get("id"))
                ids = list(map(lambda user: user["user_id"], users))
                return (users, ids)

            # Create initial query
            base_query = session.query(User)
            # Don't return the user if they have no wallet or handle (user creation did not finish properly on chain)
            base_query = base_query.filter(
                User.is_current == True, User.wallet != None, User.handle != None)

            # Process filters
            if "is_creator" in args:
                base_query = base_query.filter(User.is_creator == args.get("is_creator"))
            if "wallet" in args:
                wallet = args.get("wallet")
                wallet = wallet.lower()
                # Wallets are expected to be 42 chars ('0x' + 40 hex digits);
                # anything else is logged and the filter is skipped.
                if len(wallet) == 42:
                    base_query = base_query.filter_by(wallet=wallet)
                    base_query = base_query.order_by(asc(User.created_at))
                else:
                    logger.warning("Invalid wallet length")
            if "handle" in args:
                handle = args.get("handle").lower()
                base_query = base_query.filter_by(handle_lc=handle)

            # Conditionally process an array of users
            if "id" in args:
                user_id_list = args.get("id")
                try:
                    base_query = base_query.filter(User.user_id.in_(user_id_list))
                except ValueError as e:
                    raise exceptions.ArgumentError(
                        "Invalid value found in user id list", e)
            if "min_block_number" in args:
                base_query = base_query.filter(
                    User.blocknumber >= args.get("min_block_number")
                )
            users = paginate_query(base_query).all()
            users = helpers.query_result_to_list(users)

            user_ids = list(map(lambda user: user["user_id"], users))

            return (users, user_ids)
Esempio n. 14
0
def get_users():
    """Fetch users filtered by optional query params.

    Supported params: is_creator, wallet, handle, id (repeatable),
    min_block_number.

    Returns:
        success response wrapping a paginated list of user dicts with
        peripheral metadata populated.

    Raises:
        exceptions.ArgumentError: if an 'id' param is not an integer.
    """
    users = []
    db = get_db()
    with db.scoped_session() as session:
        # Exclude users with no wallet or handle (on-chain user creation
        # did not finish properly).
        query = session.query(User).filter(
            User.is_current == True,
            User.wallet != None,
            User.handle != None)

        if "is_creator" in request.args:
            query = query.filter(
                User.is_creator == (request.args.get("is_creator") == "true"))

        if "wallet" in request.args:
            wallet = request.args.get("wallet").lower()
            if len(wallet) == 42:
                query = query.filter_by(wallet=wallet)
            else:
                logger.warning("Invalid wallet length")

        if "handle" in request.args:
            query = query.filter_by(
                handle_lc=request.args.get("handle").lower())

        if "id" in request.args:
            raw_ids = request.args.getlist("id")
            try:
                id_list = [int(raw_id) for raw_id in raw_ids]
                query = query.filter(User.user_id.in_(id_list))
            except ValueError as e:
                raise exceptions.ArgumentError(
                    "Invalid value found in user id list", e)

        if "min_block_number" in request.args:
            query = query.filter(
                User.blocknumber >= request.args.get(
                    "min_block_number", type=int))

        users = helpers.query_result_to_list(paginate_query(query).all())
        user_ids = [user["user_id"] for user in users]

        current_user_id = get_current_user_id(required=False)

        # Bundle peripheral metadata into the user results
        users = populate_user_metadata(session, user_ids, users,
                                       current_user_id)

    return api_helpers.success_response(users)
Esempio n. 15
0
def get_sol_play(sol_tx_signature):
    """Look up a single play record by its Solana transaction signature.

    Args:
        sol_tx_signature: the transaction signature to match.

    Returns:
        the play as a dict, or None when no matching record exists.

    Raises:
        exceptions.ArgumentError: if the signature is missing/empty.
    """
    if not sol_tx_signature:
        raise exceptions.ArgumentError("Missing tx signature")

    sol_play = None
    db = get_db_read_replica()
    with db.scoped_session() as session:
        play_record = (
            session.query(Play)
            .filter(Play.signature == sol_tx_signature)
            .first())
        if play_record:
            sol_play = helpers.model_to_dictionary(play_record)

    return sol_play
Esempio n. 16
0
def _get_aggregate_app_metrics(session, time_range, limit):
    """Aggregate app-name request counts over a trailing time window.

    Args:
        session: SQLAlchemy session.
        time_range: one of "week", "month", "all_time".
        limit: max number of apps to return.

    Returns:
        list of {"name", "count"} dicts, ordered by count descending and
        then by application name ascending.

    Raises:
        exceptions.ArgumentError: on an unrecognized time_range.
    """
    today = date.today()

    if time_range in ("week", "month"):
        # week/month share the daily table; only the window start differs.
        # This collapses the previously duplicated branches.
        days = 7 if time_range == "week" else 30
        window_start = today - timedelta(days=days)
        query = (
            session.query(
                AggregateDailyAppNameMetrics.application_name,
                func.sum(AggregateDailyAppNameMetrics.count).label("count"),
            )
            .filter(window_start <= AggregateDailyAppNameMetrics.timestamp)
            .filter(AggregateDailyAppNameMetrics.timestamp < today)
            .group_by(AggregateDailyAppNameMetrics.application_name)
            .order_by(desc("count"), asc(AggregateDailyAppNameMetrics.application_name))
            .limit(limit)
            .all()
        )
    elif time_range == "all_time":
        query = (
            session.query(
                AggregateMonthlyAppNameMetrics.application_name,
                func.sum(AggregateMonthlyAppNameMetrics.count).label("count"),
            )
            .filter(AggregateMonthlyAppNameMetrics.timestamp < today)
            .group_by(AggregateMonthlyAppNameMetrics.application_name)
            .order_by(
                desc("count"), asc(AggregateMonthlyAppNameMetrics.application_name)
            )
            .limit(limit)
            .all()
        )
    else:
        raise exceptions.ArgumentError("Invalid time_range")

    return [{"name": name, "count": count} for name, count in query]
Esempio n. 17
0
        def get_unpopulated_playlists():
            """Fetch playlists matching the enclosing `args` filters.

            Closure over `session`, `args`, and `current_user_id` from the
            enclosing scope.

            Returns:
                (playlists, playlist_ids): list of playlist dicts and ids.

            Raises:
                exceptions.ArgumentError: if the playlist id list has a
                    bad value.
            """
            playlist_query = session.query(Playlist).filter(
                Playlist.is_current == True)

            # playlist ids filter if the optional query param is passed in
            if "playlist_id" in args:
                playlist_id_list = args.get("playlist_id")
                try:
                    playlist_query = playlist_query.filter(
                        Playlist.playlist_id.in_(playlist_id_list))
                except ValueError as e:
                    raise exceptions.ArgumentError(
                        "Invalid value found in playlist id list", e)

            if "user_id" in args:
                user_id = args.get("user_id")
                # user id filter if the optional query param is passed in
                playlist_query = playlist_query.filter(
                    Playlist.playlist_owner_id == user_id)

            # If no current_user_id, never show hidden playlists
            if not current_user_id:
                playlist_query = playlist_query.filter(
                    Playlist.is_private == False)

            # Filter out deletes unless we're fetching explicitly by id
            if "playlist_id" not in args:
                playlist_query = playlist_query.filter(
                    Playlist.is_delete == False)

            playlist_query = playlist_query.order_by(desc(Playlist.created_at))
            playlists = paginate_query(playlist_query).all()
            playlists = helpers.query_result_to_list(playlists)

            # if we passed in a current_user_id, filter out all private playlists where
            # the owner_id doesn't match the current_user_id
            if current_user_id:
                playlists = list(
                    filter(
                        lambda playlist: (not playlist["is_private"]) or
                        playlist["playlist_owner_id"] == current_user_id,
                        playlists,
                    ))

            # retrieve playlist ids list
            playlist_ids = list(
                map(lambda playlist: playlist["playlist_id"], playlists))

            return (playlists, playlist_ids)
Esempio n. 18
0
def get_saves(save_type, user_id):
    """Fetch a user's saves of the requested type.

    Args:
        save_type: "albums", "playlists", or "tracks".
        user_id: id of the user whose saves are fetched.

    Returns:
        paginated list of save dicts.

    Raises:
        exceptions.ArgumentError: for an unrecognized save_type.
    """
    save_type_lookup = {
        "albums": SaveType.album,
        "playlists": SaveType.playlist,
        "tracks": SaveType.track,
    }
    if save_type not in save_type_lookup:
        raise exceptions.ArgumentError("Invalid save type provided")
    save_query_type = save_type_lookup[save_type]

    save_results = []
    db = get_db_read_replica()
    with db.scoped_session() as session:
        query = session.query(Save).filter(
            Save.user_id == user_id,
            Save.is_current == True,
            Save.is_delete == False,
            Save.save_type == save_query_type,
        )
        # Drop saves pointing at deleted / non-current entries.
        if save_type == "tracks":
            live_ids = session.query(Track.track_id).filter(
                Track.is_current == True)
        else:
            live_ids = session.query(Playlist.playlist_id).filter(
                Playlist.is_album == (save_type == "albums"),
                Playlist.is_current == True)
        query = query.filter(Save.save_item_id.in_(live_ids))

        query_results = paginate_query(query).all()
        save_results = helpers.query_result_to_list(query_results)

    return save_results
def get_max_id(type):
    """Return the maximum public id for 'track', 'playlist', or 'user'.

    Unlisted tracks and private playlists are excluded from the maximum.

    Raises:
        exceptions.ArgumentError: for an unrecognized type.
    """
    if type not in ['track', 'playlist', 'user']:
        raise exceptions.ArgumentError(
            "Invalid type provided, must be one of 'track', 'playlist', 'user'"
        )

    db = get_db_read_replica()
    with db.scoped_session() as session:
        if type == 'track':
            return (session.query(func.max(Track.track_id))
                    .filter(Track.is_unlisted == False)
                    .scalar())

        if type == 'playlist':
            return (session.query(func.max(Playlist.playlist_id))
                    .filter(Playlist.is_private == False)
                    .scalar())

        # user
        return session.query(func.max(User.user_id)).scalar()
def search_tags():
    """Search tracks and users by tag string.

    Query params:
        query: the tag to search for (required).
        user_tag_count: min tag usage count for user matches (default "2").
        kind: "all", "tracks", or "users" (default "all").

    Returns:
        success response with matching tracks/users; when a current user is
        present, also their saved_tracks / followed_users subsets.

    Raises:
        exceptions.ArgumentError: if 'query' is missing or empty.
    """
    search_str = request.args.get("query", type=str)
    current_user_id = get_current_user_id(required=False)
    if not search_str:
        raise exceptions.ArgumentError("Invalid value for parameter 'query'")

    user_tag_count = request.args.get("user_tag_count", type=str)
    if not user_tag_count:
        user_tag_count = "2"

    kind = request.args.get("kind", type=str, default="all")
    validSearchKinds = [SearchKind.all, SearchKind.tracks, SearchKind.users]
    try:
        searchKind = SearchKind[kind]
        if searchKind not in validSearchKinds:
            raise Exception
    except Exception:
        return api_helpers.error_response(
            f"Invalid value for parameter 'kind' must be in {[k.name for k in validSearchKinds]}",
            400,
        )

    results = {}

    (limit, offset) = get_pagination_vars()
    db = get_db_read_replica()
    with db.scoped_session() as session:
        if searchKind in [SearchKind.all, SearchKind.tracks]:
            results["tracks"] = search_track_tags(
                session,
                {
                    "search_str": search_str,
                    "current_user_id": current_user_id,
                    "limit": limit,
                    "offset": offset,
                },
            )

        if searchKind in [SearchKind.all, SearchKind.users]:
            results["users"] = search_user_tags(
                session,
                {
                    "search_str": search_str,
                    "current_user_id": current_user_id,
                    "user_tag_count": user_tag_count,
                    "limit": limit,
                    "offset": offset,
                },
            )

        # Add personalized results for a given user.
        # Bug fix: this block previously sat OUTSIDE the scoped_session
        # context and referenced the already-closed session; it now runs
        # while the session is still open.
        if current_user_id:
            if searchKind in [SearchKind.all, SearchKind.tracks]:
                # Query saved tracks for the current user that contain this tag
                track_ids = [track["track_id"] for track in results["tracks"]]

                saves_query = (
                    session.query(Save.save_item_id)
                    .filter(
                        Save.is_current == True,
                        Save.is_delete == False,
                        Save.save_type == SaveType.track,
                        Save.user_id == current_user_id,
                        Save.save_item_id.in_(track_ids),
                    )
                    .all()
                )
                saved_track_ids = {i[0] for i in saves_query}
                saved_tracks = list(
                    filter(
                        lambda track: track["track_id"] in saved_track_ids,
                        results["tracks"],
                    )
                )
                results["saved_tracks"] = saved_tracks

            if searchKind in [SearchKind.all, SearchKind.users]:
                # Query followed users that have referenced this tag
                user_ids = [user["user_id"] for user in results["users"]]
                followed_user_query = (
                    session.query(Follow.followee_user_id)
                    .filter(
                        Follow.is_current == True,
                        Follow.is_delete == False,
                        Follow.follower_user_id == current_user_id,
                        Follow.followee_user_id.in_(user_ids),
                    )
                    .all()
                )
                followed_user_ids = {i[0] for i in followed_user_query}
                followed_users = list(
                    filter(
                        lambda user: user["user_id"] in followed_user_ids, results["users"]
                    )
                )
                results["followed_users"] = followed_users

    return api_helpers.success_response(results)
def get_users_account(args):
    """
    Fetch the account for the wallet given in `args`, including the
    playlists/albums the user owns or has saved.

    Args:
        args: dict; must contain a "wallet" key holding a 42-character
            wallet address (case-insensitive).

    Returns:
        The user dictionary with an added "playlists" key, or None when no
        matching user exists.

    Raises:
        exceptions.ArgumentError: if the wallet param is missing or has an
            invalid length.
    """
    db = get_db_read_replica()
    with db.scoped_session() as session:
        if "wallet" not in args:
            raise exceptions.ArgumentError("Missing wallet param")

        wallet = args.get("wallet").lower()
        if len(wallet) != 42:
            raise exceptions.ArgumentError("Invalid wallet length")

        # Exclude users whose on-chain creation never completed
        # (i.e. missing wallet or handle)
        user_row = (
            session.query(User)
            .filter(
                User.is_current == True,
                User.wallet != None,
                User.handle != None,
                User.wallet == wallet,
            )
            .order_by(asc(User.created_at))
            .first()
        )

        # No matching user: exit early with an empty response
        if not user_row:
            return None

        user = helpers.model_to_dictionary(user_row)
        user_id = user["user_id"]

        # Bundle peripheral info into the user result
        user = populate_user_metadata(session, [user_id], [user], user_id, True)[0]

        # Ids of playlists/albums the user has saved
        save_rows = (
            session.query(Save.save_item_id)
            .filter(
                Save.user_id == user_id,
                Save.is_current == True,
                Save.is_delete == False,
                or_(
                    Save.save_type == SaveType.playlist,
                    Save.save_type == SaveType.album,
                ),
            )
            .all()
        )
        saved_collection_ids = [row[0] for row in save_rows]

        # Playlists/albums either owned by or saved by the user
        playlist_rows = (
            session.query(Playlist)
            .filter(
                Playlist.is_current == True,
                Playlist.is_delete == False,
                or_(
                    Playlist.playlist_owner_id == user_id,
                    Playlist.playlist_id.in_(saved_collection_ids),
                ),
            )
            .order_by(desc(Playlist.created_at))
            .all()
        )
        playlists = helpers.query_result_to_list(playlist_rows)

        owner_ids = list({p["playlist_owner_id"] for p in playlists})

        # Fetch the owners for those playlists/albums and index them by id
        owner_rows = (
            session.query(User)
            .filter(and_(User.is_current == True, User.user_id.in_(owner_ids)))
            .all()
        )
        owners_by_id = {
            owner["user_id"]: owner
            for owner in helpers.query_result_to_list(owner_rows)
        }

        # Strip each playlist down to the fields the account response needs
        user["playlists"] = [
            {
                "id": p["playlist_id"],
                "name": p["playlist_name"],
                "is_album": p["is_album"],
                "user": {
                    "id": owners_by_id[p["playlist_owner_id"]]["user_id"],
                    "handle": owners_by_id[p["playlist_owner_id"]]["handle"],
                },
            }
            for p in playlists
        ]

    return user
Esempio n. 22
0
def get_top_followee_windowed(type, window, args):
    """
    Fetch the top tracks created within `window` by users the current
    user follows, ranked by summed save + repost count.

    Args:
        type: must be "track" (the only supported type)
        window: one of "week", "month", "year"
        args: dict supporting keys:
            limit: max number of results (default 25)
            with_users: if truthy, attach each track owner's user object

    Returns:
        list of track dictionaries with peripheral metadata populated.

    Raises:
        exceptions.ArgumentError: on an invalid type or window.
    """
    if type != 'track':
        raise exceptions.ArgumentError(
            "Invalid type provided, must be one of 'track'"
        )

    valid_windows = ['week', 'month', 'year']
    if not window or window not in valid_windows:
        raise exceptions.ArgumentError(
            f"Invalid window provided, must be one of {valid_windows}"
        )

    limit = args.get('limit', 25)
    current_user_id = get_current_user_id()

    db = get_db_read_replica()
    with db.scoped_session() as session:
        # Summed save + repost count per item of `type`, used for ranking
        count_subquery = create_save_repost_count_subquery(session, type)

        # Users the current user follows
        followees = (
            session.query(Follow.followee_user_id)
            .filter(
                Follow.follower_user_id == current_user_id,
                Follow.is_current == True,
                Follow.is_delete == False
            )
            .subquery()
        )

        # Tracks by followed users, joined against their counts
        top_tracks_query = (
            session.query(Track)
            .join(followees, Track.owner_id == followees.c.followee_user_id)
            .join(count_subquery, Track.track_id == count_subquery.c['id'])
            .filter(
                Track.is_current == True,
                Track.is_delete == False,
                Track.is_unlisted == False,
                Track.stem_of == None,
                # Only tracks created within the last `window` (week, month, etc.)
                Track.created_at >= text(f"NOW() - interval '1 {window}'"),
            )
            .order_by(desc(count_subquery.c['count']), desc(Track.track_id))
            .limit(limit)
        )

        tracks = helpers.query_result_to_list(top_tracks_query.all())
        track_ids = [track['track_id'] for track in tracks]

        # Bundle peripheral info into track results
        tracks = populate_track_metadata(
            session, track_ids, tracks, current_user_id)

        if args.get('with_users', False):
            users_by_id = get_users_by_id(session, get_users_ids(tracks))
            for track in tracks:
                owner = users_by_id[track['owner_id']]
                if owner:
                    track['user'] = owner

    return tracks
        def get_unpopulated_remixes():
            """
            Fetch the remix ("child") tracks of a parent track, co-signed
            remixes first, paginated.

            Uses `track_id`, `limit`, and `offset` from the enclosing scope
            (closure variables; defined above this nested function).

            Returns:
                (tracks, track_ids, count) where `tracks` is a list of track
                dictionaries, `track_ids` their ids, and `count` the total
                result count reported by the paginated query.

            Raises:
                exceptions.ArgumentError: if no current track exists for
                    the given track_id.
            """
            # Fetch the parent track to get the track's owner id
            parent_track_res = get_unpopulated_tracks(session, [track_id],
                                                      False, False)

            if not parent_track_res or parent_track_res[0] is None:
                raise exceptions.ArgumentError("Invalid track_id provided")

            parent_track = parent_track_res[0]
            track_owner_id = parent_track['owner_id']

            # Create subquery for save counts for sorting
            save_count_subquery = create_save_count_subquery(
                session, SaveType.track)

            # Create subquery for repost counts for sorting
            repost_count_subquery = create_repost_count_subquery(
                session, RepostType.track)

            # Get the 'children' remix tracks
            # Use the track owner id to fetch reposted/saved tracks returned first
            # The outer joins against Save/Repost match only the parent track
            # owner's own save/repost — a non-NULL row there is the "co-sign".
            base_query = (
                session.query(
                    Track
                )
                .join(
                    Remix,
                    and_(
                        Remix.child_track_id == Track.track_id,
                        Remix.parent_track_id == track_id
                    )
                ).outerjoin(
                    Save,
                    and_(
                        Save.save_item_id == Track.track_id,
                        Save.save_type == SaveType.track,
                        Save.is_current == True,
                        Save.is_delete == False,
                        Save.user_id == track_owner_id
                    )
                ).outerjoin(
                    Repost,
                    and_(
                        Repost.repost_item_id == Track.track_id,
                        Repost.user_id == track_owner_id,
                        Repost.repost_type == RepostType.track,
                        Repost.is_current == True,
                        Repost.is_delete == False
                    )
                ).outerjoin(
                    repost_count_subquery,
                    repost_count_subquery.c['id'] == Track.track_id
                ).outerjoin(
                    save_count_subquery,
                    save_count_subquery.c['id'] == Track.track_id
                )
                .filter(
                    Track.is_current == True,
                    Track.is_delete == False,
                    Track.is_unlisted == False
                )
                # 1. Co-signed tracks ordered by save + repost count
                # 2. Other tracks ordered by save + repost count
                .order_by(
                    desc(
                        # If there is no "co-sign" for the track (no repost or save from the parent owner),
                        # defer to secondary sort
                        case(
                            [
                                (and_(Repost.created_at == None,
                                      Save.created_at == None), 0),
                            ],
                            else_=(
                                func.coalesce(repost_count_subquery.c.repost_count, 0) + \
                                func.coalesce(save_count_subquery.c.save_count, 0)
                            )
                        )
                    ),
                    # Order by saves + reposts
                    desc(
                        func.coalesce(repost_count_subquery.c.repost_count, 0) + \
                        func.coalesce(save_count_subquery.c.save_count, 0)
                    ),
                    # Ties, pick latest track id
                    desc(Track.track_id)
                )
            )

            # Paginate and also fetch the total count (the two True flags
            # presumably toggle pagination and counting — confirm against
            # add_query_pagination's definition)
            (tracks, count) = add_query_pagination(base_query, limit, offset,
                                                   True, True)
            tracks = tracks.all()
            tracks = helpers.query_result_to_list(tracks)
            track_ids = list(map(lambda track: track["track_id"], tracks))
            return (tracks, track_ids, count)
def get_cid_source(cid):
    """
    Returns the CID source (e.g. CID is a metadata hash, a cover photo, a track segment, etc.)

    Args:
        cid: the observed CID string.

    Returns:
        A list of dicts, one per matching row, each describing the table,
        id, type, and is_current flag of the entity referencing the CID.
        Empty when no match is found or the redis lock was not acquired.

    Raises:
        exceptions.ArgumentError: if cid is None.
    """
    if cid is None:
        raise exceptions.ArgumentError("Input CID is invalid")

    have_lock = False
    # Serialize concurrent lookups; do not block more than 25s waiting
    update_lock = redis.lock("get_cid_source_lock", blocking_timeout=25)

    try:
        # Attempt to acquire lock - do not block if unable to acquire
        have_lock = update_lock.acquire(blocking=False)
        response = []
        if have_lock:
            db = db_session.get_db_read_replica()
            with db.scoped_session() as session:
                # Check to see if CID is of any type but a segment
                cid_source_res = sqlalchemy.text("""
                    WITH cid_const AS (VALUES (:cid))
                    SELECT * FROM
                    (
                        (
                            SELECT
                                "user_id" as "id",
                                'users' as "table_name",
                                'metadata_multihash' as "type",
                                "is_current"
                            FROM "users" WHERE (table cid_const) = "metadata_multihash"
                        )
                        UNION ALL
                        (
                            SELECT
                                "user_id" as "id",
                                'users' as "table_name",
                                'profile_cover_images' as "type",
                                "is_current"
                            FROM
                                "users"
                            WHERE
                                (table cid_const) in (
                                    "profile_picture",
                                    "cover_photo",
                                    "profile_picture_sizes",
                                    "cover_photo_sizes"
                                )
                        )
                        UNION ALL
                        (
                                SELECT
                                "playlist_id" as "id",
                                'playlists' as "table_name",
                                'playlist_image_multihash' as "type",
                                "is_current"
                                FROM
                                    "playlists"
                                WHERE
                                    (table cid_const) in (
                                        "playlist_image_sizes_multihash",
                                        "playlist_image_multihash"
                                    )
                        )
                        UNION ALL
                        (
                            SELECT
                                "track_id" as "id",
                                'tracks' as "table_name",
                                'track_metadata' as "type",
                                "is_current"
                            FROM
                                "tracks"
                            WHERE
                                (table cid_const) = "metadata_multihash"
                        )
                        UNION ALL
                        (
                            SELECT
                                "track_id" as "id",
                                'tracks' as "table_name",
                                'cover_art_size' as "type",
                                "is_current"
                            FROM
                                "tracks"
                            WHERE
                                (table cid_const) = "cover_art_sizes"
                        )
                    ) as "outer"
                    """)
                cid_source = session.execute(cid_source_res, {
                    "cid": cid
                }).fetchall()

                # If something is found, set response
                if len(cid_source) != 0:
                    response = [dict(row) for row in cid_source]

                # If CID was not found, check to see if it is a type segment
                if len(response) == 0:
                    cid_source_res = sqlalchemy.text("""
                        WITH cid_const AS (VALUES (:cid))
                            SELECT
                                "track_id" as "id",
                                'tracks' as "table_name",
                                'segment' as "type",
                                "is_current"
                            FROM
                                (
                                    SELECT
                                        jb -> 'duration' as "d",
                                        jb -> 'multihash' :: varchar as "cid",
                                        "track_id",
                                        "is_current"
                                    FROM
                                        (
                                            SELECT
                                                jsonb_array_elements("track_segments") as "jb",
                                                "track_id",
                                                "is_current"
                                            FROM
                                                "tracks"
                                        ) as a
                                ) as a2
                            WHERE
                                "cid" ? (table cid_const)
                        """)

                    cid_source = session.execute(cid_source_res, {
                        "cid": cid
                    }).fetchall()

                    # If something is found, set response
                    if len(cid_source) != 0:
                        response = [dict(row) for row in cid_source]
        else:
            logger.warning(
                "get_cid_source | Failed to acquire get_cid_source_lock")

        return response
    except Exception as e:
        # BUG FIX: the original passed no argument for the %s placeholder,
        # which breaks the log record's message formatting. Pass the
        # exception; exc_info=True still attaches the full traceback.
        logger.error("get_cid_source | Error with query: %s", e, exc_info=True)
        raise e
    finally:
        if have_lock:
            update_lock.release()
def _reduce_unique_counts(rows):
    """Fold (timestamp, count, summed_count) rows into a dict keyed by str(timestamp)."""
    return {
        str(timestamp): {"unique": count, "summed_unique": summed_count or 0}
        for (timestamp, count, summed_count) in rows
    }


def _reduce_total_counts(rows):
    """Fold (timestamp, count) rows into a dict keyed by str(timestamp)."""
    return {str(timestamp): count for (timestamp, count) in rows}


def _join_counts(unique_count_records, total_count_records):
    """Join unique and total count records on timestamp into the response shape.

    Timestamps present in only one of the two maps are dropped, matching the
    original behavior of iterating unique records and requiring a total match.
    """
    return [
        {
            "timestamp": timestamp,
            "unique_count": counts["unique"],
            "summed_unique_count": counts["summed_unique"],
            "total_count": total_count_records[timestamp],
        }
        for timestamp, counts in unique_count_records.items()
        if timestamp in total_count_records
    ]


def _daily_counts(session, start_time, end_time):
    """Query per-day unique and total route counts with start_time <= ts < end_time.

    Returns (unique_rows, total_rows).
    """
    unique_counts = (
        session.query(
            AggregateDailyUniqueUsersMetrics.timestamp,
            AggregateDailyUniqueUsersMetrics.count,
            AggregateDailyUniqueUsersMetrics.summed_count,
        )
        .filter(start_time <= AggregateDailyUniqueUsersMetrics.timestamp)
        .filter(AggregateDailyUniqueUsersMetrics.timestamp < end_time)
        .order_by(asc("timestamp"))
        .all()
    )
    total_counts = (
        session.query(
            AggregateDailyTotalUsersMetrics.timestamp,
            AggregateDailyTotalUsersMetrics.count,
        )
        .filter(start_time <= AggregateDailyTotalUsersMetrics.timestamp)
        .filter(AggregateDailyTotalUsersMetrics.timestamp < end_time)
        .order_by(asc("timestamp"))
        .all()
    )
    return unique_counts, total_counts


def _weekly_bucketed_counts(session, start_time, end_time):
    """Query daily route counts summed into week buckets (ts < end_time).

    start_time may be None to include all history.
    Returns (unique_rows, total_rows).
    """
    unique_week = func.date_trunc("week", AggregateDailyUniqueUsersMetrics.timestamp)
    unique_query = session.query(
        unique_week.label("timestamp"),
        func.sum(AggregateDailyUniqueUsersMetrics.count).label("count"),
        func.sum(AggregateDailyUniqueUsersMetrics.summed_count).label("summed_count"),
    )
    if start_time is not None:
        unique_query = unique_query.filter(
            start_time <= AggregateDailyUniqueUsersMetrics.timestamp
        )
    unique_counts = (
        unique_query.filter(AggregateDailyUniqueUsersMetrics.timestamp < end_time)
        .group_by(unique_week)
        .order_by(asc("timestamp"))
        .all()
    )

    total_week = func.date_trunc("week", AggregateDailyTotalUsersMetrics.timestamp)
    total_query = session.query(
        total_week.label("timestamp"),
        func.sum(AggregateDailyTotalUsersMetrics.count).label("count"),
    )
    if start_time is not None:
        total_query = total_query.filter(
            start_time <= AggregateDailyTotalUsersMetrics.timestamp
        )
    total_counts = (
        total_query.filter(AggregateDailyTotalUsersMetrics.timestamp < end_time)
        .group_by(total_week)
        .order_by(asc("timestamp"))
        .all()
    )
    return unique_counts, total_counts


def _monthly_counts(session, end_time):
    """Query per-month unique and total route counts with ts < end_time.

    Returns (unique_rows, total_rows).
    """
    unique_counts = (
        session.query(
            AggregateMonthlyUniqueUsersMetrics.timestamp,
            AggregateMonthlyUniqueUsersMetrics.count,
            AggregateMonthlyUniqueUsersMetrics.summed_count,
        )
        .filter(AggregateMonthlyUniqueUsersMetrics.timestamp < end_time)
        .order_by(asc("timestamp"))
        .all()
    )
    total_counts = (
        session.query(
            AggregateMonthlyTotalUsersMetrics.timestamp,
            AggregateMonthlyTotalUsersMetrics.count,
        )
        .filter(AggregateMonthlyTotalUsersMetrics.timestamp < end_time)
        .order_by(asc("timestamp"))
        .all()
    )
    return unique_counts, total_counts


def _get_aggregate_route_metrics(session, time_range, bucket_size):
    """
    Build aggregate route metrics for the given time range and bucket size.

    Supported combinations:
        week/day, month/day, month/week, all_time/month, all_time/week.

    Args:
        session: DB session
        time_range: "week", "month", or "all_time"
        bucket_size: "day", "week", or "month" (validity depends on time_range)

    Returns:
        list of dicts with keys timestamp, unique_count, summed_unique_count,
        and total_count, ordered by ascending timestamp.

    Raises:
        exceptions.ArgumentError: on an unsupported combination.
    """
    today = date.today()
    seven_days_ago = today - timedelta(days=7)
    thirty_days_ago = today - timedelta(days=30)
    first_day_of_month = today.replace(day=1)

    if time_range == "week":
        if bucket_size == "day":
            unique_rows, total_rows = _daily_counts(session, seven_days_ago, today)
            return _join_counts(
                _reduce_unique_counts(unique_rows), _reduce_total_counts(total_rows)
            )
        raise exceptions.ArgumentError("Invalid bucket_size for time_range")
    if time_range == "month":
        if bucket_size == "day":
            unique_rows, total_rows = _daily_counts(session, thirty_days_ago, today)
            return _join_counts(
                _reduce_unique_counts(unique_rows), _reduce_total_counts(total_rows)
            )
        if bucket_size == "week":
            unique_rows, total_rows = _weekly_bucketed_counts(
                session, thirty_days_ago, today
            )
            return _join_counts(
                _reduce_unique_counts(unique_rows), _reduce_total_counts(total_rows)
            )
        raise exceptions.ArgumentError("Invalid bucket_size for time_range")
    if time_range == "all_time":
        if bucket_size == "month":
            # Exclude the (incomplete) current month
            unique_rows, total_rows = _monthly_counts(session, first_day_of_month)
            return _join_counts(
                _reduce_unique_counts(unique_rows), _reduce_total_counts(total_rows)
            )
        if bucket_size == "week":
            # No lower bound: include all recorded daily history
            unique_rows, total_rows = _weekly_bucketed_counts(session, None, today)
            return _join_counts(
                _reduce_unique_counts(unique_rows), _reduce_total_counts(total_rows)
            )
        raise exceptions.ArgumentError("Invalid bucket_size for time_range")
    raise exceptions.ArgumentError("Invalid time_range")
def get_top_playlists(kind, args):
    """
    Fetch the top playlists or albums ranked by a time-decayed
    repost + save score.

    Args:
        kind: either "playlist" or "album"
        args: dict supporting keys:
            limit: max number of results (default 16)
            mood: optional mood to filter playlists by
            filter: optional; only "followees" is accepted, which restricts
                results to playlists by users the current user follows
                (requires a current user id)
            with_users: if truthy, attach each playlist owner's user object

    Returns:
        list of playlist dictionaries, each including a "score" key.

    Raises:
        exceptions.ArgumentError: on an invalid kind or filter, or when
            filter=followees is requested without a current user.
    """
    # Current user is optional; only required for the followees filter
    current_user_id = get_current_user_id(required=False)

    # Argument parsing and checking
    if kind not in ("playlist", "album"):
        raise exceptions.ArgumentError(
            "Invalid kind provided, must be one of 'playlist', 'album'"
        )

    limit = args.get("limit", 16)
    mood = args.get("mood", None)

    if "filter" in args:
        query_filter = args.get("filter")
        if query_filter != "followees":
            raise exceptions.ArgumentError(
                "Invalid filter provided, must be one of 'followees'"
            )
        if query_filter == "followees":
            if not current_user_id:
                raise exceptions.ArgumentError(
                    "User id required to query for followees"
                )
    else:
        query_filter = None

    db = get_db_read_replica()
    with db.scoped_session() as session:

        # If filtering by followees, set the playlist view to be only playlists from
        # users that the current user follows.
        if query_filter == "followees":
            playlists_to_query = create_followee_playlists_subquery(
                session, current_user_id
            )
        # Otherwise, just query all playlists
        else:
            playlists_to_query = session.query(Playlist).subquery()

        # Create a decayed-score view of the playlists
        playlist_query = (
            session.query(
                playlists_to_query,
                (AggregatePlaylist.repost_count + AggregatePlaylist.save_count).label(
                    "count"
                ),
                decayed_score(
                    AggregatePlaylist.repost_count + AggregatePlaylist.save_count,
                    playlists_to_query.c.created_at,
                ).label("score"),
            )
            .select_from(playlists_to_query)
            .join(
                AggregatePlaylist,
                AggregatePlaylist.playlist_id == playlists_to_query.c.playlist_id,
            )
            .filter(
                playlists_to_query.c.is_current == True,
                playlists_to_query.c.is_delete == False,
                playlists_to_query.c.is_private == False,
                # kind selects between albums and plain playlists
                playlists_to_query.c.is_album == (kind == "album"),
            )
        )

        # Filter by mood (no-op if no mood is provided)
        playlist_query = filter_to_playlist_mood(
            session, mood, playlist_query, playlists_to_query
        )

        # Order and limit the playlist query by score
        playlist_query = playlist_query.order_by(
            desc("score"), desc(playlists_to_query.c.playlist_id)
        ).limit(limit)

        playlist_results = playlist_query.all()

        # Unzip query results into playlists and scores
        score_map = {}  # playlist_id : score
        playlists = []
        if playlist_results:
            for result in playlist_results:
                # The playlist is the portion of the query result before repost_count and score
                # (each row is playlist columns + the two labeled columns added above)
                playlist = result[0:-2]
                score = result[-1]

                # Convert the playlist row tuple into a dictionary keyed by column name
                playlist = helpers.tuple_to_model_dictionary(playlist, Playlist)
                score_map[playlist["playlist_id"]] = score
                playlists.append(playlist)

        playlist_ids = list(map(lambda playlist: playlist["playlist_id"], playlists))

        # Bundle peripheral info into playlist results
        playlists = populate_playlist_metadata(
            session,
            playlist_ids,
            playlists,
            [RepostType.playlist, RepostType.album],
            [SaveType.playlist, SaveType.album],
            current_user_id,
        )
        # Add scores into the response
        for playlist in playlists:
            playlist["score"] = score_map[playlist["playlist_id"]]

        if args.get("with_users", False):
            user_id_list = get_users_ids(playlists)
            users = get_users_by_id(session, user_id_list)
            for playlist in playlists:
                user = users[playlist["playlist_owner_id"]]
                if user:
                    playlist["user"] = user

    return playlists
Esempio n. 27
0
def _join_counts(unique_counts, total_counts):
    """Merge raw unique/total count rows into response metric dicts.

    unique_counts rows are (timestamp, count, summed_count); total_counts
    rows are (timestamp, count). Only timestamps present in BOTH result
    sets are emitted, preserving the unique-count ordering. A NULL
    summed_count is coerced to 0.
    """
    unique_records = {
        str(ts): {'unique': count, 'summed_unique': summed or 0}
        for ts, count, summed in unique_counts
    }
    total_records = {str(ts): count for ts, count in total_counts}
    return [
        {
            'timestamp': ts,
            'unique_count': counts['unique'],
            'summed_unique_count': counts['summed_unique'],
            'total_count': total_records[ts],
        }
        for ts, counts in unique_records.items()
        if ts in total_records
    ]


def _daily_counts(session, start_date, end_date):
    """Per-day unique and total user count rows in [start_date, end_date)."""
    unique = (session.query(
        AggregateDailyUniqueUsersMetrics.timestamp,
        AggregateDailyUniqueUsersMetrics.count,
        AggregateDailyUniqueUsersMetrics.summed_count
    ).filter(
        start_date <= AggregateDailyUniqueUsersMetrics.timestamp
    ).filter(
        AggregateDailyUniqueUsersMetrics.timestamp < end_date
    ).order_by(asc('timestamp')).all())

    total = (session.query(
        AggregateDailyTotalUsersMetrics.timestamp,
        AggregateDailyTotalUsersMetrics.count
    ).filter(
        start_date <= AggregateDailyTotalUsersMetrics.timestamp
    ).filter(
        AggregateDailyTotalUsersMetrics.timestamp < end_date
    ).order_by(asc('timestamp')).all())
    return unique, total


def _weekly_counts(session, start_date, end_date):
    """Daily rows bucketed into weeks via date_trunc, summed per bucket.

    start_date may be None to include all history before end_date.
    """
    unique_q = session.query(
        func.date_trunc(
            'week', AggregateDailyUniqueUsersMetrics.timestamp).label(
                'timestamp'),
        func.sum(AggregateDailyUniqueUsersMetrics.count).label('count'),
        func.sum(AggregateDailyUniqueUsersMetrics.summed_count).label(
            'summed_count')
    )
    if start_date is not None:
        unique_q = unique_q.filter(
            start_date <= AggregateDailyUniqueUsersMetrics.timestamp)
    unique = (unique_q.filter(
        AggregateDailyUniqueUsersMetrics.timestamp < end_date
    ).group_by(
        func.date_trunc('week', AggregateDailyUniqueUsersMetrics.timestamp)
    ).order_by(asc('timestamp')).all())

    total_q = session.query(
        func.date_trunc(
            'week', AggregateDailyTotalUsersMetrics.timestamp).label(
                'timestamp'),
        func.sum(AggregateDailyTotalUsersMetrics.count).label('count')
    )
    if start_date is not None:
        total_q = total_q.filter(
            start_date <= AggregateDailyTotalUsersMetrics.timestamp)
    total = (total_q.filter(
        AggregateDailyTotalUsersMetrics.timestamp < end_date
    ).group_by(
        func.date_trunc('week', AggregateDailyTotalUsersMetrics.timestamp)
    ).order_by(asc('timestamp')).all())
    return unique, total


def _monthly_counts(session, end_date):
    """Per-month unique and total user count rows strictly before end_date."""
    unique = (session.query(
        AggregateMonthlyUniqueUsersMetrics.timestamp,
        AggregateMonthlyUniqueUsersMetrics.count,
        AggregateMonthlyUniqueUsersMetrics.summed_count
    ).filter(
        AggregateMonthlyUniqueUsersMetrics.timestamp < end_date
    ).order_by(asc('timestamp')).all())

    total = (session.query(
        AggregateMonthlyTotalUsersMetrics.timestamp,
        AggregateMonthlyTotalUsersMetrics.count
    ).filter(
        AggregateMonthlyTotalUsersMetrics.timestamp < end_date
    ).order_by(asc('timestamp')).all())
    return unique, total


def _get_aggregate_route_metrics(session, time_range, bucket_size):
    """Return aggregate route metrics bucketed over a time range.

    Args:
        session: SQLAlchemy session for the aggregate metrics tables.
        time_range: one of 'week', 'month', 'all_time'.
        bucket_size: bucket granularity; valid combinations are
            week/day, month/day, month/week, all_time/month, all_time/week.

    Returns:
        A list of dicts with 'timestamp', 'unique_count',
        'summed_unique_count', and 'total_count' keys.

    Raises:
        exceptions.ArgumentError: on an invalid time_range or an invalid
            bucket_size for the given time_range.
    """
    today = date.today()
    seven_days_ago = today - timedelta(days=7)
    thirty_days_ago = today - timedelta(days=30)
    first_day_of_month = today.replace(day=1)

    if time_range == 'week':
        if bucket_size == 'day':
            unique, total = _daily_counts(session, seven_days_ago, today)
            return _join_counts(unique, total)
        raise exceptions.ArgumentError("Invalid bucket_size for time_range")
    if time_range == 'month':
        if bucket_size == 'day':
            unique, total = _daily_counts(session, thirty_days_ago, today)
            return _join_counts(unique, total)
        if bucket_size == 'week':
            unique, total = _weekly_counts(session, thirty_days_ago, today)
            return _join_counts(unique, total)
        raise exceptions.ArgumentError("Invalid bucket_size for time_range")
    if time_range == 'all_time':
        if bucket_size == 'month':
            # The current (incomplete) month is excluded.
            unique, total = _monthly_counts(session, first_day_of_month)
            return _join_counts(unique, total)
        if bucket_size == 'week':
            # No lower bound: all daily history, bucketed by week.
            unique, total = _weekly_counts(session, None, today)
            return _join_counts(unique, total)
        raise exceptions.ArgumentError("Invalid bucket_size for time_range")
    raise exceptions.ArgumentError("Invalid time_range")
Esempio n. 28
0
def get_playlists(args):
    """Fetch playlists, optionally filtered by playlist ids or owner.

    Args:
        args: dict of optional filters:
            "playlist_id" - list of playlist ids to fetch explicitly
            "user_id" - restrict to playlists owned by this user
            "with_users" - if truthy, attach owner user objects

    Returns:
        A list of playlist dicts with repost/save metadata populated.
        Private playlists are included only when the requesting user is
        fetching their own playlists; deleted playlists are included only
        when fetching explicitly by id.
    """
    playlists = []
    current_user_id = get_current_user_id(required=False)
    filter_out_private_playlists = True

    db = get_db_read_replica()
    with db.scoped_session() as session:
        try:
            playlist_query = (
                session.query(Playlist)
                .filter(Playlist.is_current == True)
            )

            # Playlist ids filter if the optional query param is passed in
            if "playlist_id" in args:
                playlist_id_list = args.get("playlist_id")
                try:
                    playlist_query = playlist_query.filter(
                        Playlist.playlist_id.in_(playlist_id_list))
                except ValueError as e:
                    raise exceptions.ArgumentError(
                        "Invalid value found in playlist id list", e)

            if "user_id" in args:
                user_id = args.get("user_id")
                # User id filter if the optional query param is passed in
                playlist_query = playlist_query.filter(
                    Playlist.playlist_owner_id == user_id
                )

                # If the current user is requesting their own playlists,
                # allow them to see private ones.
                if current_user_id and user_id and (int(current_user_id) == int(user_id)):
                    filter_out_private_playlists = False

            if filter_out_private_playlists:
                playlist_query = playlist_query.filter(
                    Playlist.is_private == False
                )

            # Filter out deletes unless we're fetching explicitly by id
            if "playlist_id" not in args:
                playlist_query = playlist_query.filter(
                    Playlist.is_delete == False
                )

            playlist_query = playlist_query.order_by(desc(Playlist.created_at))
            playlists = paginate_query(playlist_query).all()
            playlists = helpers.query_result_to_list(playlists)

            # Retrieve playlist ids list
            playlist_ids = [playlist["playlist_id"] for playlist in playlists]

            # Bundle peripheral info into playlist results.
            # (current_user_id was already fetched above; the redundant
            # second lookup was removed.)
            playlists = populate_playlist_metadata(
                session,
                playlist_ids,
                playlists,
                [RepostType.playlist, RepostType.album],
                [SaveType.playlist, SaveType.album],
                current_user_id
            )

            if args.get("with_users", False):
                user_id_list = get_users_ids(playlists)
                users = get_users_by_id(session, user_id_list)
                for playlist in playlists:
                    user = users[playlist['playlist_owner_id']]
                    if user:
                        playlist['user'] = user

        except sqlalchemy.orm.exc.NoResultFound:
            pass
    return playlists
Esempio n. 29
0
def search_tags():
    """Search tracks and users by tag.

    Request query params:
        query - the tag search string (required)
        user_tag_count - minimum number of tag-matching tracks a user must
            own to appear in user results (default "2")
        kind - one of 'all', 'tracks', 'users' (default 'all')

    Returns a success response whose body may contain 'tracks', 'users',
    and — for a logged-in user — 'saved_tracks' and 'followed_users',
    each sorted by popularity and paginated. Raises
    exceptions.ArgumentError when 'query' is missing.
    """
    search_str = request.args.get("query", type=str)
    current_user_id = get_current_user_id(required=False)
    if not search_str:
        raise exceptions.ArgumentError("Invalid value for parameter 'query'")

    user_tag_count = request.args.get("user_tag_count", type=str)
    if not user_tag_count:
        user_tag_count = "2"

    kind = request.args.get("kind", type=str, default="all")
    validSearchKinds = [SearchKind.all, SearchKind.tracks, SearchKind.users]
    try:
        searchKind = SearchKind[kind]
        if searchKind not in validSearchKinds:
            raise Exception
    except Exception:
        return api_helpers.error_response(
            "Invalid value for parameter 'kind' must be in %s" %
            [k.name for k in validSearchKinds], 400)

    results = {}

    (limit, offset) = get_pagination_vars()
    like_tags_str = str.format('%{}%', search_str)
    db = get_db_read_replica()
    with db.scoped_session() as session:
        if (searchKind in [SearchKind.all, SearchKind.tracks]):
            track_res = sqlalchemy.text(f"""
                select distinct(track_id)
                from
                (
                    select
                        strip(to_tsvector(tracks.tags)) as tagstrip,
                        track_id
                    from
                        tracks
                    where
                        (tags like :like_tags_query)
                        and (is_current is true)
                        and (is_delete is false)
                        and (is_unlisted is false)
                        and (stem_of is NULL)
                    order by
                        updated_at desc
                ) as t
                    where
                    tagstrip @@ to_tsquery(:query);
                """)
            track_ids = session.execute(track_res, {
                "query": search_str,
                "like_tags_query": like_tags_str
            }).fetchall()

            # track_ids is list of tuples - simplify to 1-D list
            track_ids = [i[0] for i in track_ids]

            tracks = (session.query(Track).filter(
                Track.is_current == True,
                Track.is_delete == False,
                Track.is_unlisted == False,
                Track.stem_of == None,
                Track.track_id.in_(track_ids),
            ).all())

            tracks = helpers.query_result_to_list(tracks)
            track_play_counts = get_track_play_counts(track_ids)

            tracks = populate_track_metadata(session, track_ids, tracks,
                                             current_user_id)

            for track in tracks:
                track_id = track["track_id"]
                track[response_name_constants.
                      play_count] = track_play_counts.get(track_id, 0)

            play_count_sorted_tracks = \
                sorted(
                    tracks, key=lambda i: i[response_name_constants.play_count], reverse=True)

            # Add pagination parameters to track and user results
            play_count_sorted_tracks = \
                play_count_sorted_tracks[slice(offset, offset + limit, 1)]

            results['tracks'] = play_count_sorted_tracks

        if (searchKind in [SearchKind.all, SearchKind.users]):
            user_res = sqlalchemy.text(f"""
                select * from
                (
                    select
                        count(track_id),
                        owner_id
                    from
                    (
                        select
                            strip(to_tsvector(tracks.tags)) as tagstrip,
                            track_id,
                            owner_id
                        from
                            tracks
                        where
                            (tags like :like_tags_query)
                            and (is_current is true)
                            and (is_unlisted is false)
                            and (stem_of is NULL)
                        order by
                            updated_at desc
                    ) as t
                    where
                            tagstrip @@ to_tsquery(:query)
                    group by
                            owner_id
                    order by
                            count desc
                ) as usr
                where
                    usr.count >= :user_tag_count;
                """)
            user_ids = session.execute(
                user_res, {
                    "query": search_str,
                    "like_tags_query": like_tags_str,
                    "user_tag_count": user_tag_count
                }).fetchall()

            # user_ids is list of tuples - simplify to 1-D list
            user_ids = [i[1] for i in user_ids]

            users = (session.query(User).filter(
                User.is_current == True, User.user_id.in_(user_ids)).all())
            users = helpers.query_result_to_list(users)

            users = populate_user_metadata(session, user_ids, users,
                                           current_user_id)

            followee_sorted_users = \
                sorted(
                    users, key=lambda i: i[response_name_constants.follower_count], reverse=True)

            followee_sorted_users = \
                followee_sorted_users[slice(offset, offset + limit, 1)]

            results['users'] = followee_sorted_users

        # Add personalized results for a given user.
        # Bug fix: this block previously ran AFTER the scoped session
        # context had exited while still issuing queries through
        # `session`; it now runs inside the session scope.
        if current_user_id:
            if (searchKind in [SearchKind.all, SearchKind.tracks]):
                # Query saved tracks for the current user that contain this tag
                saves_query = (session.query(Save.save_item_id).filter(
                    Save.is_current == True, Save.is_delete == False,
                    Save.save_type == SaveType.track,
                    Save.user_id == current_user_id,
                    Save.save_item_id.in_(track_ids)).all())
                saved_track_ids = [i[0] for i in saves_query]
                saved_tracks = (session.query(Track).filter(
                    Track.is_current == True,
                    Track.is_delete == False,
                    Track.is_unlisted == False,
                    Track.stem_of == None,
                    Track.track_id.in_(saved_track_ids),
                ).all())
                saved_tracks = helpers.query_result_to_list(saved_tracks)
                for saved_track in saved_tracks:
                    saved_track_id = saved_track["track_id"]
                    saved_track[response_name_constants.play_count] = \
                        track_play_counts.get(saved_track_id, 0)
                saved_tracks = \
                    populate_track_metadata(
                        session, saved_track_ids, saved_tracks, current_user_id)

                # Sort and paginate
                play_count_sorted_saved_tracks = \
                    sorted(
                        saved_tracks, key=lambda i: i[response_name_constants.play_count], reverse=True)

                play_count_sorted_saved_tracks = \
                    play_count_sorted_saved_tracks[slice(
                        offset, offset + limit, 1)]

                results['saved_tracks'] = play_count_sorted_saved_tracks

            if (searchKind in [SearchKind.all, SearchKind.users]):
                # Query followed users that have referenced this tag
                followed_user_query = (session.query(
                    Follow.followee_user_id).filter(
                        Follow.is_current == True, Follow.is_delete == False,
                        Follow.follower_user_id == current_user_id,
                        Follow.followee_user_id.in_(user_ids)).all())
                followed_user_ids = [i[0] for i in followed_user_query]
                followed_users = (session.query(User).filter(
                    User.is_current == True,
                    User.user_id.in_(followed_user_ids)).all())
                followed_users = helpers.query_result_to_list(followed_users)
                followed_users = \
                    populate_user_metadata(
                        session,
                        followed_user_ids,
                        followed_users,
                        current_user_id
                    )

                followed_users_followee_sorted = \
                    sorted(
                        followed_users,
                        key=lambda i: i[response_name_constants.follower_count],
                        reverse=True)

                followed_users_followee_sorted = \
                    followed_users_followee_sorted[slice(
                        offset, offset + limit, 1)]

                results['followed_users'] = followed_users_followee_sorted

    return api_helpers.success_response(results)
Esempio n. 30
0
def search_tags():
    """Search tracks and users by tag via the shared tag-search helpers.

    Request query params:
        query - the tag search string (required)
        user_tag_count - minimum number of tag-matching tracks a user must
            own to appear in user results (default "2")
        kind - one of 'all', 'tracks', 'users' (default 'all')

    Returns a success response whose body may contain 'tracks', 'users',
    and — for a logged-in user — 'saved_tracks' and 'followed_users'.
    Raises exceptions.ArgumentError when 'query' is missing.
    """
    search_str = request.args.get("query", type=str)
    current_user_id = get_current_user_id(required=False)
    if not search_str:
        raise exceptions.ArgumentError("Invalid value for parameter 'query'")

    user_tag_count = request.args.get("user_tag_count", type=str)
    if not user_tag_count:
        user_tag_count = "2"

    kind = request.args.get("kind", type=str, default="all")
    validSearchKinds = [SearchKind.all, SearchKind.tracks, SearchKind.users]
    try:
        searchKind = SearchKind[kind]
        if searchKind not in validSearchKinds:
            raise Exception
    except Exception:
        return api_helpers.error_response(
            "Invalid value for parameter 'kind' must be in %s" %
            [k.name for k in validSearchKinds], 400)

    results = {}

    (limit, offset) = get_pagination_vars()
    db = get_db_read_replica()
    with db.scoped_session() as session:
        if (searchKind in [SearchKind.all, SearchKind.tracks]):
            results['tracks'] = search_track_tags(
                session, {
                    'search_str': search_str,
                    'current_user_id': current_user_id,
                    'limit': limit,
                    'offset': offset
                })

        if (searchKind in [SearchKind.all, SearchKind.users]):
            results['users'] = search_user_tags(
                session, {
                    'search_str': search_str,
                    'current_user_id': current_user_id,
                    "user_tag_count": user_tag_count,
                    'limit': limit,
                    'offset': offset
                })

        # Add personalized results for a given user.
        # Bug fix: this block previously ran AFTER the scoped session
        # context had exited while still issuing queries through
        # `session`; it now runs inside the session scope.
        if current_user_id:
            if (searchKind in [SearchKind.all, SearchKind.tracks]):
                # Query saved tracks for the current user that contain this tag
                track_ids = [track['track_id'] for track in results['tracks']]
                track_play_counts = {
                    track['track_id']: track[response_name_constants.play_count]
                    for track in results['tracks']
                }

                saves_query = (session.query(Save.save_item_id).filter(
                    Save.is_current == True, Save.is_delete == False,
                    Save.save_type == SaveType.track,
                    Save.user_id == current_user_id,
                    Save.save_item_id.in_(track_ids)).all())
                saved_track_ids = [i[0] for i in saves_query]
                saved_tracks = (session.query(Track).filter(
                    Track.is_current == True,
                    Track.is_delete == False,
                    Track.is_unlisted == False,
                    Track.stem_of == None,
                    Track.track_id.in_(saved_track_ids),
                ).all())
                saved_tracks = helpers.query_result_to_list(saved_tracks)
                for saved_track in saved_tracks:
                    saved_track_id = saved_track["track_id"]
                    saved_track[response_name_constants.play_count] = \
                        track_play_counts.get(saved_track_id, 0)
                saved_tracks = \
                    populate_track_metadata(
                        session, saved_track_ids, saved_tracks, current_user_id)

                # Sort and paginate
                play_count_sorted_saved_tracks = \
                    sorted(
                        saved_tracks, key=lambda i: i[response_name_constants.play_count], reverse=True)

                play_count_sorted_saved_tracks = \
                    play_count_sorted_saved_tracks[slice(
                        offset, offset + limit, 1)]

                results['saved_tracks'] = play_count_sorted_saved_tracks

            if (searchKind in [SearchKind.all, SearchKind.users]):
                # Query followed users that have referenced this tag
                user_ids = [user['user_id'] for user in results['users']]
                followed_user_query = (session.query(
                    Follow.followee_user_id).filter(
                        Follow.is_current == True, Follow.is_delete == False,
                        Follow.follower_user_id == current_user_id,
                        Follow.followee_user_id.in_(user_ids)).all())
                followed_user_ids = [i[0] for i in followed_user_query]
                followed_users = get_unpopulated_users(session, followed_user_ids)
                followed_users = \
                    populate_user_metadata(
                        session,
                        followed_user_ids,
                        followed_users,
                        current_user_id
                    )

                followed_users_followee_sorted = \
                    sorted(
                        followed_users,
                        key=lambda i: i[response_name_constants.follower_count],
                        reverse=True)

                followed_users_followee_sorted = \
                    followed_users_followee_sorted[slice(
                        offset, offset + limit, 1)]

                results['followed_users'] = followed_users_followee_sorted

    return api_helpers.success_response(results)