def mappings(mbid=None):
    """Get mappings to Spotify for a specified MusicBrainz ID.

    Returns:
        List containing Spotify URIs that are mapped to specified MBID.
    """
    if _base_url is None:
        flash(lazy_gettext(_UNAVAILABLE_MSG), "warning")
        return []

    resp = cache.get(mbid, _CACHE_NAMESPACE)
    if not resp:
        try:
            session = requests.Session()
            session.mount(_base_url, HTTPAdapter(max_retries=2))
            resp = session.post(
                _base_url + 'mapping',
                headers={'Content-Type': 'application/json'},
                data=json.dumps({'mbid': mbid}),
            ).json().get('mappings')
        except RequestException:
            flash(lazy_gettext("Spotify mapping server is unavailable. You will not see an embedded player."), "warning")
            return []
        cache.set(key=mbid, namespace=_CACHE_NAMESPACE, val=resp)
    return resp

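
# Example (a minimal sketch, not part of the original module): how a caller might
# use mappings(). It assumes an active Flask request context, since mappings()
# may call flash(); the zero UUID is a hypothetical placeholder MBID.
def _example_pick_spotify_uri(mbid="00000000-0000-0000-0000-000000000000"):
    uris = mappings(mbid)
    # mappings() returns [] when the mapping server is unreachable or when no
    # mapping exists, so an empty list simply means "no embedded player".
    return uris[0] if uris else None
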
def review_list_handler():
    """Get list of reviews.

    :query entity_id: UUID of the entity that is being reviewed **(optional)**
    :query entity_type: One of the supported reviewable entities ('release_group', 'event', etc.) **(optional)**
    :query user_id: user's UUID **(optional)**
    :query sort: ``rating`` or ``created`` **(optional)**
    :query limit: results limit, min is 1, max is 50, default is 50 **(optional)**
    :query offset: result offset, default is 0 **(optional)**
    :query language: language code (ISO 639-1) **(optional)**

    :resheader Content-Type: *application/json*
    """
    # TODO: This check is only here to keep old clients working and needs to be removed.
    release_group = Parser.uuid('uri', 'release_group', optional=True)
    if release_group:
        entity_id = release_group
        entity_type = 'release_group'
    else:
        entity_id = Parser.uuid('uri', 'entity_id', optional=True)
        entity_type = Parser.string('uri', 'entity_type', valid_values=ENTITY_TYPES, optional=True)

    user_id = Parser.uuid('uri', 'user_id', optional=True)
    sort = Parser.string('uri', 'sort', valid_values=['rating', 'created'], optional=True)
    limit = Parser.int('uri', 'limit', min=1, max=50, optional=True) or 50
    offset = Parser.int('uri', 'offset', optional=True) or 0
    language = Parser.string('uri', 'language', min=2, max=3, optional=True)
    if language and language not in supported_languages:
        raise InvalidRequest(desc='Unsupported language')

    # TODO(roman): Ideally caching logic should live inside the model. Otherwise it
    # becomes hard to track all this stuff.
    cache_key = cache.gen_key('list', entity_id, user_id, sort, limit, offset, language)
    cached_result = cache.get(cache_key, Review.CACHE_NAMESPACE)
    if cached_result:
        reviews = cached_result['reviews']
        count = cached_result['count']
    else:
        reviews, count = Review.list(
            entity_id=entity_id,
            entity_type=entity_type,
            user_id=user_id,
            sort=sort,
            limit=limit,
            offset=offset,
            language=language,
        )
        reviews = [p.to_dict() for p in reviews]
        cache.set(cache_key, {
            'reviews': reviews,
            'count': count,
        }, namespace=Review.CACHE_NAMESPACE)

    return jsonify(limit=limit, offset=offset, count=count, reviews=reviews)

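
# Example (sketch only): calling the handler above over HTTP with its documented
# query parameters. The base URL and route are assumptions for illustration; the
# real prefix depends on how the API blueprint is mounted. requests is assumed to
# be importable, as it is elsewhere in this codebase, and the UUID is hypothetical.
def _example_fetch_reviews(base_url="https://critiquebrainz.org/ws/1"):
    resp = requests.get(base_url + "/review/", params={
        "entity_id": "00000000-0000-0000-0000-000000000000",  # hypothetical UUID
        "entity_type": "release_group",
        "sort": "rating",
        "limit": 10,
        "offset": 0,
    })
    data = resp.json()
    # The handler responds with limit, offset, count and the list of reviews.
    return data["reviews"], data["count"]
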
def get_album(spotify_id):
    """Get Spotify catalog information for a single album.

    Returns:
        Album object from Spotify. More info about this type of object is available at
        https://developer.spotify.com/web-api/object-model/#album-object.
    """
    namespace = "spotify_album"
    album = cache.get(spotify_id, namespace)
    if not album:
        album = requests.get("%s/albums/%s" % (BASE_URL, spotify_id)).json()
        cache.set(key=spotify_id, namespace=namespace, val=album, time=DEFAULT_CACHE_EXPIRATION)
    return album

def search(query, type, limit=20, offset=0):
    """Get Spotify catalog information about artists, albums, or tracks that match a keyword string.

    More information is available at https://developer.spotify.com/web-api/search-item/.
    """
    key = cache.gen_key(query, type, limit, offset)
    namespace = "spotify_search"
    result = cache.get(key, namespace)
    if not result:
        result = requests.get("%s/search?q=%s&type=%s&limit=%s&offset=%s" %
                              (BASE_URL, urllib.quote(query.encode('utf8')),
                               type, str(limit), str(offset))).json()
        cache.set(key=key, namespace=namespace, val=result, time=DEFAULT_CACHE_EXPIRATION)
    return result

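
# Example (sketch): a small helper on top of search() that extracts album names
# from the first page of results. The result["albums"]["items"] shape follows the
# Spotify search response documented at the URL in the docstring above; .get() is
# used defensively in case the shape differs.
def _example_album_names(keyword):
    result = search(keyword, type="album", limit=10)
    items = result.get("albums", {}).get("items", [])
    return [item.get("name") for item in items]
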
def get_release_by_id(id):
    """Get release with the MusicBrainz ID.

    Returns:
        Release object with the following includes: recordings, media, release-groups.
    """
    key = cache.gen_key(id)
    release = cache.get(key)
    if not release:
        try:
            release = musicbrainzngs.get_release_by_id(
                id, ['recordings', 'media', 'release-groups']).get('release')
        except ResponseError as e:
            if e.cause.code == 404:
                return None
            else:
                raise InternalServerError(e.cause.msg)
        cache.set(key=key, val=release, time=DEFAULT_CACHE_EXPIRATION)
    return release

def get_artist_by_id(id):
    """Get artist with the MusicBrainz ID.

    Returns:
        Artist object with the following includes: url-rels, artist-rels.
    """
    key = cache.gen_key(id)
    artist = cache.get(key)
    if not artist:
        try:
            artist = musicbrainzngs.get_artist_by_id(id, ['url-rels', 'artist-rels']).get('artist')
        except ResponseError as e:
            if e.cause.code == 404:
                return None
            else:
                raise InternalServerError(e.cause.msg)
        artist = artist_rel.process(artist)
        cache.set(key=key, val=artist, time=DEFAULT_CACHE_EXPIRATION)
    return artist

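
# Example (sketch): the *_by_id helpers in this module return None when
# MusicBrainz answers with 404, so callers are expected to handle the missing
# case themselves. NotFound is assumed to come from werkzeug.exceptions, the same
# package that provides InternalServerError used above.
def _example_artist_or_404(mbid):
    from werkzeug.exceptions import NotFound
    artist = get_artist_by_id(mbid)
    if artist is None:
        raise NotFound("Artist not found.")
    return artist
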
def get_place_by_id(id):
    """Get place with the MusicBrainz ID.

    Returns:
        Place object with the following includes: artist-rels, place-rels,
        release-group-rels, url-rels.
    """
    key = cache.gen_key(id)
    place = cache.get(key)
    if not place:
        try:
            place = musicbrainzngs.get_place_by_id(
                id, ['artist-rels', 'place-rels', 'release-group-rels', 'url-rels']).get('place')
        except ResponseError as e:
            if e.cause.code == 404:
                return None
            else:
                raise InternalServerError(e.cause.msg)
        cache.set(key=key, val=place, time=DEFAULT_CACHE_EXPIRATION)
    return place

def browse_release_groups(artist_id=None, release_types=None, limit=None, offset=None):
    """Get all release groups linked to an artist.

    You need to provide the artist's MusicBrainz ID.
    """
    if release_types is None:
        release_types = []
    key = cache.gen_key(artist_id, limit, offset, *release_types)
    release_groups = cache.get(key)
    if not release_groups:
        try:
            api_resp = musicbrainzngs.browse_release_groups(
                artist=artist_id, release_type=release_types, limit=limit, offset=offset)
            release_groups = api_resp.get('release-group-count'), api_resp.get('release-group-list')
        except ResponseError as e:
            if e.cause.code == 404:
                return None
            else:
                raise InternalServerError(e.cause.msg)
        cache.set(key=key, val=release_groups, time=DEFAULT_CACHE_EXPIRATION)
    return release_groups

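
# Example (sketch): browse_release_groups() returns a (count, list) tuple, or
# None when the artist does not exist, so pagination has to unpack it. The page
# size of 50 is an arbitrary illustrative choice, not a limit set by this module.
def _example_all_release_groups(artist_mbid):
    page_size = 50
    offset = 0
    collected = []
    while True:
        result = browse_release_groups(artist_id=artist_mbid, limit=page_size, offset=offset)
        if result is None:
            return None  # artist not found
        count, release_groups = result
        collected.extend(release_groups)
        offset += page_size
        if offset >= count:
            return collected
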
def get_release_group_by_id(id):
    """Get release group with the MusicBrainz ID.

    Returns:
        Release group object with the following includes: artists, releases,
        release-group-rels, url-rels, work-rels.
    """
    key = cache.gen_key(id)
    release_group = cache.get(key)
    if not release_group:
        try:
            release_group = musicbrainzngs.get_release_group_by_id(
                id, ['artists', 'releases', 'release-group-rels', 'url-rels', 'work-rels']
            ).get('release-group')
        except ResponseError as e:
            if e.cause.code == 404:
                return None
            else:
                raise InternalServerError(e.cause.msg)
        release_group = release_group_rel.process(release_group)
        cache.set(key=key, val=release_group, time=DEFAULT_CACHE_EXPIRATION)
    return release_group

def get_popular(cls, limit=None):
    """Get list of popular reviews.

    Popularity is determined by the rating of a particular review. Rating is
    the difference between positive and negative votes, and only votes from
    the last month are used to calculate it. Results are cached for 1 hour.

    Args:
        limit: Maximum number of reviews to return.

    Returns:
        Randomized list of popular reviews which are converted into
        dictionaries using the to_dict method.
    """
    cache_key = cache.gen_key('popular_reviews', limit)
    reviews = cache.get(cache_key, Review.CACHE_NAMESPACE)

    if not reviews:
        # Selecting reviews for distinct release groups
        # TODO(roman): There is a problem with selecting popular reviews like
        # this: if there are multiple reviews for a release group we don't
        # choose the most popular.
        distinct_subquery = db.session.query(Review) \
            .filter(Review.is_draft == False) \
            .distinct(Review.entity_id).subquery()

        # Randomizing results to get some variety
        rand_subquery = db.session.query(aliased(Review, distinct_subquery)) \
            .order_by(func.random()).subquery()

        # Sorting reviews by rating
        query = db.session.query(aliased(Review, rand_subquery))

        # Preparing base query for getting votes
        vote_query_base = db.session.query(
            Vote.revision_id, Vote.vote, func.count().label('c')) \
            .group_by(Vote.revision_id, Vote.vote) \
            .filter(Vote.rated_at > datetime.now() - timedelta(weeks=4))

        # Getting positive votes
        votes_pos = vote_query_base.subquery('votes_pos')
        query = query.outerjoin(Revision).outerjoin(
            votes_pos, and_(votes_pos.c.revision_id == Revision.id,
                            votes_pos.c.vote == True))

        # Getting negative votes
        votes_neg = vote_query_base.subquery('votes_neg')
        query = query.outerjoin(Revision).outerjoin(
            votes_neg, and_(votes_neg.c.revision_id == Revision.id,
                            votes_neg.c.vote == False))

        query = query.order_by(desc(func.coalesce(votes_pos.c.c, 0) - func.coalesce(votes_neg.c.c, 0)))

        if limit is not None:
            # Selecting more reviews than needed so we'll have something
            # different to show (shuffling is done below).
            query = query.limit(limit * 4)
        reviews = query.all()
        reviews = [review.to_dict(confidential=True) for review in reviews]
        cache.set(cache_key, reviews, 1 * 60 * 60, Review.CACHE_NAMESPACE)  # 1 hour

    shuffle(reviews)  # a bit more variety
    return reviews[:limit]

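
# Example (sketch): how a front page view might consume get_popular(). Review
# stands for the model class this method is defined on; the limit of 6 is an
# illustrative choice. Each element is already a plain dictionary (see to_dict
# above), so the result can be passed straight to a template or serialized to JSON.
def _example_front_page_reviews():
    return Review.get_popular(limit=6)
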