def add_artist(user, search):
    """Background job: look up *search* on MusicBrainz and add the artist.

    Returns True when the job is finished (including permanent failures
    that must not be retried) and False when the caller should retry.
    """
    # Every MusicBrainz request must be rate-limited.
    tools.sleep()
    # Lazy %-args: the message is only formatted if the level is enabled.
    logging.info('[JOB] Searching for artist [%s] for user %d', search, user.id)
    found_artists, count = mb.search_artists(search, limit=2, offset=0)
    if found_artists is None:
        # None (as opposed to an empty list) signals a MusicBrainz error.
        logging.warning('[ERR] MusicBrainz error while searching, skipping')
        return True

    # Add only on an unambiguous match: a single result, or an exact name
    # match that the runner-up result does not share.
    only_one = len(found_artists) == 1
    first_is_exact = (len(found_artists) > 1 and
                      found_artists[0]['name'].lower() == search.lower() and
                      found_artists[1]['name'].lower() != search.lower())
    if only_one or first_is_exact:
        mbid = found_artists[0]['id']
        # get_by_mbid() queries MB, must sleep.
        tools.sleep()
        logging.info('[JOB] Adding artist %s', mbid)
        try:
            artist = Artist.get_by_mbid(mbid)
        except Artist.Blacklisted:
            logging.warning('[ERR] Artist %s is blacklisted, skipping', mbid)
            return True
        except Artist.Unknown:
            logging.warning('[ERR] Artist %s is unknown, skipping', mbid)
            return True
        if not artist:
            # Transient fetch failure - ask the job queue to retry.
            logging.warning('[ERR] Could not fetch artist %s, retrying', mbid)
            return False
        UserArtist.add(user, artist)
    else:
        # Ambiguous match - park the query for later manual resolution.
        logging.info('[JOB] Could not identify artist by name, saving for later')
        UserSearch(user=user, search=search).save()
    return True
def add_artist(user, search):
    """Resolve *search* via MusicBrainz and attach the artist to *user*.

    Returns True when the job is complete (success or a permanent,
    non-retryable failure) and False when it should be retried later.
    """
    tools.sleep()
    logging.info('[JOB] Searching for artist [%s] for user %d' % (search, user.id))
    results, count = mb.search_artists(search, limit=2, offset=0)
    if results is None:
        logging.warning('[ERR] MusicBrainz error while searching, skipping')
        return True

    # The first hit is trusted only when it is the sole result, or when it
    # matches the query exactly while the runner-up does not.
    lowered = search.lower()
    unambiguous = (
        len(results) == 1
        or (len(results) > 1
            and results[0]['name'].lower() == lowered
            and results[1]['name'].lower() != lowered))
    if not unambiguous:
        logging.info('[JOB] Could not identify artist by name, saving for later')
        UserSearch(user=user, search=search).save()
        return True

    mbid = results[0]['id']
    # get_by_mbid() queries MB, must sleep.
    tools.sleep()
    logging.info('[JOB] Adding artist %s' % mbid)
    try:
        artist = Artist.get_by_mbid(mbid)
    except Artist.Blacklisted:
        logging.warning('[ERR] Artist %s is blacklisted, skipping' % mbid)
        return True
    except Artist.Unknown:
        logging.warning('[ERR] Artist %s is unknown, skipping' % mbid)
        return True
    if not artist:
        logging.warning('[ERR] Could not fetch artist %s, retrying' % mbid)
        return False
    UserArtist.add(user, artist)
    return True
def artists(request):
    """Artist-list page: shows the user's artists and handles searches.

    Depending on the submitted parameters this renders the list, runs a
    MusicBrainz search (auto-adding an unambiguous match unless 'dontadd'
    is set), or queues a batch import job for multiple search terms.
    """
    artists = Artist.get_by_user(request.user)
    COLUMNS = 3
    artist_rows = arrange_for_table(artists, COLUMNS)

    # Using REQUEST because this handler can be called using both GET and POST.
    search = request.REQUEST.get('search', '')
    dontadd = request.REQUEST.get('dontadd', '')
    offset = request.REQUEST.get('offset', '')
    # A missing or non-numeric offset silently falls back to the first page.
    offset = int(offset) if offset.isdigit() else 0
    found_artists, count = [], 0
    LIMIT = 20
    if search:
        if len(search) > 16384:
            messages.error(request, 'The search string is too long.')
            return redirect('/artists')

        # FB likes are separated by '*'. 32 is completely random.
        if len(search) > 32 and search.count('*') > len(search) // 32:
            searches = [s.strip() for s in search.split('*') if s.strip()]
        else:
            searches = [s.strip() for s in search.split(',') if s.strip()]

        if len(searches) > 1 and not offset:
            # Batch add mode.
            if dontadd:
                # Search-only mode cannot handle multiple terms at once.
                messages.warning(
                    request, 'Cannot search for multiple artists. '
                    'Remove all commas and asterisks to search.')
                return render(request, 'artists.html', {
                    'artist_rows': artist_rows,
                    'search': search,
                    'dontadd': dontadd})

            # The actual adding happens asynchronously in a background job.
            Job.add_artists(request.user, searches)
            messages.info(
                request, 'Your artists will be processed in the next couple of '
                'minutes. In the meantime you can add more artists.')
            return redirect('/artists')

        found_artists, count = mb.search_artists(search, limit=LIMIT,
                                                 offset=offset)
        if found_artists is None:
            # None (as opposed to an empty list) signals a search error.
            messages.error(request,
                           'The search server could not fulfil your request '
                           'due to an internal error. Please try again later.')
            return render(request, 'artists.html', {
                'artist_rows': artist_rows,
                'search': search,
                'dontadd': dontadd})

        # A match is unambiguous when it is the sole result, or when only
        # the first of several results equals the query exactly.
        only_one = len(found_artists) == 1
        first_is_exact = (len(found_artists) > 1 and
                          found_artists[0]['name'].lower() == search.lower() and
                          found_artists[1]['name'].lower() != search.lower())
        if not dontadd and not offset and (only_one or first_is_exact):
            # Only one artist found - add it right away.
            artist_data = found_artists[0]
            mbid = artist_data['id']
            try:
                artist = Artist.get_by_mbid(mbid)
            except (Artist.Blacklisted, Artist.Unknown):
                return redirect('/artists')
            if not artist:
                # TODO: error message
                return redirect('/artists')
            UserArtist.add(request.user, artist)
            messages.success(request, "%s has been added!" % artist.name)
            return redirect('/artists')

    # Pagination bookkeeping for the search-result listing; computed before
    # the blacklist filter so paging stays aligned with the server's counts.
    artists_offset = offset + len(found_artists)
    artists_left = max(0, count - artists_offset)
    found_artists = [a for a in found_artists
                     if a['id'] not in Artist.blacklisted]
    importing = ', '.join(Job.importing_artists(request.user))
    # Searches saved for later resolution; capped at 200 for display.
    pending = sorted(s.search for s in UserSearch.get(request.user)[:200])
    pending_rows = arrange_for_table(pending, COLUMNS)
    return render(request, 'artists.html', {
        'artist_rows': artist_rows,
        'artist_count': len(artists),
        'search': search,
        'dontadd': dontadd,
        'found_artists': found_artists,
        'artists_offset': artists_offset,
        'artists_left': artists_left,
        'importing': importing,
        'pending_rows': pending_rows,
        'pending_count': len(pending)})
def artists(request):
    """Render the artist-list page and process artist searches.

    Handles three cases: plain page render, a MusicBrainz search (with
    automatic add of a single unambiguous match unless 'dontadd' is set),
    and batch import of multiple comma- or asterisk-separated terms.
    """
    artists = Artist.get_by_user(request.user)
    COLUMNS = 3
    artist_rows = arrange_for_table(artists, COLUMNS)

    # Using REQUEST because this handler can be called using both GET and POST.
    search = request.REQUEST.get('search', '')
    dontadd = request.REQUEST.get('dontadd', '')
    offset = request.REQUEST.get('offset', '')
    # Anything non-numeric (including empty) becomes page offset 0.
    offset = int(offset) if offset.isdigit() else 0
    found_artists, count = [], 0
    LIMIT = 20
    if search:
        if len(search) > 16384:
            messages.error(request, 'The search string is too long.')
            return redirect('/artists')

        # FB likes are separated by '*'. 32 is completely random.
        if len(search) > 32 and search.count('*') > len(search) // 32:
            searches = [s.strip() for s in search.split('*') if s.strip()]
        else:
            searches = [s.strip() for s in search.split(',') if s.strip()]

        if len(searches) > 1 and not offset:
            # Batch add mode.
            if dontadd:
                # Multiple terms cannot be combined with search-only mode.
                messages.warning(
                    request, 'Cannot search for multiple artists. '
                    'Remove all commas and asterisks to search.')
                return render(
                    request, 'artists.html', {
                        'artist_rows': artist_rows,
                        'search': search,
                        'dontadd': dontadd
                    })

            # Hand the list off to a background job and return immediately.
            Job.add_artists(request.user, searches)
            messages.info(
                request, 'Your artists will be processed in the next couple of '
                'minutes. In the meantime you can add more artists.')
            return redirect('/artists')

        found_artists, count = mb.search_artists(search, limit=LIMIT,
                                                 offset=offset)
        if found_artists is None:
            # A None result (rather than an empty list) means the search
            # backend failed.
            messages.error(
                request, 'The search server could not fulfil your request '
                'due to an internal error. Please try again later.')
            return render(request, 'artists.html', {
                'artist_rows': artist_rows,
                'search': search,
                'dontadd': dontadd
            })

        # Unambiguous match: sole result, or an exact first hit whose
        # runner-up does not also match exactly.
        only_one = len(found_artists) == 1
        first_is_exact = (
            len(found_artists) > 1
            and found_artists[0]['name'].lower() == search.lower()
            and found_artists[1]['name'].lower() != search.lower())
        if not dontadd and not offset and (only_one or first_is_exact):
            # Only one artist found - add it right away.
            artist_data = found_artists[0]
            mbid = artist_data['id']
            try:
                artist = Artist.get_by_mbid(mbid)
            except (Artist.Blacklisted, Artist.Unknown):
                return redirect('/artists')
            if not artist:
                # TODO: error message
                return redirect('/artists')
            UserArtist.add(request.user, artist)
            messages.success(request, "%s has been added!" % artist.name)
            return redirect('/artists')

    # Paging totals are derived before blacklist filtering so they agree
    # with the counts reported by the search server.
    artists_offset = offset + len(found_artists)
    artists_left = max(0, count - artists_offset)
    found_artists = [
        a for a in found_artists if a['id'] not in Artist.blacklisted
    ]
    importing = ', '.join(Job.importing_artists(request.user))
    # Saved-for-later searches, limited to 200 entries for the page.
    pending = sorted(s.search for s in UserSearch.get(request.user)[:200])
    pending_rows = arrange_for_table(pending, COLUMNS)
    return render(
        request, 'artists.html', {
            'artist_rows': artist_rows,
            'artist_count': len(artists),
            'search': search,
            'dontadd': dontadd,
            'found_artists': found_artists,
            'artists_offset': artists_offset,
            'artists_left': artists_left,
            'importing': importing,
            'pending_rows': pending_rows,
            'pending_count': len(pending)
        })