def notify(self, message, event):
    if not self.enabled:
        return

    # Split host and port; default to localhost:23053 when no host is configured
    if self.host == "":
        host, port = "localhost", 23053
    elif ":" in self.host:
        host, port = self.host.split(':', 1)
        port = int(port)
    else:
        host, port = self.host, 23053

    # If password is empty, assume none
    if self.password == "":
        password = None
    else:
        password = self.password

    # Register notification
    growl = gntp.notifier.GrowlNotifier(
        applicationName='Headphones',
        notifications=['New Event'],
        defaultNotifications=['New Event'],
        hostname=host,
        port=port,
        password=password
    )

    try:
        growl.register()
    except gntp.notifier.errors.NetworkError:
        logger.warning(u'Growl notification failed: network error')
        return
    except gntp.notifier.errors.AuthError:
        logger.warning(u'Growl notification failed: authentication error')
        return

    # Fix message
    message = message.encode(headphones.SYS_ENCODING, "replace")

    # Send it, including an image
    image_file = os.path.join(str(headphones.PROG_DIR),
                              "data/images/headphoneslogo.png")

    with open(image_file, 'rb') as f:
        image = f.read()

    try:
        growl.notify(
            noteType='New Event',
            title=event,
            description=message,
            icon=image
        )
    except gntp.notifier.errors.NetworkError:
        logger.warning(u'Growl notification failed: network error')
        return

    logger.info(u"Growl notifications sent.")
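# --- Illustrative usage sketch (not part of the original module) ---
# Assuming this notify() method belongs to the Growl notifier class in
# notifiers.py (the class name below is an assumption), a caller would fire
# a notification roughly like this:
#
#     growl = GROWL()                                         # hypothetical class name
#     growl.notify(message="Snatched: Artist - Album", event="Download")
#
# The configured host may be "hostname:port"; when the port is omitted the
# GNTP default of 23053 is used, and an empty password is sent as None.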
def __init__(self, base_url=None, username=None, password=None):
    host = headphones.CONFIG.QBITTORRENT_HOST
    if not host.startswith('http'):
        host = 'http://' + host
    if host.endswith('/'):
        host = host[:-1]
    if host.endswith('/gui'):
        host = host[:-4]

    self.base_url = host
    self.username = headphones.CONFIG.QBITTORRENT_USERNAME
    self.password = headphones.CONFIG.QBITTORRENT_PASSWORD

    # Try new v2 api
    try:
        self.qb = Client(self.base_url)
        login_text = self.qb.login(self.username, self.password)
        if login_text:
            logger.warning(
                "Could not login to qBittorrent v2 api, check credentials: %s",
                login_text)
        self.version = 2
    except Exception as e:
        logger.warning(
            "Error with qBittorrent v2 api, check settings or update, will try v1: %s" % e)
        self.cookiejar = cookielib.CookieJar()
        self.opener = self._make_opener()
        self._get_sid(self.base_url, self.username, self.password)
        self.version = 1
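# --- Illustrative sketch of the v2-then-v1 fallback above (not part of the
# original module) ---
# The constructor probes the qBittorrent Web API v2 client first and only
# falls back to the legacy cookie/SID flow when that raises. Callers can
# then branch on the detected version; the wrapper class name and the method
# names below are assumptions for illustration only:
#
#     client = QBitTorrent()                         # assumed wrapper class name
#     if client.version == 2:
#         client.qb.download_from_link(torrent_url)  # python-qbittorrent call (assumed)
#     else:
#         client.add_torrent_url(torrent_url)        # legacy v1 helper (assumed)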
def addArtisttoDB(artistid, extrasonly=False, forcefull=False): # Putting this here to get around the circular import. We're using this to update thumbnails for artist/albums from headphones import cache # Can't add various artists - throws an error from MB if artistid in blacklisted_special_artists: logger.warn('Cannot import blocked special purpose artist with id' + artistid) return # We'll use this to see if we should update the 'LastUpdated' time stamp errors = False myDB = db.DBConnection() # Delete from blacklist if it's on there myDB.action('DELETE from blacklist WHERE ArtistID=?', [artistid]) # We need the current minimal info in the database instantly # so we don't throw a 500 error when we redirect to the artistPage controlValueDict = {"ArtistID": artistid} # Don't replace a known artist name with an "Artist ID" placeholder dbartist = myDB.action('SELECT * FROM artists WHERE ArtistID=?', [artistid]).fetchone() # Only modify the Include Extras stuff if it's a new artist. We need it early so we know what to fetch if not dbartist: newValueDict = {"ArtistName": "Artist ID: %s" % (artistid), "Status": "Loading", "IncludeExtras": headphones.CONFIG.INCLUDE_EXTRAS, "Extras": headphones.CONFIG.EXTRAS} else: newValueDict = {"Status": "Loading"} myDB.upsert("artists", newValueDict, controlValueDict) artist = mb.getArtist(artistid, extrasonly) if artist and artist.get('artist_name') in blacklisted_special_artist_names: logger.warn('Cannot import blocked special purpose artist: %s' % artist.get('artist_name')) myDB.action('DELETE from artists WHERE ArtistID=?', [artistid]) #in case it's already in the db myDB.action('DELETE from albums WHERE ArtistID=?', [artistid]) myDB.action('DELETE from tracks WHERE ArtistID=?', [artistid]) return if not artist: logger.warn("Error fetching artist info. ID: " + artistid) if dbartist is None: newValueDict = {"ArtistName": "Fetch failed, try refreshing. (%s)" % (artistid), "Status": "Active"} else: newValueDict = {"Status": "Active"} myDB.upsert("artists", newValueDict, controlValueDict) return if artist['artist_name'].startswith('The '): sortname = artist['artist_name'][4:] else: sortname = artist['artist_name'] logger.info(u"Now adding/updating: " + artist['artist_name']) controlValueDict = {"ArtistID": artistid} newValueDict = {"ArtistName": artist['artist_name'], "ArtistSortName": sortname, "DateAdded": helpers.today(), "Status": "Loading"} myDB.upsert("artists", newValueDict, controlValueDict) # See if we need to grab extras. Artist specific extras take precedence # over global option. Global options are set when adding a new artist try: db_artist = myDB.action('SELECT IncludeExtras, Extras from artists WHERE ArtistID=?', [artistid]).fetchone() includeExtras = db_artist['IncludeExtras'] except IndexError: includeExtras = False # Clean all references to release group in dB that are no longer referenced # from the musicbrainz refresh group_list = [] force_repackage = 0 # Don't nuke the database if there's a MusicBrainz error if len(artist['releasegroups']) != 0: for groups in artist['releasegroups']: group_list.append(groups['id']) if not extrasonly: remove_missing_groups_from_albums = myDB.select("SELECT AlbumID FROM albums WHERE ArtistID=?", [artistid]) else: remove_missing_groups_from_albums = myDB.select('SELECT AlbumID FROM albums WHERE ArtistID=? 
AND Status="Skipped" AND Type!="Album"', [artistid]) for items in remove_missing_groups_from_albums: if items['AlbumID'] not in group_list: # Remove all from albums/tracks that aren't in release groups myDB.action("DELETE FROM albums WHERE AlbumID=?", [items['AlbumID']]) myDB.action("DELETE FROM allalbums WHERE AlbumID=?", [items['AlbumID']]) myDB.action("DELETE FROM tracks WHERE AlbumID=?", [items['AlbumID']]) myDB.action("DELETE FROM alltracks WHERE AlbumID=?", [items['AlbumID']]) myDB.action('DELETE from releases WHERE ReleaseGroupID=?', [items['AlbumID']]) logger.info("[%s] Removing all references to release group %s to reflect MusicBrainz refresh" % (artist['artist_name'], items['AlbumID'])) if not extrasonly: force_repackage = 1 else: if not extrasonly: logger.info("[%s] There was either an error pulling data from MusicBrainz or there might not be any releases for this category" % artist['artist_name']) # Then search for releases within releasegroups, if releases don't exist, then remove from allalbums/alltracks album_searches = [] for rg in artist['releasegroups']: al_title = rg['title'] today = helpers.today() rgid = rg['id'] skip_log = 0 #Make a user configurable variable to skip update of albums with release dates older than this date (in days) pause_delta = headphones.CONFIG.MB_IGNORE_AGE rg_exists = myDB.action("SELECT * from albums WHERE AlbumID=?", [rg['id']]).fetchone() if not forcefull: new_release_group = False try: check_release_date = rg_exists['ReleaseDate'] except TypeError: check_release_date = None new_release_group = True if new_release_group: logger.info("[%s] Now adding: %s (New Release Group)" % (artist['artist_name'], rg['title'])) new_releases = mb.get_new_releases(rgid, includeExtras) else: if check_release_date is None or check_release_date == u"None": logger.info("[%s] Now updating: %s (No Release Date)" % (artist['artist_name'], rg['title'])) new_releases = mb.get_new_releases(rgid, includeExtras, True) else: if len(check_release_date) == 10: release_date = check_release_date elif len(check_release_date) == 7: release_date = check_release_date + "-31" elif len(check_release_date) == 4: release_date = check_release_date + "-12-31" else: release_date = today if helpers.get_age(today) - helpers.get_age(release_date) < pause_delta: logger.info("[%s] Now updating: %s (Release Date <%s Days)", artist['artist_name'], rg['title'], pause_delta) new_releases = mb.get_new_releases(rgid, includeExtras, True) else: logger.info("[%s] Skipping: %s (Release Date >%s Days)", artist['artist_name'], rg['title'], pause_delta) skip_log = 1 new_releases = 0 if force_repackage == 1: new_releases = -1 logger.info('[%s] Forcing repackage of %s (Release Group Removed)', artist['artist_name'], al_title) else: new_releases = new_releases else: logger.info("[%s] Now adding/updating: %s (Comprehensive Force)", artist['artist_name'], rg['title']) new_releases = mb.get_new_releases(rgid, includeExtras, forcefull) if new_releases != 0: # Dump existing hybrid release since we're repackaging/replacing it myDB.action("DELETE from albums WHERE ReleaseID=?", [rg['id']]) myDB.action("DELETE from allalbums WHERE ReleaseID=?", [rg['id']]) myDB.action("DELETE from tracks WHERE ReleaseID=?", [rg['id']]) myDB.action("DELETE from alltracks WHERE ReleaseID=?", [rg['id']]) myDB.action('DELETE from releases WHERE ReleaseGroupID=?', [rg['id']]) # This will be used later to build a hybrid release fullreleaselist = [] # Search for releases within a release group find_hybrid_releases = myDB.action("SELECT * 
from allalbums WHERE AlbumID=?", [rg['id']]) # Build the dictionary for the fullreleaselist for items in find_hybrid_releases: if items['ReleaseID'] != rg['id']: #don't include hybrid information, since that's what we're replacing hybrid_release_id = items['ReleaseID'] newValueDict = {"ArtistID": items['ArtistID'], "ArtistName": items['ArtistName'], "AlbumTitle": items['AlbumTitle'], "AlbumID": items['AlbumID'], "AlbumASIN": items['AlbumASIN'], "ReleaseDate": items['ReleaseDate'], "Type": items['Type'], "ReleaseCountry": items['ReleaseCountry'], "ReleaseFormat": items['ReleaseFormat'] } find_hybrid_tracks = myDB.action("SELECT * from alltracks WHERE ReleaseID=?", [hybrid_release_id]) totalTracks = 1 hybrid_track_array = [] for hybrid_tracks in find_hybrid_tracks: hybrid_track_array.append({ 'number': hybrid_tracks['TrackNumber'], 'title': hybrid_tracks['TrackTitle'], 'id': hybrid_tracks['TrackID'], #'url': hybrid_tracks['TrackURL'], 'duration': hybrid_tracks['TrackDuration'] }) totalTracks += 1 newValueDict['ReleaseID'] = hybrid_release_id newValueDict['Tracks'] = hybrid_track_array fullreleaselist.append(newValueDict) # Basically just do the same thing again for the hybrid release # This may end up being called with an empty fullreleaselist try: hybridrelease = getHybridRelease(fullreleaselist) logger.info('[%s] Packaging %s releases into hybrid title' % (artist['artist_name'], rg['title'])) except Exception as e: errors = True logger.warn('[%s] Unable to get hybrid release information for %s: %s' % (artist['artist_name'], rg['title'], e)) continue # Use the ReleaseGroupID as the ReleaseID for the hybrid release to differentiate it # We can then use the condition WHERE ReleaseID == ReleaseGroupID to select it # The hybrid won't have a country or a format controlValueDict = {"ReleaseID": rg['id']} newValueDict = {"ArtistID": artistid, "ArtistName": artist['artist_name'], "AlbumTitle": rg['title'], "AlbumID": rg['id'], "AlbumASIN": hybridrelease['AlbumASIN'], "ReleaseDate": hybridrelease['ReleaseDate'], "Type": rg['type'] } myDB.upsert("allalbums", newValueDict, controlValueDict) for track in hybridrelease['Tracks']: cleanname = helpers.cleanName(artist['artist_name'] + ' ' + rg['title'] + ' ' + track['title']) controlValueDict = {"TrackID": track['id'], "ReleaseID": rg['id']} newValueDict = {"ArtistID": artistid, "ArtistName": artist['artist_name'], "AlbumTitle": rg['title'], "AlbumASIN": hybridrelease['AlbumASIN'], "AlbumID": rg['id'], "TrackTitle": track['title'], "TrackDuration": track['duration'], "TrackNumber": track['number'], "CleanName": cleanname } match = myDB.action('SELECT Location, BitRate, Format from have WHERE CleanName=?', [cleanname]).fetchone() if not match: match = myDB.action('SELECT Location, BitRate, Format from have WHERE ArtistName LIKE ? AND AlbumTitle LIKE ? AND TrackTitle LIKE ?', [artist['artist_name'], rg['title'], track['title']]).fetchone() #if not match: #match = myDB.action('SELECT Location, BitRate, Format from have WHERE TrackID=?', [track['id']]).fetchone() if match: newValueDict['Location'] = match['Location'] newValueDict['BitRate'] = match['BitRate'] newValueDict['Format'] = match['Format'] #myDB.action('UPDATE have SET Matched="True" WHERE Location=?', [match['Location']]) myDB.action('UPDATE have SET Matched=? 
WHERE Location=?', (rg['id'], match['Location'])) myDB.upsert("alltracks", newValueDict, controlValueDict) # Delete matched tracks from the have table #myDB.action('DELETE from have WHERE Matched="True"') # If there's no release in the main albums tables, add the default (hybrid) # If there is a release, check the ReleaseID against the AlbumID to see if they differ (user updated) # check if the album already exists if not rg_exists: releaseid = rg['id'] else: releaseid = rg_exists['ReleaseID'] if not releaseid: releaseid = rg['id'] album = myDB.action('SELECT * from allalbums WHERE ReleaseID=?', [releaseid]).fetchone() controlValueDict = {"AlbumID": rg['id']} newValueDict = {"ArtistID": album['ArtistID'], "ArtistName": album['ArtistName'], "AlbumTitle": album['AlbumTitle'], "ReleaseID": album['ReleaseID'], "AlbumASIN": album['AlbumASIN'], "ReleaseDate": album['ReleaseDate'], "Type": album['Type'], "ReleaseCountry": album['ReleaseCountry'], "ReleaseFormat": album['ReleaseFormat'] } if rg_exists: newValueDict['DateAdded'] = rg_exists['DateAdded'] newValueDict['Status'] = rg_exists['Status'] else: today = helpers.today() newValueDict['DateAdded'] = today if headphones.CONFIG.AUTOWANT_ALL: newValueDict['Status'] = "Wanted" elif album['ReleaseDate'] > today and headphones.CONFIG.AUTOWANT_UPCOMING: newValueDict['Status'] = "Wanted" # Sometimes "new" albums are added to musicbrainz after their release date, so let's try to catch these # The first test just makes sure we have year-month-day elif helpers.get_age(album['ReleaseDate']) and helpers.get_age(today) - helpers.get_age(album['ReleaseDate']) < 21 and headphones.CONFIG.AUTOWANT_UPCOMING: newValueDict['Status'] = "Wanted" else: newValueDict['Status'] = "Skipped" myDB.upsert("albums", newValueDict, controlValueDict) tracks = myDB.action('SELECT * from alltracks WHERE ReleaseID=?', [releaseid]).fetchall() # This is used to see how many tracks you have from an album - to # mark it as downloaded. Default is 80%, can be set in config as # ALBUM_COMPLETION_PCT total_track_count = len(tracks) if total_track_count == 0: logger.warning("Total track count is zero for Release ID " + "'%s', skipping.", releaseid) continue for track in tracks: controlValueDict = {"TrackID": track['TrackID'], "AlbumID": rg['id']} newValueDict = {"ArtistID": track['ArtistID'], "ArtistName": track['ArtistName'], "AlbumTitle": track['AlbumTitle'], "AlbumASIN": track['AlbumASIN'], "ReleaseID": track['ReleaseID'], "TrackTitle": track['TrackTitle'], "TrackDuration": track['TrackDuration'], "TrackNumber": track['TrackNumber'], "CleanName": track['CleanName'], "Location": track['Location'], "Format": track['Format'], "BitRate": track['BitRate'] } myDB.upsert("tracks", newValueDict, controlValueDict) # Mark albums as downloaded if they have at least 80% (by default, configurable) of the album have_track_count = len(myDB.select('SELECT * from tracks WHERE AlbumID=? AND Location IS NOT NULL', [rg['id']])) marked_as_downloaded = False if rg_exists: if rg_exists['Status'] == 'Skipped' and ((have_track_count / float(total_track_count)) >= (headphones.CONFIG.ALBUM_COMPLETION_PCT / 100.0)): myDB.action('UPDATE albums SET Status=? WHERE AlbumID=?', ['Downloaded', rg['id']]) marked_as_downloaded = True else: if ((have_track_count / float(total_track_count)) >= (headphones.CONFIG.ALBUM_COMPLETION_PCT / 100.0)): myDB.action('UPDATE albums SET Status=? 
WHERE AlbumID=?', ['Downloaded', rg['id']]) marked_as_downloaded = True logger.info(u"[%s] Seeing if we need album art for %s" % (artist['artist_name'], rg['title'])) cache.getThumb(AlbumID=rg['id']) # Start a search for the album if it's new, hasn't been marked as # downloaded and autowant_all is selected. This search is deferred, # in case the search fails and halts the rest of the import. if not rg_exists and not marked_as_downloaded and headphones.CONFIG.AUTOWANT_ALL: album_searches.append(rg['id']) else: if skip_log == 0: logger.info(u"[%s] No new releases, so no changes made to %s" % (artist['artist_name'], rg['title'])) time.sleep(3) finalize_update(artistid, artist['artist_name'], errors) logger.info(u"Seeing if we need album art for: %s" % artist['artist_name']) cache.getThumb(ArtistID=artistid) if errors: logger.info("[%s] Finished updating artist: %s but with errors, so not marking it as updated in the database" % (artist['artist_name'], artist['artist_name'])) else: myDB.action('DELETE FROM newartists WHERE ArtistName = ?', [artist['artist_name']]) logger.info(u"Updating complete for: %s" % artist['artist_name']) # Start searching for newly added albums if album_searches: from headphones import searcher logger.info("Start searching for %d albums.", len(album_searches)) for album_search in album_searches: searcher.searchforalbum(albumid=album_search)
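# --- Worked example for the "Downloaded" threshold above (illustrative) ---
# An album is flagged Downloaded when have_track_count / total_track_count
# >= ALBUM_COMPLETION_PCT / 100. With the default of 80%, an 11-track album
# with 9 matched files gives 9 / 11.0 ~= 0.82 >= 0.80 and is marked
# Downloaded; with 8 matched files (~0.73) its status is left unchanged.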
def libraryScan(dir=None, append=False, ArtistID=None, ArtistName=None, cron=False, artistScan=False): if cron and not headphones.CONFIG.LIBRARYSCAN: return if not dir: if not headphones.CONFIG.MUSIC_DIR: return else: dir = headphones.CONFIG.MUSIC_DIR # If we're appending a dir, it's coming from the post processor which is # already bytestring if not append or artistScan: dir = dir.encode(headphones.SYS_ENCODING) if not os.path.isdir(dir): logger.warn('Cannot find directory: %s. Not scanning' % dir.decode(headphones.SYS_ENCODING, 'replace')) return myDB = db.DBConnection() new_artists = [] logger.info('Scanning music directory: %s' % dir.decode(headphones.SYS_ENCODING, 'replace')) if not append: # Clean up bad filepaths. Queries can take some time, ensure all results are loaded before processing if ArtistID: tracks = myDB.action( 'SELECT Location FROM alltracks WHERE ArtistID = ? AND Location IS NOT NULL UNION SELECT Location FROM tracks WHERE ArtistID = ? AND Location ' 'IS NOT NULL', [ArtistID, ArtistID]) else: tracks = myDB.action( 'SELECT Location FROM alltracks WHERE Location IS NOT NULL UNION SELECT Location FROM tracks WHERE Location IS NOT NULL' ) locations = [] for track in tracks: locations.append(track['Location']) for location in locations: encoded_track_string = location.encode(headphones.SYS_ENCODING, 'replace') if not os.path.isfile(encoded_track_string): myDB.action( 'UPDATE tracks SET Location=?, BitRate=?, Format=? WHERE Location=?', [None, None, None, location]) myDB.action( 'UPDATE alltracks SET Location=?, BitRate=?, Format=? WHERE Location=?', [None, None, None, location]) if ArtistName: del_have_tracks = myDB.select( 'SELECT Location, Matched, ArtistName FROM have WHERE ArtistName = ? COLLATE NOCASE', [ArtistName]) else: del_have_tracks = myDB.select( 'SELECT Location, Matched, ArtistName FROM have') locations = [] for track in del_have_tracks: locations.append([track['Location'], track['ArtistName']]) for location in locations: encoded_track_string = location[0].encode(headphones.SYS_ENCODING, 'replace') if not os.path.isfile(encoded_track_string): if location[1]: # Make sure deleted files get accounted for when updating artist track counts new_artists.append(location[1]) myDB.action('DELETE FROM have WHERE Location=?', [location[0]]) logger.info( 'File %s removed from Headphones, as it is no longer on disk' % encoded_track_string.decode(headphones.SYS_ENCODING, 'replace')) bitrates = [] song_list = [] latest_subdirectory = [] new_song_count = 0 file_count = 0 for r, d, f in helpers.walk_directory(dir): # Filter paths based on config. Note that these methods work directly # on the inputs helpers.path_filter_patterns(d, headphones.CONFIG.IGNORED_FOLDERS, r) helpers.path_filter_patterns(f, headphones.CONFIG.IGNORED_FILES, r) for files in f: # MEDIA_FORMATS = music file extensions, e.g. mp3, flac, etc if any(files.lower().endswith('.' 
+ x.lower()) for x in headphones.MEDIA_FORMATS): subdirectory = r.replace(dir, '') latest_subdirectory.append(subdirectory) if file_count == 0 and r.replace(dir, '') != '': logger.info( "[%s] Now scanning subdirectory %s" % (dir.decode(headphones.SYS_ENCODING, 'replace'), subdirectory.decode(headphones.SYS_ENCODING, 'replace'))) elif latest_subdirectory[file_count] != latest_subdirectory[ file_count - 1] and file_count != 0: logger.info( "[%s] Now scanning subdirectory %s" % (dir.decode(headphones.SYS_ENCODING, 'replace'), subdirectory.decode(headphones.SYS_ENCODING, 'replace'))) song = os.path.join(r, files) # We need the unicode path to use for logging, inserting into database unicode_song_path = song.decode(headphones.SYS_ENCODING, 'replace') # Try to read the metadata try: f = MediaFile(song) except (FileTypeError, UnreadableFileError): logger.warning( "Cannot read media file '%s', skipping. It may be corrupted or not a media file.", unicode_song_path) continue except IOError: logger.warning( "Cannnot read media file '%s', skipping. Does the file exists?", unicode_song_path) continue # Grab the bitrates for the auto detect bit rate option if f.bitrate: bitrates.append(f.bitrate) # Use the album artist over the artist if available if f.albumartist: f_artist = f.albumartist elif f.artist: f_artist = f.artist else: f_artist = None # Add the song to our song list - # TODO: skip adding songs without the minimum requisite information (just a matter of putting together the right if statements) if f_artist and f.album and f.title: CleanName = helpers.clean_name(f_artist + ' ' + f.album + ' ' + f.title) else: CleanName = None controlValueDict = {'Location': unicode_song_path} newValueDict = { 'TrackID': f.mb_trackid, # 'ReleaseID' : f.mb_albumid, 'ArtistName': f_artist, 'AlbumTitle': f.album, 'TrackNumber': f.track, 'TrackLength': f.length, 'Genre': f.genre, 'Date': f.date, 'TrackTitle': f.title, 'BitRate': f.bitrate, 'Format': f.format, 'CleanName': CleanName } # song_list.append(song_dict) check_exist_song = myDB.action( "SELECT * FROM have WHERE Location=?", [unicode_song_path]).fetchone() # Only attempt to match songs that are new, haven't yet been matched, or metadata has changed. if not check_exist_song: # This is a new track if f_artist: new_artists.append(f_artist) myDB.upsert("have", newValueDict, controlValueDict) new_song_count += 1 else: if check_exist_song[ 'ArtistName'] != f_artist or check_exist_song[ 'AlbumTitle'] != f.album or check_exist_song[ 'TrackTitle'] != f.title: # Important track metadata has been modified, need to run matcher again if f_artist and f_artist != check_exist_song[ 'ArtistName']: new_artists.append(f_artist) elif f_artist and f_artist == check_exist_song['ArtistName'] and \ check_exist_song['Matched'] != "Ignored": new_artists.append(f_artist) else: continue newValueDict['Matched'] = None myDB.upsert("have", newValueDict, controlValueDict) myDB.action( 'UPDATE tracks SET Location=?, BitRate=?, Format=? WHERE Location=?', [None, None, None, unicode_song_path]) myDB.action( 'UPDATE alltracks SET Location=?, BitRate=?, Format=? 
WHERE Location=?', [None, None, None, unicode_song_path]) new_song_count += 1 else: # This track information hasn't changed if f_artist and check_exist_song[ 'Matched'] != "Ignored": new_artists.append(f_artist) file_count += 1 # Now we start track matching logger.info("%s new/modified songs found and added to the database" % new_song_count) song_list = myDB.action( "SELECT * FROM have WHERE Matched IS NULL AND LOCATION LIKE ?", [dir.decode(headphones.SYS_ENCODING, 'replace') + "%"]) total_number_of_songs = \ myDB.action("SELECT COUNT(*) FROM have WHERE Matched IS NULL AND LOCATION LIKE ?", [dir.decode(headphones.SYS_ENCODING, 'replace') + "%"]).fetchone()[0] logger.info("Found " + str(total_number_of_songs) + " new/modified tracks in: '" + dir.decode(headphones.SYS_ENCODING, 'replace') + "'. Matching tracks to the appropriate releases....") # Sort the song_list by most vague (e.g. no trackid or releaseid) to most specific (both trackid & releaseid) # When we insert into the database, the tracks with the most specific information will overwrite the more general matches # song_list = helpers.multikeysort(song_list, ['ReleaseID', 'TrackID']) song_list = helpers.multikeysort(song_list, ['ArtistName', 'AlbumTitle']) # We'll use this to give a % completion, just because the track matching might take a while song_count = 0 latest_artist = [] last_completion_percentage = 0 prev_artist_name = None artistid = None for song in song_list: latest_artist.append(song['ArtistName']) if song_count == 0: logger.info("Now matching songs by %s" % song['ArtistName']) elif latest_artist[song_count] != latest_artist[song_count - 1] and song_count != 0: logger.info("Now matching songs by %s" % song['ArtistName']) song_count += 1 completion_percentage = math.floor( float(song_count) / total_number_of_songs * 1000) / 10 if completion_percentage >= (last_completion_percentage + 10): logger.info("Track matching is " + str(completion_percentage) + "% complete") last_completion_percentage = completion_percentage # THE "MORE-SPECIFIC" CLAUSES HERE HAVE ALL BEEN REMOVED. WHEN RUNNING A LIBRARY SCAN, THE ONLY CLAUSES THAT # EVER GOT HIT WERE [ARTIST/ALBUM/TRACK] OR CLEANNAME. ARTISTID & RELEASEID ARE NEVER PASSED TO THIS FUNCTION, # ARE NEVER FOUND, AND THE OTHER CLAUSES WERE NEVER HIT. FURTHERMORE, OTHER MATCHING FUNCTIONS IN THIS PROGRAM # (IMPORTER.PY, MB.PY) SIMPLY DO A [ARTIST/ALBUM/TRACK] OR CLEANNAME MATCH, SO IT'S ALL CONSISTENT. albumid = None if song['ArtistName'] and song['CleanName']: artist_name = song['ArtistName'] clean_name = song['CleanName'] # Only update if artist is in the db if artist_name != prev_artist_name: prev_artist_name = artist_name artistid = None artist_lookup = "\"" + artist_name.replace("\"", "\"\"") + "\"" try: dbartist = myDB.select( 'SELECT DISTINCT ArtistID, ArtistName FROM artists WHERE ArtistName LIKE ' + artist_lookup + '') except: dbartist = None if not dbartist: dbartist = myDB.select( 'SELECT DISTINCT ArtistID, ArtistName FROM tracks WHERE CleanName = ?', [clean_name]) if not dbartist: dbartist = myDB.select( 'SELECT DISTINCT ArtistID, ArtistName FROM alltracks WHERE CleanName = ?', [clean_name]) if not dbartist: clean_artist = helpers.clean_name(artist_name) if clean_artist: dbartist = myDB.select( 'SELECT DISTINCT ArtistID, ArtistName FROM tracks WHERE CleanName >= ? and CleanName < ?', [clean_artist, clean_artist + '{']) if not dbartist: dbartist = myDB.select( 'SELECT DISTINCT ArtistID, ArtistName FROM alltracks WHERE CleanName >= ? 
and CleanName < ?', [clean_artist, clean_artist + '{']) if dbartist: artistid = dbartist[0][0] if artistid: # This was previously using Artist, Album, Title with a SELECT LIKE ? and was not using an index # (Possible issue: https://stackoverflow.com/questions/37845854/python-sqlite3-not-using-index-with-like) # Now selects/updates using CleanName index (may have to revert if not working) # matching on CleanName should be enough, ensure it's the same artist just in case # Update tracks track = myDB.action( 'SELECT AlbumID, ArtistName FROM tracks WHERE CleanName = ? AND ArtistID = ?', [clean_name, artistid]).fetchone() if track: albumid = track['AlbumID'] myDB.action( 'UPDATE tracks SET Location = ?, BitRate = ?, Format = ? WHERE CleanName = ? AND ArtistID = ?', [ song['Location'], song['BitRate'], song['Format'], clean_name, artistid ]) # Update alltracks alltrack = myDB.action( 'SELECT AlbumID, ArtistName FROM alltracks WHERE CleanName = ? AND ArtistID = ?', [clean_name, artistid]).fetchone() if alltrack: albumid = alltrack['AlbumID'] myDB.action( 'UPDATE alltracks SET Location = ?, BitRate = ?, Format = ? WHERE CleanName = ? AND ArtistID = ?', [ song['Location'], song['BitRate'], song['Format'], clean_name, artistid ]) # Update have controlValueDict2 = {'Location': song['Location']} if albumid: newValueDict2 = {'Matched': albumid} else: newValueDict2 = {'Matched': "Failed"} myDB.upsert("have", newValueDict2, controlValueDict2) # myDB.action('INSERT INTO have (ArtistName, AlbumTitle, TrackNumber, TrackTitle, TrackLength, BitRate, Genre, Date, TrackID, Location, CleanName, Format) VALUES( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', [song['ArtistName'], song['AlbumTitle'], song['TrackNumber'], song['TrackTitle'], song['TrackLength'], song['BitRate'], song['Genre'], song['Date'], song['TrackID'], song['Location'], CleanName, song['Format']]) logger.info('Completed matching tracks from directory: %s' % dir.decode(headphones.SYS_ENCODING, 'replace')) if not append or artistScan: logger.info('Updating scanned artist track counts') # Clean up the new artist list unique_artists = {}.fromkeys(new_artists).keys() # # Don't think we need to do this, check the db instead below # # # artist scan # if ArtistName: # current_artists = [[ArtistName]] # # directory scan # else: # current_artists = myDB.select('SELECT ArtistName, ArtistID FROM artists WHERE ArtistName IS NOT NULL') # # # There was a bug where artists with special characters (-,') would show up in new artists. # # # artist_list = scanned artists not in the db # artist_list = [ # x for x in unique_artists # if helpers.clean_name(x).lower() not in [ # helpers.clean_name(y[0]).lower() # for y in current_artists # ] # ] # # # artists_checked = scanned artists that exist in the db # artists_checked = [ # x for x in unique_artists # if helpers.clean_name(x).lower() in [ # helpers.clean_name(y[0]).lower() # for y in current_artists # ] # ] new_artist_list = [] for artist in unique_artists: if not artist: continue logger.info('Processing artist: %s' % artist) # check if artist is already in the db artist_lookup = "\"" + artist.replace("\"", "\"\"") + "\"" try: dbartist = myDB.select( 'SELECT DISTINCT ArtistID, ArtistName FROM artists WHERE ArtistName LIKE ' + artist_lookup + '') except: dbartist = None if not dbartist: clean_artist = helpers.clean_name(artist) if clean_artist: dbartist = myDB.select( 'SELECT DISTINCT ArtistID, ArtistName FROM tracks WHERE CleanName >= ? 
and CleanName < ?', [clean_artist, clean_artist + '{']) if not dbartist: dbartist = myDB.select( 'SELECT DISTINCT ArtistID, ArtistName FROM alltracks WHERE CleanName >= ? and CleanName < ?', [clean_artist, clean_artist + '{']) # new artist not in db, add to list if not dbartist: new_artist_list.append(artist) else: # artist in db, update have track counts artistid = dbartist[0][0] # Have tracks are selected from tracks table and not all tracks because of duplicates # We update the track count upon an album switch to compliment this # havetracks = ( # len(myDB.select( # 'SELECT TrackTitle from tracks WHERE ArtistName like ? AND Location IS NOT NULL', # [artist])) + len(myDB.select( # 'SELECT TrackTitle from have WHERE ArtistName like ? AND Matched = "Failed"', # [artist])) # ) try: havetracks = (len( myDB.select( 'SELECT ArtistID From tracks WHERE ArtistID = ? AND Location IS NOT NULL', [artistid]) ) + len( myDB.select( 'SELECT ArtistName FROM have WHERE ArtistName LIKE ' + artist_lookup + ' AND Matched = "Failed"'))) except Exception as e: logger.warn('Error updating counts for artist: %s: %s' % (artist, e)) # Note: some people complain about having "artist have tracks" > # of tracks total in artist official releases # (can fix by getting rid of second len statement) if havetracks: myDB.action( 'UPDATE artists SET HaveTracks = ? WHERE ArtistID = ?', [havetracks, artistid]) # Update albums to downloaded update_album_status(ArtistID=artistid) logger.info('Found %i new artists' % len(new_artist_list)) # Add scanned artists not in the db if new_artist_list: if headphones.CONFIG.AUTO_ADD_ARTISTS: logger.info('Importing %i new artists' % len(new_artist_list)) importer.artistlist_to_mbids(new_artist_list) else: logger.info( 'To add these artists, go to Manage->Manage New Artists') # myDB.action('DELETE from newartists') for artist in new_artist_list: myDB.action('INSERT OR IGNORE INTO newartists VALUES (?)', [artist]) if headphones.CONFIG.DETECT_BITRATE and bitrates: headphones.CONFIG.PREFERRED_BITRATE = sum(bitrates) / len( bitrates) / 1000 else: # If we're appending a new album to the database, update the artists total track counts logger.info('Updating artist track counts') artist_lookup = "\"" + ArtistName.replace("\"", "\"\"") + "\"" try: havetracks = len( myDB.select( 'SELECT ArtistID FROM tracks WHERE ArtistID = ? AND Location IS NOT NULL', [ArtistID]) ) + len( myDB.select( 'SELECT ArtistName FROM have WHERE ArtistName LIKE ' + artist_lookup + ' AND Matched = "Failed"')) except Exception as e: logger.warn('Error updating counts for artist: %s: %s' % (ArtistName, e)) if havetracks: myDB.action('UPDATE artists SET HaveTracks=? WHERE ArtistID=?', [havetracks, ArtistID]) # Moved above to call for each artist # if not append: # update_album_status() if not append and not artistScan: lastfm.getSimilar() if ArtistName: logger.info('Scanning complete for artist: %s', ArtistName) else: logger.info('Library scan complete')
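# --- Note on the CleanName range queries above (illustrative) ---
# 'WHERE CleanName >= ? AND CleanName < ?' with bounds (clean_artist,
# clean_artist + '{') is an index-friendly prefix match: '{' is the ASCII
# character directly after 'z', so the half-open range selects every
# CleanName that begins with the cleaned artist string. For example,
# assuming helpers.clean_name("Nirvana") lower-cases to "nirvana", the
# bounds ("nirvana", "nirvana{") match "nirvana nevermind ..." but not
# "nirvanb ...", while still using the CleanName index instead of LIKE.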
def addArtisttoDB(artistid, extrasonly=False, forcefull=False, type="artist"): # Putting this here to get around the circular import. We're using this to update thumbnails for artist/albums from headphones import cache # Can't add various artists - throws an error from MB if artistid in blacklisted_special_artists: logger.warn('Cannot import blocked special purpose artist with id' + artistid) return # We'll use this to see if we should update the 'LastUpdated' time stamp errors = False myDB = db.DBConnection() # Delete from blacklist if it's on there myDB.action('DELETE from blacklist WHERE ArtistID=?', [artistid]) # We need the current minimal info in the database instantly # so we don't throw a 500 error when we redirect to the artistPage controlValueDict = {"ArtistID": artistid} # Don't replace a known artist name with an "Artist ID" placeholder dbartist = myDB.action('SELECT * FROM artists WHERE ArtistID=?', [artistid]).fetchone() # Only modify the Include Extras stuff if it's a new artist. We need it early so we know what to fetch if not dbartist: newValueDict = { "ArtistName": "Artist ID: %s" % (artistid), "Status": "Loading", "IncludeExtras": headphones.CONFIG.INCLUDE_EXTRAS, "Extras": headphones.CONFIG.EXTRAS } if type == "series": newValueDict['Type'] = "series" else: newValueDict = {"Status": "Loading"} if dbartist["Type"] == "series": type = "series" myDB.upsert("artists", newValueDict, controlValueDict) if type == "series": artist = mb.getSeries(artistid) else: artist = mb.getArtist(artistid, extrasonly) if artist and artist.get( 'artist_name') in blacklisted_special_artist_names: logger.warn('Cannot import blocked special purpose artist: %s' % artist.get('artist_name')) myDB.action('DELETE from artists WHERE ArtistID=?', [artistid]) # in case it's already in the db myDB.action('DELETE from albums WHERE ArtistID=?', [artistid]) myDB.action('DELETE from tracks WHERE ArtistID=?', [artistid]) return if not artist: logger.warn("Error fetching artist info. ID: " + artistid) if dbartist is None: newValueDict = { "ArtistName": "Fetch failed, try refreshing. (%s)" % (artistid), "Status": "Active" } else: newValueDict = {"Status": "Active"} myDB.upsert("artists", newValueDict, controlValueDict) return if artist['artist_name'].startswith('The '): sortname = artist['artist_name'][4:] else: sortname = artist['artist_name'] logger.info(u"Now adding/updating: " + artist['artist_name']) controlValueDict = {"ArtistID": artistid} newValueDict = { "ArtistName": artist['artist_name'], "ArtistSortName": sortname, "DateAdded": helpers.today(), "Status": "Loading" } myDB.upsert("artists", newValueDict, controlValueDict) # See if we need to grab extras. Artist specific extras take precedence # over global option. Global options are set when adding a new artist try: db_artist = myDB.action( 'SELECT IncludeExtras, Extras from artists WHERE ArtistID=?', [artistid]).fetchone() includeExtras = db_artist['IncludeExtras'] except IndexError: includeExtras = False # Clean all references to release group in dB that are no longer referenced # from the musicbrainz refresh group_list = [] force_repackage = 0 # Don't nuke the database if there's a MusicBrainz error if len(artist['releasegroups']) != 0: for groups in artist['releasegroups']: group_list.append(groups['id']) if not extrasonly: remove_missing_groups_from_albums = myDB.select( "SELECT AlbumID FROM albums WHERE ArtistID=?", [artistid]) else: remove_missing_groups_from_albums = myDB.select( 'SELECT AlbumID FROM albums WHERE ArtistID=? 
AND Status="Skipped" AND Type!="Album"', [artistid]) for items in remove_missing_groups_from_albums: if items['AlbumID'] not in group_list: # Remove all from albums/tracks that aren't in release groups myDB.action("DELETE FROM albums WHERE AlbumID=?", [items['AlbumID']]) myDB.action("DELETE FROM allalbums WHERE AlbumID=?", [items['AlbumID']]) myDB.action("DELETE FROM tracks WHERE AlbumID=?", [items['AlbumID']]) myDB.action("DELETE FROM alltracks WHERE AlbumID=?", [items['AlbumID']]) myDB.action('DELETE from releases WHERE ReleaseGroupID=?', [items['AlbumID']]) logger.info( "[%s] Removing all references to release group %s to reflect MusicBrainz refresh" % (artist['artist_name'], items['AlbumID'])) if not extrasonly: force_repackage = 1 else: if not extrasonly: logger.info( "[%s] There was either an error pulling data from MusicBrainz or there might not be any releases for this category" % artist['artist_name']) # Then search for releases within releasegroups, if releases don't exist, then remove from allalbums/alltracks album_searches = [] for rg in artist['releasegroups']: al_title = rg['title'] today = helpers.today() rgid = rg['id'] skip_log = 0 # Make a user configurable variable to skip update of albums with release dates older than this date (in days) pause_delta = headphones.CONFIG.MB_IGNORE_AGE rg_exists = myDB.action("SELECT * from albums WHERE AlbumID=?", [rg['id']]).fetchone() if not forcefull: new_release_group = False try: check_release_date = rg_exists['ReleaseDate'] except TypeError: check_release_date = None new_release_group = True if new_release_group: logger.info("[%s] Now adding: %s (New Release Group)" % (artist['artist_name'], rg['title'])) new_releases = mb.get_new_releases(rgid, includeExtras) else: if check_release_date is None or check_release_date == u"None": if headphones.CONFIG.MB_IGNORE_AGE_MISSING is not 1: logger.info("[%s] Now updating: %s (No Release Date)" % (artist['artist_name'], rg['title'])) new_releases = mb.get_new_releases( rgid, includeExtras, True) else: logger.info( "[%s] Skipping update of: %s (No Release Date)" % (artist['artist_name'], rg['title'])) new_releases = 0 else: if len(check_release_date) == 10: release_date = check_release_date elif len(check_release_date) == 7: release_date = check_release_date + "-31" elif len(check_release_date) == 4: release_date = check_release_date + "-12-31" else: release_date = today if helpers.get_age(today) - helpers.get_age( release_date) < pause_delta: logger.info( "[%s] Now updating: %s (Release Date <%s Days)", artist['artist_name'], rg['title'], pause_delta) new_releases = mb.get_new_releases( rgid, includeExtras, True) else: logger.info( "[%s] Skipping: %s (Release Date >%s Days)", artist['artist_name'], rg['title'], pause_delta) skip_log = 1 new_releases = 0 if force_repackage == 1: new_releases = -1 logger.info( '[%s] Forcing repackage of %s (Release Group Removed)', artist['artist_name'], al_title) else: new_releases = new_releases else: logger.info("[%s] Now adding/updating: %s (Comprehensive Force)", artist['artist_name'], rg['title']) new_releases = mb.get_new_releases(rgid, includeExtras, forcefull) if new_releases != 0: # Dump existing hybrid release since we're repackaging/replacing it myDB.action("DELETE from albums WHERE ReleaseID=?", [rg['id']]) myDB.action("DELETE from allalbums WHERE ReleaseID=?", [rg['id']]) myDB.action("DELETE from tracks WHERE ReleaseID=?", [rg['id']]) myDB.action("DELETE from alltracks WHERE ReleaseID=?", [rg['id']]) myDB.action('DELETE from releases WHERE 
ReleaseGroupID=?', [rg['id']]) # This will be used later to build a hybrid release fullreleaselist = [] # Search for releases within a release group find_hybrid_releases = myDB.action( "SELECT * from allalbums WHERE AlbumID=?", [rg['id']]) # Build the dictionary for the fullreleaselist for items in find_hybrid_releases: # don't include hybrid information, since that's what we're replacing if items['ReleaseID'] != rg['id']: hybrid_release_id = items['ReleaseID'] newValueDict = { "ArtistID": items['ArtistID'], "ArtistName": items['ArtistName'], "AlbumTitle": items['AlbumTitle'], "AlbumID": items['AlbumID'], "AlbumASIN": items['AlbumASIN'], "ReleaseDate": items['ReleaseDate'], "Type": items['Type'], "ReleaseCountry": items['ReleaseCountry'], "ReleaseFormat": items['ReleaseFormat'] } find_hybrid_tracks = myDB.action( "SELECT * from alltracks WHERE ReleaseID=?", [hybrid_release_id]) totalTracks = 1 hybrid_track_array = [] for hybrid_tracks in find_hybrid_tracks: hybrid_track_array.append({ 'number': hybrid_tracks['TrackNumber'], 'title': hybrid_tracks['TrackTitle'], 'id': hybrid_tracks['TrackID'], # 'url': hybrid_tracks['TrackURL'], 'duration': hybrid_tracks['TrackDuration'] }) totalTracks += 1 newValueDict['ReleaseID'] = hybrid_release_id newValueDict['Tracks'] = hybrid_track_array fullreleaselist.append(newValueDict) # Basically just do the same thing again for the hybrid release # This may end up being called with an empty fullreleaselist try: hybridrelease = getHybridRelease(fullreleaselist) logger.info('[%s] Packaging %s releases into hybrid title' % (artist['artist_name'], rg['title'])) except Exception as e: errors = True logger.warn( '[%s] Unable to get hybrid release information for %s: %s' % (artist['artist_name'], rg['title'], e)) continue # Use the ReleaseGroupID as the ReleaseID for the hybrid release to differentiate it # We can then use the condition WHERE ReleaseID == ReleaseGroupID to select it # The hybrid won't have a country or a format controlValueDict = {"ReleaseID": rg['id']} newValueDict = { "ArtistID": artistid, "ArtistName": artist['artist_name'], "AlbumTitle": rg['title'], "AlbumID": rg['id'], "AlbumASIN": hybridrelease['AlbumASIN'], "ReleaseDate": hybridrelease['ReleaseDate'], "Type": rg['type'] } myDB.upsert("allalbums", newValueDict, controlValueDict) for track in hybridrelease['Tracks']: cleanname = helpers.clean_name(artist['artist_name'] + ' ' + rg['title'] + ' ' + track['title']) controlValueDict = { "TrackID": track['id'], "ReleaseID": rg['id'] } newValueDict = { "ArtistID": artistid, "ArtistName": artist['artist_name'], "AlbumTitle": rg['title'], "AlbumASIN": hybridrelease['AlbumASIN'], "AlbumID": rg['id'], "TrackTitle": track['title'], "TrackDuration": track['duration'], "TrackNumber": track['number'], "CleanName": cleanname } match = myDB.action( 'SELECT Location, BitRate, Format from have WHERE CleanName=?', [cleanname]).fetchone() if not match: match = myDB.action( 'SELECT Location, BitRate, Format from have WHERE ArtistName LIKE ? AND AlbumTitle LIKE ? AND TrackTitle LIKE ?', [artist['artist_name'], rg['title'], track['title'] ]).fetchone() # if not match: # match = myDB.action('SELECT Location, BitRate, Format from have WHERE TrackID=?', [track['id']]).fetchone() if match: newValueDict['Location'] = match['Location'] newValueDict['BitRate'] = match['BitRate'] newValueDict['Format'] = match['Format'] # myDB.action('UPDATE have SET Matched="True" WHERE Location=?', [match['Location']]) myDB.action('UPDATE have SET Matched=? 
WHERE Location=?', (rg['id'], match['Location'])) myDB.upsert("alltracks", newValueDict, controlValueDict) # Delete matched tracks from the have table # myDB.action('DELETE from have WHERE Matched="True"') # If there's no release in the main albums tables, add the default (hybrid) # If there is a release, check the ReleaseID against the AlbumID to see if they differ (user updated) # check if the album already exists if not rg_exists: releaseid = rg['id'] else: releaseid = rg_exists['ReleaseID'] if not releaseid: releaseid = rg['id'] album = myDB.action('SELECT * from allalbums WHERE ReleaseID=?', [releaseid]).fetchone() controlValueDict = {"AlbumID": rg['id']} newValueDict = { "ArtistID": album['ArtistID'], "ArtistName": album['ArtistName'], "AlbumTitle": album['AlbumTitle'], "ReleaseID": album['ReleaseID'], "AlbumASIN": album['AlbumASIN'], "ReleaseDate": album['ReleaseDate'], "Type": album['Type'], "ReleaseCountry": album['ReleaseCountry'], "ReleaseFormat": album['ReleaseFormat'] } if rg_exists: newValueDict['DateAdded'] = rg_exists['DateAdded'] newValueDict['Status'] = rg_exists['Status'] else: today = helpers.today() newValueDict['DateAdded'] = today if headphones.CONFIG.AUTOWANT_ALL: newValueDict['Status'] = "Wanted" elif album[ 'ReleaseDate'] > today and headphones.CONFIG.AUTOWANT_UPCOMING: newValueDict['Status'] = "Wanted" # Sometimes "new" albums are added to musicbrainz after their release date, so let's try to catch these # The first test just makes sure we have year-month-day elif helpers.get_age( album['ReleaseDate'] ) and helpers.get_age(today) - helpers.get_age( album['ReleaseDate'] ) < 21 and headphones.CONFIG.AUTOWANT_UPCOMING: newValueDict['Status'] = "Wanted" else: newValueDict['Status'] = "Skipped" myDB.upsert("albums", newValueDict, controlValueDict) tracks = myDB.action('SELECT * from alltracks WHERE ReleaseID=?', [releaseid]).fetchall() # This is used to see how many tracks you have from an album - to # mark it as downloaded. Default is 80%, can be set in config as # ALBUM_COMPLETION_PCT total_track_count = len(tracks) if total_track_count == 0: logger.warning( "Total track count is zero for Release ID " + "'%s', skipping.", releaseid) continue for track in tracks: controlValueDict = { "TrackID": track['TrackID'], "AlbumID": rg['id'] } newValueDict = { "ArtistID": track['ArtistID'], "ArtistName": track['ArtistName'], "AlbumTitle": track['AlbumTitle'], "AlbumASIN": track['AlbumASIN'], "ReleaseID": track['ReleaseID'], "TrackTitle": track['TrackTitle'], "TrackDuration": track['TrackDuration'], "TrackNumber": track['TrackNumber'], "CleanName": track['CleanName'], "Location": track['Location'], "Format": track['Format'], "BitRate": track['BitRate'] } myDB.upsert("tracks", newValueDict, controlValueDict) # Mark albums as downloaded if they have at least 80% (by default, configurable) of the album have_track_count = len( myDB.select( 'SELECT * from tracks WHERE AlbumID=? AND Location IS NOT NULL', [rg['id']])) marked_as_downloaded = False if rg_exists: if rg_exists['Status'] == 'Skipped' and ( (have_track_count / float(total_track_count)) >= (headphones.CONFIG.ALBUM_COMPLETION_PCT / 100.0)): myDB.action('UPDATE albums SET Status=? WHERE AlbumID=?', ['Downloaded', rg['id']]) marked_as_downloaded = True else: if (have_track_count / float(total_track_count)) >= ( headphones.CONFIG.ALBUM_COMPLETION_PCT / 100.0): myDB.action('UPDATE albums SET Status=? 
WHERE AlbumID=?', ['Downloaded', rg['id']]) marked_as_downloaded = True logger.info(u"[%s] Seeing if we need album art for %s" % (artist['artist_name'], rg['title'])) cache.getThumb(AlbumID=rg['id']) # Start a search for the album if it's new, hasn't been marked as # downloaded and autowant_all is selected. This search is deferred, # in case the search fails and halts the rest of the import. if not rg_exists and not marked_as_downloaded and headphones.CONFIG.AUTOWANT_ALL: album_searches.append(rg['id']) else: if skip_log == 0: logger.info(u"[%s] No new releases, so no changes made to %s" % (artist['artist_name'], rg['title'])) time.sleep(3) finalize_update(artistid, artist['artist_name'], errors) logger.info(u"Seeing if we need album art for: %s" % artist['artist_name']) cache.getThumb(ArtistID=artistid) logger.info(u"Fetching Metacritic reviews for: %s" % artist['artist_name']) metacritic.update(artistid, artist['artist_name'], artist['releasegroups']) if errors: logger.info( "[%s] Finished updating artist: %s but with errors, so not marking it as updated in the database" % (artist['artist_name'], artist['artist_name'])) else: myDB.action('DELETE FROM newartists WHERE ArtistName = ?', [artist['artist_name']]) logger.info(u"Updating complete for: %s" % artist['artist_name']) # Start searching for newly added albums if album_searches: from headphones import searcher logger.info("Start searching for %d albums.", len(album_searches)) for album_search in album_searches: searcher.searchforalbum(albumid=album_search)
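# --- Note on the release-date padding above (illustrative) ---
# Partial MusicBrainz release dates are padded only for the age comparison:
# a 7-character "YYYY-MM" gets "-31" appended and a 4-character "YYYY" gets
# "-12-31", so e.g. "2023" is compared as "2023-12-31". With
# MB_IGNORE_AGE = 365, release groups dated within the last 365 days are
# refreshed, while older ones are skipped unless a comprehensive force
# refresh is requested.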
def libraryScan(dir=None, append=False, ArtistID=None, ArtistName=None, cron=False, artistScan=False): if cron and not headphones.CONFIG.LIBRARYSCAN: return if not dir: if not headphones.CONFIG.MUSIC_DIR: return else: dir = headphones.CONFIG.MUSIC_DIR # If we're appending a dir, it's coming from the post processor which is # already bytestring if not append or artistScan: dir = dir.encode(headphones.SYS_ENCODING) if not os.path.isdir(dir): logger.warn('Cannot find directory: %s. Not scanning' % dir.decode(headphones.SYS_ENCODING, 'replace')) return myDB = db.DBConnection() new_artists = [] logger.info('Scanning music directory: %s' % dir.decode(headphones.SYS_ENCODING, 'replace')) if not append: # Clean up bad filepaths tracks = myDB.select( 'SELECT Location from alltracks WHERE Location IS NOT NULL UNION SELECT Location from tracks WHERE Location IS NOT NULL') for track in tracks: encoded_track_string = track['Location'].encode(headphones.SYS_ENCODING, 'replace') if not os.path.isfile(encoded_track_string): myDB.action('UPDATE tracks SET Location=?, BitRate=?, Format=? WHERE Location=?', [None, None, None, track['Location']]) myDB.action('UPDATE alltracks SET Location=?, BitRate=?, Format=? WHERE Location=?', [None, None, None, track['Location']]) del_have_tracks = myDB.select('SELECT Location, Matched, ArtistName from have') for track in del_have_tracks: encoded_track_string = track['Location'].encode(headphones.SYS_ENCODING, 'replace') if not os.path.isfile(encoded_track_string): if track['ArtistName']: # Make sure deleted files get accounted for when updating artist track counts new_artists.append(track['ArtistName']) myDB.action('DELETE FROM have WHERE Location=?', [track['Location']]) logger.info( 'File %s removed from Headphones, as it is no longer on disk' % encoded_track_string.decode( headphones.SYS_ENCODING, 'replace')) bitrates = [] song_list = [] latest_subdirectory = [] new_song_count = 0 file_count = 0 for r, d, f in helpers.walk_directory(dir): # Filter paths based on config. Note that these methods work directly # on the inputs helpers.path_filter_patterns(d, headphones.CONFIG.IGNORED_FOLDERS, r) helpers.path_filter_patterns(f, headphones.CONFIG.IGNORED_FILES, r) for files in f: # MEDIA_FORMATS = music file extensions, e.g. mp3, flac, etc if any(files.lower().endswith('.' + x.lower()) for x in headphones.MEDIA_FORMATS): subdirectory = r.replace(dir, '') latest_subdirectory.append(subdirectory) if file_count == 0 and r.replace(dir, '') != '': logger.info("[%s] Now scanning subdirectory %s" % ( dir.decode(headphones.SYS_ENCODING, 'replace'), subdirectory.decode(headphones.SYS_ENCODING, 'replace'))) elif latest_subdirectory[file_count] != latest_subdirectory[ file_count - 1] and file_count != 0: logger.info("[%s] Now scanning subdirectory %s" % ( dir.decode(headphones.SYS_ENCODING, 'replace'), subdirectory.decode(headphones.SYS_ENCODING, 'replace'))) song = os.path.join(r, files) # We need the unicode path to use for logging, inserting into database unicode_song_path = song.decode(headphones.SYS_ENCODING, 'replace') # Try to read the metadata try: f = MediaFile(song) except (FileTypeError, UnreadableFileError): logger.warning( "Cannot read media file '%s', skipping. It may be corrupted or not a media file.", unicode_song_path) continue except IOError: logger.warning("Cannnot read media file '%s', skipping. 
Does the file exists?", unicode_song_path) continue # Grab the bitrates for the auto detect bit rate option if f.bitrate: bitrates.append(f.bitrate) # Use the album artist over the artist if available if f.albumartist: f_artist = f.albumartist elif f.artist: f_artist = f.artist else: f_artist = None # Add the song to our song list - # TODO: skip adding songs without the minimum requisite information (just a matter of putting together the right if statements) if f_artist and f.album and f.title: CleanName = helpers.clean_name(f_artist + ' ' + f.album + ' ' + f.title) else: CleanName = None controlValueDict = {'Location': unicode_song_path} newValueDict = {'TrackID': f.mb_trackid, # 'ReleaseID' : f.mb_albumid, 'ArtistName': f_artist, 'AlbumTitle': f.album, 'TrackNumber': f.track, 'TrackLength': f.length, 'Genre': f.genre, 'Date': f.date, 'TrackTitle': f.title, 'BitRate': f.bitrate, 'Format': f.format, 'CleanName': CleanName } # song_list.append(song_dict) check_exist_song = myDB.action("SELECT * FROM have WHERE Location=?", [unicode_song_path]).fetchone() # Only attempt to match songs that are new, haven't yet been matched, or metadata has changed. if not check_exist_song: # This is a new track if f_artist: new_artists.append(f_artist) myDB.upsert("have", newValueDict, controlValueDict) new_song_count += 1 else: if check_exist_song['ArtistName'] != f_artist or check_exist_song[ 'AlbumTitle'] != f.album or check_exist_song['TrackTitle'] != f.title: # Important track metadata has been modified, need to run matcher again if f_artist and f_artist != check_exist_song['ArtistName']: new_artists.append(f_artist) elif f_artist and f_artist == check_exist_song['ArtistName'] and \ check_exist_song['Matched'] != "Ignored": new_artists.append(f_artist) else: continue newValueDict['Matched'] = None myDB.upsert("have", newValueDict, controlValueDict) myDB.action( 'UPDATE tracks SET Location=?, BitRate=?, Format=? WHERE Location=?', [None, None, None, unicode_song_path]) myDB.action( 'UPDATE alltracks SET Location=?, BitRate=?, Format=? WHERE Location=?', [None, None, None, unicode_song_path]) new_song_count += 1 else: # This track information hasn't changed if f_artist and check_exist_song['Matched'] != "Ignored": new_artists.append(f_artist) file_count += 1 # Now we start track matching logger.info("%s new/modified songs found and added to the database" % new_song_count) song_list = myDB.action("SELECT * FROM have WHERE Matched IS NULL AND LOCATION LIKE ?", [dir.decode(headphones.SYS_ENCODING, 'replace') + "%"]) total_number_of_songs = \ myDB.action("SELECT COUNT(*) FROM have WHERE Matched IS NULL AND LOCATION LIKE ?", [dir.decode(headphones.SYS_ENCODING, 'replace') + "%"]).fetchone()[0] logger.info("Found " + str(total_number_of_songs) + " new/modified tracks in: '" + dir.decode( headphones.SYS_ENCODING, 'replace') + "'. Matching tracks to the appropriate releases....") # Sort the song_list by most vague (e.g. 
no trackid or releaseid) to most specific (both trackid & releaseid) # When we insert into the database, the tracks with the most specific information will overwrite the more general matches # song_list = helpers.multikeysort(song_list, ['ReleaseID', 'TrackID']) song_list = helpers.multikeysort(song_list, ['ArtistName', 'AlbumTitle']) # We'll use this to give a % completion, just because the track matching might take a while song_count = 0 latest_artist = [] last_completion_percentage = 0 for song in song_list: latest_artist.append(song['ArtistName']) if song_count == 0: logger.info("Now matching songs by %s" % song['ArtistName']) elif latest_artist[song_count] != latest_artist[song_count - 1] and song_count != 0: logger.info("Now matching songs by %s" % song['ArtistName']) song_count += 1 completion_percentage = math.floor(float(song_count) / total_number_of_songs * 1000) / 10 if completion_percentage >= (last_completion_percentage + 10): logger.info("Track matching is " + str(completion_percentage) + "% complete") last_completion_percentage = completion_percentage # THE "MORE-SPECIFIC" CLAUSES HERE HAVE ALL BEEN REMOVED. WHEN RUNNING A LIBRARY SCAN, THE ONLY CLAUSES THAT # EVER GOT HIT WERE [ARTIST/ALBUM/TRACK] OR CLEANNAME. ARTISTID & RELEASEID ARE NEVER PASSED TO THIS FUNCTION, # ARE NEVER FOUND, AND THE OTHER CLAUSES WERE NEVER HIT. FURTHERMORE, OTHER MATCHING FUNCTIONS IN THIS PROGRAM # (IMPORTER.PY, MB.PY) SIMPLY DO A [ARTIST/ALBUM/TRACK] OR CLEANNAME MATCH, SO IT'S ALL CONSISTENT. if song['ArtistName'] and song['AlbumTitle'] and song['TrackTitle']: track = myDB.action( 'SELECT ArtistName, AlbumTitle, TrackTitle, AlbumID from tracks WHERE ArtistName LIKE ? AND AlbumTitle LIKE ? AND TrackTitle LIKE ?', [song['ArtistName'], song['AlbumTitle'], song['TrackTitle']]).fetchone() have_updated = False if track: controlValueDict = {'ArtistName': track['ArtistName'], 'AlbumTitle': track['AlbumTitle'], 'TrackTitle': track['TrackTitle']} newValueDict = {'Location': song['Location'], 'BitRate': song['BitRate'], 'Format': song['Format']} myDB.upsert("tracks", newValueDict, controlValueDict) controlValueDict2 = {'Location': song['Location']} newValueDict2 = {'Matched': track['AlbumID']} myDB.upsert("have", newValueDict2, controlValueDict2) have_updated = True else: track = myDB.action('SELECT CleanName, AlbumID from tracks WHERE CleanName LIKE ?', [song['CleanName']]).fetchone() if track: controlValueDict = {'CleanName': track['CleanName']} newValueDict = {'Location': song['Location'], 'BitRate': song['BitRate'], 'Format': song['Format']} myDB.upsert("tracks", newValueDict, controlValueDict) controlValueDict2 = {'Location': song['Location']} newValueDict2 = {'Matched': track['AlbumID']} myDB.upsert("have", newValueDict2, controlValueDict2) have_updated = True else: controlValueDict2 = {'Location': song['Location']} newValueDict2 = {'Matched': "Failed"} myDB.upsert("have", newValueDict2, controlValueDict2) have_updated = True alltrack = myDB.action( 'SELECT ArtistName, AlbumTitle, TrackTitle, AlbumID from alltracks WHERE ArtistName LIKE ? AND AlbumTitle LIKE ? 
            alltrack = myDB.action(
                'SELECT ArtistName, AlbumTitle, TrackTitle, AlbumID from alltracks WHERE ArtistName LIKE ? AND AlbumTitle LIKE ? AND TrackTitle LIKE ?',
                [song['ArtistName'], song['AlbumTitle'], song['TrackTitle']]).fetchone()

            if alltrack:
                controlValueDict = {'ArtistName': alltrack['ArtistName'],
                                    'AlbumTitle': alltrack['AlbumTitle'],
                                    'TrackTitle': alltrack['TrackTitle']}
                newValueDict = {'Location': song['Location'],
                                'BitRate': song['BitRate'],
                                'Format': song['Format']}
                myDB.upsert("alltracks", newValueDict, controlValueDict)

                controlValueDict2 = {'Location': song['Location']}
                newValueDict2 = {'Matched': alltrack['AlbumID']}
                myDB.upsert("have", newValueDict2, controlValueDict2)
            else:
                alltrack = myDB.action(
                    'SELECT CleanName, AlbumID from alltracks WHERE CleanName LIKE ?',
                    [song['CleanName']]).fetchone()
                if alltrack:
                    controlValueDict = {'CleanName': alltrack['CleanName']}
                    newValueDict = {'Location': song['Location'],
                                    'BitRate': song['BitRate'],
                                    'Format': song['Format']}
                    myDB.upsert("alltracks", newValueDict, controlValueDict)

                    controlValueDict2 = {'Location': song['Location']}
                    newValueDict2 = {'Matched': alltrack['AlbumID']}
                    myDB.upsert("have", newValueDict2, controlValueDict2)
                else:
                    # alltracks may not exist if adding album manually, have should only be set to failed if not already updated in tracks
                    if not have_updated:
                        controlValueDict2 = {'Location': song['Location']}
                        newValueDict2 = {'Matched': "Failed"}
                        myDB.upsert("have", newValueDict2, controlValueDict2)
        else:
            controlValueDict2 = {'Location': song['Location']}
            newValueDict2 = {'Matched': "Failed"}
            myDB.upsert("have", newValueDict2, controlValueDict2)

        # myDB.action('INSERT INTO have (ArtistName, AlbumTitle, TrackNumber, TrackTitle, TrackLength, BitRate, Genre, Date, TrackID, Location, CleanName, Format) VALUES( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', [song['ArtistName'], song['AlbumTitle'], song['TrackNumber'], song['TrackTitle'], song['TrackLength'], song['BitRate'], song['Genre'], song['Date'], song['TrackID'], song['Location'], CleanName, song['Format']])

    logger.info('Completed matching tracks from directory: %s' %
                dir.decode(headphones.SYS_ENCODING, 'replace'))

    if not append or artistScan:
        logger.info('Updating scanned artist track counts')

        # Clean up the new artist list
        unique_artists = {}.fromkeys(new_artists).keys()
        current_artists = myDB.select('SELECT ArtistName, ArtistID from artists')

        # There was a bug where artists with special characters (-,') would show up in new artists.
        artist_list = [
            x for x in unique_artists
            if helpers.clean_name(x).lower() not in [
                helpers.clean_name(y[0]).lower() for y in current_artists
            ]
        ]
        artists_checked = [
            x for x in unique_artists
            if helpers.clean_name(x).lower() in [
                helpers.clean_name(y[0]).lower() for y in current_artists
            ]
        ]

        # Update track counts
        for artist in artists_checked:
            # Have tracks are selected from tracks table and not all tracks because of duplicates
            # We update the track count upon an album switch to complement this
            havetracks = (
                len(myDB.select(
                    'SELECT TrackTitle from tracks WHERE ArtistName like ? AND Location IS NOT NULL',
                    [artist])) +
                len(myDB.select(
                    'SELECT TrackTitle from have WHERE ArtistName like ? AND Matched = "Failed"',
                    [artist]))
            )
            # Note: some people complain about having "artist have tracks" > # of tracks total in artist official releases
            # (can fix by getting rid of second len statement)
            myDB.action('UPDATE artists SET HaveTracks=? WHERE ArtistName=?', [havetracks, artist])
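        # Worked example (illustrative numbers only) of the HaveTracks figure above: if 25 of an
        # artist's files were matched to releases (rows in "tracks" with a Location) and 3 more
        # files on disk could not be matched (rows in "have" with Matched = "Failed"), then
        # HaveTracks = 25 + 3 = 28. Dropping the second SELECT would keep the count from ever
        # exceeding the artist's official track total, as the note above suggests.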
        logger.info('Found %i new artists' % len(artist_list))

        if artist_list:
            if headphones.CONFIG.AUTO_ADD_ARTISTS:
                logger.info('Importing %i new artists' % len(artist_list))
                importer.artistlist_to_mbids(artist_list)
            else:
                logger.info('To add these artists, go to Manage->Manage New Artists')
                # myDB.action('DELETE from newartists')
                for artist in artist_list:
                    myDB.action('INSERT OR IGNORE INTO newartists VALUES (?)', [artist])

        if headphones.CONFIG.DETECT_BITRATE and bitrates:
            headphones.CONFIG.PREFERRED_BITRATE = sum(bitrates) / len(bitrates) / 1000

    else:
        # If we're appending a new album to the database, update the artists total track counts
        logger.info('Updating artist track counts')

        havetracks = len(
            myDB.select('SELECT TrackTitle from tracks WHERE ArtistID=? AND Location IS NOT NULL',
                        [ArtistID])) + len(myDB.select(
                            'SELECT TrackTitle from have WHERE ArtistName like ? AND Matched = "Failed"',
                            [ArtistName]))

        myDB.action('UPDATE artists SET HaveTracks=? WHERE ArtistID=?', [havetracks, ArtistID])

    if not append:
        update_album_status()

    if not append and not artistScan:
        lastfm.getSimilar()

    logger.info('Library scan complete')
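
# Illustrative sketch (not part of Headphones) of the bitrate auto-detection used in
# libraryScan() above: MediaFile reports bitrate in bits per second, so the preferred
# bitrate becomes the average of everything scanned, converted to kbps. The helper name
# below is hypothetical.
def _average_bitrate_kbps(bitrates):
    # Mirror the "DETECT_BITRATE and bitrates" guard above so a scan that found no
    # readable files doesn't divide by zero
    if not bitrates:
        return None
    return sum(bitrates) // len(bitrates) // 1000

# e.g. _average_bitrate_kbps([320000, 256000, 192000]) -> 256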
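# Typical ways libraryScan() gets invoked (hedged sketch; parameter names taken from the
# branches above -- append, ArtistID, ArtistName, artistScan -- the exact signature may
# differ between Headphones versions, so treat these as illustrations only):
#
#     libraryScan()                              # full scan of the configured music directory
#     libraryScan(dir='/music/Artist/Album',     # post-processor appending a single album,
#                 append=True,                   # with a hypothetical MusicBrainz id/name
#                 ArtistID='mbid-goes-here',
#                 ArtistName='Artist')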