def addReleaseById(rid):
    """Resolve a ReleaseID to its ReleaseGroupID and ArtistID.

    Checks the local releases/albums cache first; on a miss, falls back
    to a MusicBrainz lookup via mb.getRelease().
    """
    myDB = db.DBConnection()
    rgid = None
    artistid = None
    release_dict = None
    results = myDB.select(
        "SELECT albums.ArtistID, releases.ReleaseGroupID from releases, albums WHERE releases.ReleaseID=? and releases.ReleaseGroupID=albums.AlbumID LIMIT 1",
        [rid],
    )
    for result in results:
        rgid = result["ReleaseGroupID"]
        artistid = result["ArtistID"]
        logger.debug("Found a cached releaseid : releasegroupid relationship: " + rid + " : " + rgid)
    if not rgid:
        # didn't find it in the cache, get the information from MB
        logger.debug("Didn't find releaseID " + rid + " in the cache. Looking up its ReleaseGroupID")
        try:
            release_dict = mb.getRelease(rid)
        except Exception as e:  # was "except Exception, e" -- Python-2-only syntax
            logger.info("Unable to get release information for Release: " + str(rid) + " " + str(e))
            return
        if not release_dict:
            logger.info("Unable to get release information for Release: " + str(rid) + " no dict")
            return
        rgid = release_dict["rgid"]
        artistid = release_dict["artist_id"]
def shutdown(restart=False, update=False):
    # Gracefully stop the web server and scheduler, persist the config,
    # then optionally self-update and/or restart the process.
    # Always terminates the interpreter via os._exit(0) at the end.
    cherrypy.engine.exit()
    SCHED.shutdown(wait=False)
    CONFIG.write()
    if not restart and not update:
        logger.info('Headphones is shutting down...')
    if update:
        logger.info('Headphones is updating...')
        try:
            versioncheck.update()
        except Exception as e:
            # Update failure is non-fatal: log and continue to restart/exit.
            logger.warn('Headphones failed to update: %s. Restarting.', e)
    if CREATEPID:
        logger.info('Removing pidfile %s', PIDFILE)
        os.remove(PIDFILE)
    if restart:
        logger.info('Headphones is restarting...')
        # Re-exec the same interpreter with the original arguments.
        popen_list = [sys.executable, FULL_PATH]
        popen_list += ARGS
        if '--nolaunch' not in popen_list:
            # Don't re-open the browser for the restarted instance.
            popen_list += ['--nolaunch']
        logger.info('Restarting Headphones with %s', popen_list)
        subprocess.Popen(popen_list, cwd=os.getcwd())
    os._exit(0)
def editSearchTerm(self, AlbumID, SearchTerm):
    """Persist a custom search term for an album, then redirect to its page."""
    logger.info(u"Updating search term for albumid: " + AlbumID)
    connection = db.DBConnection()
    connection.upsert(
        "albums",
        {'SearchTerm': SearchTerm},
        {'AlbumID': AlbumID},
    )
    raise cherrypy.HTTPRedirect("albumPage?AlbumID=%s" % AlbumID)
def deleteArtist(self, ArtistID):
    """Remove an artist and all of its albums and tracks from the database."""
    logger.info(u"Deleting all traces of artist: " + ArtistID)
    myDB = db.DBConnection()
    # Same three DELETEs as before, driven by a table list.
    for table in ("artists", "albums", "tracks"):
        myDB.action("DELETE from %s WHERE ArtistID=?" % table, [ArtistID])
    raise cherrypy.HTTPRedirect("home")
def smartMove(src, dest, delete=True):
    # Move (delete=True) or copy (delete=False) 'src' into directory 'dest'.
    # If a file of the same name already exists at the destination, the
    # source file is first renamed with a "(n)" suffix. Returns True on
    # success; on failure a warning is logged and None is returned.
    from headphones import logger
    source_dir = os.path.dirname(src)
    filename = os.path.basename(src)
    if os.path.isfile(os.path.join(dest, filename)):
        logger.info('Destination file exists: %s', os.path.join(dest, filename))
        title = os.path.splitext(filename)[0]
        ext = os.path.splitext(filename)[1]
        i = 1
        while True:
            # Probe "title(1).ext", "title(2).ext", ... until a free name is found.
            newfile = title + '(' + str(i) + ')' + ext
            if os.path.isfile(os.path.join(dest, newfile)):
                i += 1
            else:
                logger.info('Renaming to %s', newfile)
                try:
                    # Rename inside the source dir; the move/copy below then
                    # uses the new, non-colliding name.
                    os.rename(src, os.path.join(source_dir, newfile))
                    filename = newfile
                except Exception as e:
                    # NOTE(review): on rename failure we fall through and still
                    # attempt the move below with the original (colliding) name.
                    logger.warn('Error renaming %s: %s',
                                src.decode(headphones.SYS_ENCODING, 'replace'), e)
                break
    try:
        if delete:
            shutil.move(os.path.join(source_dir, filename),
                        os.path.join(dest, filename))
        else:
            shutil.copy(os.path.join(source_dir, filename),
                        os.path.join(dest, filename))
        return True
    except Exception as e:
        logger.warn('Error moving file %s: %s',
                    filename.decode(headphones.SYS_ENCODING, 'replace'), e)
def getArtists():
    """Import new artists from the configured Last.FM user's library.

    Fixes: the username guard now runs before the database query (no point
    opening the DB when there is nothing to do), and artists whose Last.FM
    entry has an empty MusicBrainz id are skipped instead of being sent to
    the importer as "".
    """
    if not headphones.LASTFM_USERNAME:
        logger.warn("Last.FM username not set, not importing artists.")
        return

    myDB = db.DBConnection()
    results = myDB.select("SELECT ArtistID from artists")

    logger.info("Fetching artists from Last.FM for username: %s",
                headphones.LASTFM_USERNAME)
    data = request_lastfm("library.getartists", limit=10000,
                          user=headphones.LASTFM_USERNAME)

    if data and "artists" in data:
        artistlist = []
        artists = data["artists"]["artist"]
        logger.debug("Fetched %d artists from Last.FM", len(artists))

        for artist in artists:
            artist_mbid = artist["mbid"]
            # Skip artists Last.FM has no MBID for -- an empty id would
            # otherwise be queued and handed to the importer.
            if not artist_mbid:
                continue
            if not any(artist_mbid in x for x in results):
                artistlist.append(artist_mbid)

        from headphones import importer

        for artistid in artistlist:
            importer.addArtisttoDB(artistid)

        logger.info("Imported %d new artists from Last.FM", len(artistlist))
def removeTorrent(torrentid, remove_data=False):
    """Remove a Transmission torrent once it has finished seeding.

    When remove_data is True the downloaded data is deleted as well.
    Returns True only if the torrent had finished seeding and was removed.

    Fix: the bare "except:" (which hid every error, including typos) is
    narrowed to the lookup failures it was actually guarding against.
    """
    method = "torrent-get"
    arguments = {"ids": torrentid, "fields": ["isFinished", "name"]}

    response = torrentAction(method, arguments)
    if not response:
        return False

    try:
        finished = response["arguments"]["torrents"][0]["isFinished"]
        name = response["arguments"]["torrents"][0]["name"]
    except (KeyError, IndexError):
        # Torrent not found or unexpected response shape.
        return False

    if finished:
        logger.info("%s has finished seeding, removing torrent and data" % name)
        method = "torrent-remove"
        if remove_data:
            arguments = {"delete-local-data": True, "ids": torrentid}
        else:
            arguments = {"ids": torrentid}
        torrentAction(method, arguments)
        return True

    logger.info(
        "%s has not finished seeding yet, torrent will not be removed, will try again on next run"
        % name
    )
    return False
def unqueueAlbum(self, AlbumID, ArtistID):
    """Mark an album as Skipped and return to the artist page."""
    # Fixed log message: was missing the space before "as", producing
    # e.g. "Marking album: 1234as skipped...".
    logger.info(u"Marking album: " + AlbumID + " as skipped...")
    myDB = db.DBConnection()
    controlValueDict = {"AlbumID": AlbumID}
    newValueDict = {"Status": "Skipped"}
    myDB.upsert("albums", newValueDict, controlValueDict)
    raise cherrypy.HTTPRedirect("artistPage?ArtistID=%s" % ArtistID)
def addTorrent(link):
    """Send a torrent (local .torrent path, URL or magnet) to Transmission.

    Returns the torrent's hash string on success, False otherwise.
    """
    if link.endswith(".torrent"):
        # Local .torrent file: transmit its contents base64-encoded.
        with open(link, "rb") as handle:
            payload = {
                "metainfo": str(base64.b64encode(handle.read())),
                "download-dir": headphones.CONFIG.DOWNLOAD_TORRENT_DIR,
            }
    else:
        payload = {
            "filename": link,
            "download-dir": headphones.CONFIG.DOWNLOAD_TORRENT_DIR,
        }

    response = torrentAction("torrent-add", payload)
    if not response:
        return False

    if response["result"] != "success":
        logger.info("Transmission returned status %s" % response["result"])
        return False

    args = response["arguments"]
    if "torrent-added" in args:
        retid = args["torrent-added"]["hashString"]
    elif "torrent-duplicate" in args:
        retid = args["torrent-duplicate"]["hashString"]
    else:
        retid = False
    logger.info(u"Torrent sent to Transmission successfully")
    return retid
def clearhistory(self):
    """Wipe the snatch history table, then redirect to the history page."""
    logger.info(u"Clearing history")
    db.DBConnection().action('''DELETE from snatched''')
    raise cherrypy.HTTPRedirect("history")
def addAlbumArt(artwork, albumpath, release):
    """Write fetched album art into the album folder, naming the file from
    the user's ALBUM_ART_FORMAT pattern.

    Fix: the leading-dot guard called str.replace(0, '_'), which raises
    TypeError (replace takes strings, not an index); the file is now
    written via a context manager and the py2-only except syntax is gone.
    """
    logger.info('Adding album art to folder')

    try:
        year = release['ReleaseDate'][:4]
    except TypeError:
        # ReleaseDate can be None for some releases.
        year = ''

    values = {
        '$Artist': release['ArtistName'],
        '$Album': release['AlbumTitle'],
        '$Year': year,
        '$artist': release['ArtistName'].lower(),
        '$album': release['AlbumTitle'].lower(),
        '$year': year
    }

    album_art_name = helpers.replace_all(headphones.ALBUM_ART_FORMAT.strip(), values) + ".jpg"
    album_art_name = helpers.replace_illegal_chars(album_art_name).encode(headphones.SYS_ENCODING, 'replace')

    if headphones.FILE_UNDERSCORES:
        album_art_name = album_art_name.replace(' ', '_')

    if album_art_name.startswith('.'):
        # Don't create a hidden file: swap the leading dot for an underscore.
        album_art_name = '_' + album_art_name[1:]

    try:
        with open(os.path.join(albumpath, album_art_name), 'wb') as artwork_file:
            artwork_file.write(artwork)
    except Exception as e:
        logger.error('Error saving album art: %s' % str(e))
        return
def resumeArtist(self, ArtistID):
    """Set an artist's status back to Active, then reload the artist page."""
    logger.info(u"Resuming artist: " + ArtistID)
    db.DBConnection().upsert(
        "artists",
        {"Status": "Active"},
        {"ArtistID": ArtistID},
    )
    raise cherrypy.HTTPRedirect("artistPage?ArtistID=%s" % ArtistID)
def embedLyrics(downloaded_track_list):
    """Fetch lyrics for each downloaded track and embed them in the tags.

    Bug fix: the write guard used to test "if lyrics:" -- the imported
    lyrics MODULE, which is always truthy -- so files were rewritten even
    when no lyrics were found. It now tests the fetched text.
    """
    logger.info('Adding lyrics')

    # TODO: If adding lyrics for flac & lossy, only fetch the lyrics once
    # and apply it to both files
    for downloaded_track in downloaded_track_list:
        try:
            f = MediaFile(downloaded_track)
        except Exception:
            # Was a bare except; keep the skip-on-unreadable behavior.
            logger.error('Could not read %s. Not checking lyrics' % downloaded_track.decode(headphones.SYS_ENCODING, 'replace'))
            continue

        if f.albumartist and f.title:
            metalyrics = lyrics.getLyrics(f.albumartist, f.title)
        elif f.artist and f.title:
            metalyrics = lyrics.getLyrics(f.artist, f.title)
        else:
            logger.info('No artist/track metadata found for track: %s. Not fetching lyrics' % downloaded_track.decode(headphones.SYS_ENCODING, 'replace'))
            metalyrics = None

        if metalyrics:
            logger.debug('Adding lyrics to: %s' % downloaded_track.decode(headphones.SYS_ENCODING, 'replace'))
            f.lyrics = metalyrics
            f.save()
def addAlbumArt(artwork, albumpath):
    """Write album artwork bytes to 'folder.jpg' inside the album folder.

    Uses a context manager so the handle is closed even if write() fails,
    and no longer shadows the 'file' builtin.
    """
    logger.info('Adding album art to folder')

    artwork_file_name = os.path.join(albumpath, 'folder.jpg')
    with open(artwork_file_name, 'wb') as artwork_file:
        artwork_file.write(artwork)
def notify(self, message, status):
    """Send a note-type push via PushBullet; returns True on success."""
    if not headphones.CONFIG.PUSHBULLET_ENABLED:
        return

    push = {
        'type': "note",
        'title': "Headphones",
        'body': message + ': ' + status,
    }
    if self.deviceid:
        push['device_iden'] = self.deviceid

    response = request.request_json(
        "https://api.pushbullet.com/v2/pushes",
        method="post",
        headers={
            'Content-type': "application/json",
            'Authorization': 'Bearer ' + headphones.CONFIG.PUSHBULLET_APIKEY,
        },
        data=json.dumps(push),
    )

    if not response:
        logger.info(u"PushBullet notification failed.")
        return False
    logger.info(u"PushBullet notifications sent.")
    return True
def searchurl(self, artist, album, year, format):
    """ Return the search url """
    # Build the free-text term. For "Various Artists" the artist name is
    # dropped but the separating space is kept, matching the historical
    # query format (leading space included).
    if artist != 'Various Artists':
        searchterm = artist + ' ' + album + ' ' + year
    else:
        searchterm = ' ' + album + ' ' + year

    # Map the requested quality to a rutracker format filter and the
    # maximum acceptable release size (bytes).
    format_map = {
        'lossless': ('+lossless', 10000000000),
        'lossless+mp3': ('+lossless||mp3||aac', 10000000000),
    }
    format, self.maxsize = format_map.get(format, ('+mp3||aac', 300000000))

    # sort by size, descending.
    sort = '&o=7&s=2'

    searchurl = "%s?nm=%s%s%s" % (self.search_referer, urllib.quote(searchterm), format, sort)
    logger.info("Searching rutracker using term: %s", searchterm)
    return searchurl
def notify(self, message, event):
    """Send a notification through the Join API; True on success.

    Bug fix: the device-id query parameter used to be appended to
    self.url on every call ("self.url += ..."), so each successive
    notification built an ever-growing, invalid URL. The final URL is
    now assembled in a local variable.
    """
    if not headphones.CONFIG.JOIN_ENABLED or \
            not headphones.CONFIG.JOIN_APIKEY:
        return

    icon = "https://cdn.rawgit.com/Headphones/" \
           "headphones/develop/data/images/headphoneslogo.png"

    if not self.deviceid:
        self.deviceid = "group.all"

    devices = [x.strip() for x in self.deviceid.split(',')]
    # Join uses 'deviceIds' for a list of targets, 'deviceId' for one.
    if len(devices) > 1:
        url = self.url + '&deviceIds={deviceid}'
    else:
        url = self.url + '&deviceId={deviceid}'

    response = urllib2.urlopen(url.format(
        apikey=self.apikey,
        title=quote_plus(event),
        text=quote_plus(message.encode("utf-8")),
        icon=icon,
        deviceid=self.deviceid))

    if response:
        logger.info(u"Join notifications sent.")
        return True
    else:
        logger.error(u"Join notification failed.")
        return False
def notify(self, path):
    """Ask the Synology synoindex utility to index a new file (-a) or
    folder (-A) at 'path'."""
    path = os.path.abspath(path)

    if not self.util_exists():
        logger.warn(
            "Error sending notification: synoindex utility "
            "not found at %s" % self.util_loc)
        return

    if os.path.isfile(path):
        cmd_arg = '-a'
    elif os.path.isdir(path):
        cmd_arg = '-A'
    else:
        logger.warn(
            "Error sending notification: Path passed to synoindex "
            "was not a file or folder.")
        return

    cmd = [self.util_loc, cmd_arg, path]
    logger.info("Calling synoindex command: %s" % str(cmd))

    try:
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             cwd=headphones.PROG_DIR)
        out, error = p.communicate()
        # synoindex never returns any codes other than '0',
        # highly irritating
    except OSError as e:  # was "except OSError, e" -- Python-2-only syntax
        logger.warn("Error sending notification: %s" % str(e))
def login(self):
    """ Logs in user """
    loginpage = 'http://login.rutracker.org/forum/login.php'
    post_params = {
        'login_username': headphones.CONFIG.RUTRACKER_USER,
        'login_password': headphones.CONFIG.RUTRACKER_PASSWORD,
        # Submit-button value the site expects: cp1251-encoded bytes
        # (URL-encoded form shown in the trailing comment).
        'login': b'\xc2\xf5\xee\xe4'  # '%C2%F5%EE%E4'
    }

    logger.info("Attempting to log in to rutracker...")
    try:
        r = self.session.post(loginpage, data=post_params, timeout=self.timeout,
                              allow_redirects=False)
        # try again
        if not self.has_bb_data_cookie(r):
            # No session cookie on the first attempt: back off briefly and
            # retry once before declaring failure.
            time.sleep(10)
            r = self.session.post(loginpage, data=post_params, timeout=self.timeout,
                                  allow_redirects=False)
        if self.has_bb_data_cookie(r):
            self.loggedin = True
            logger.info("Successfully logged in to rutracker")
        else:
            logger.error(
                "Could not login to rutracker, credentials maybe incorrect, site is down or too many attempts. Try again later")
            self.loggedin = False
        return self.loggedin
    except Exception as e:
        logger.error("Unknown error logging in to rutracker: %s" % e)
        self.loggedin = False
        return self.loggedin
def addTorrent(link, data=None):
    """Add a torrent to Transmission from raw metainfo bytes, a local
    .torrent file, or a URL/magnet link.

    Returns the resulting hash string, or False on failure.
    """
    is_local_file = link.endswith('.torrent') and not link.startswith('http')
    if is_local_file or data:
        if data:
            raw = data
        else:
            with open(link, 'rb') as handle:
                raw = handle.read()
        arguments = {'metainfo': str(base64.b64encode(raw)),
                     'download-dir': headphones.CONFIG.DOWNLOAD_TORRENT_DIR}
    else:
        arguments = {'filename': link,
                     'download-dir': headphones.CONFIG.DOWNLOAD_TORRENT_DIR}

    response = torrentAction('torrent-add', arguments)
    if not response:
        return False

    if response['result'] != 'success':
        logger.info('Transmission returned status %s' % response['result'])
        return False

    result_args = response['arguments']
    if 'torrent-added' in result_args:
        retid = result_args['torrent-added']['hashString']
    elif 'torrent-duplicate' in result_args:
        retid = result_args['torrent-duplicate']['hashString']
    else:
        retid = False
    logger.info(u"Torrent sent to Transmission successfully")
    return retid
def generateAPI(self):
    """Generate and return a new 32-character hex API key.

    Security fix: the key is derived from os.urandom instead of
    random.getrandbits -- the Mersenne Twister is predictable and not
    suitable for security tokens. The returned format (32 lowercase hex
    characters) is unchanged.
    """
    import hashlib
    import os

    apikey = hashlib.sha224(os.urandom(32)).hexdigest()[0:32]
    logger.info("New API generated")
    return apikey
def extract_data(s):
    """Parse "Artist - Album [year]" (Headphones) or "Artist - Album (year)"
    (Newzbin) strings into an (artist, album, year) tuple.

    Fixes: the fall-through return used to reference undefined locals and
    raised NameError when neither format matched -- it now returns
    (None, None, None); the Newzbin pattern's year group no longer
    captures the trailing ')'.
    """
    from headphones import logger

    # headphones default format
    pattern = re.compile(r'(?P<name>.*?)\s\-\s(?P<album>.*?)\s\[(?P<year>.*?)\]', re.VERBOSE)
    match = pattern.match(s)
    if match:
        return (match.group("name"), match.group("album"), match.group("year"))
    logger.info("Couldn't parse " + s + " into a valid default format")

    # newzbin default format
    pattern = re.compile(r'(?P<name>.*?)\s\-\s(?P<album>.*?)\s\((?P<year>\d+?)\)', re.VERBOSE)
    match = pattern.match(s)
    if match:
        return (match.group("name"), match.group("album"), match.group("year"))
    logger.info("Couldn't parse " + s + " into a valid Newbin format")
    return (None, None, None)
def extract_song_data(s):
    """Parse "Artist - Album [year]" or "Artist - Album (year)" strings
    into an (artist, album, year) tuple.

    Fixes vs. the original: removed the four unused locals
    (music_dir/folder_format/file_format/full_format); the Newzbin
    pattern's year group no longer captures the trailing ')'; the
    fall-through return no longer references undefined names (which
    raised NameError) and yields (None, None, None) instead.
    """
    # headphones default format
    pattern = re.compile(r'(?P<name>.*?)\s\-\s(?P<album>.*?)\s\[(?P<year>.*?)\]', re.VERBOSE)
    match = pattern.match(s)
    if match:
        return (match.group("name"), match.group("album"), match.group("year"))
    logger.info("Couldn't parse " + s + " into a valid default format")

    # newzbin default format
    pattern = re.compile(r'(?P<name>.*?)\s\-\s(?P<album>.*?)\s\((?P<year>\d+?)\)', re.VERBOSE)
    match = pattern.match(s)
    if match:
        return (match.group("name"), match.group("album"), match.group("year"))
    logger.info("Couldn't parse " + s + " into a valid Newbin format")
    return (None, None, None)
def addAlbumArt(artwork, albumpath, release):
    """Write album art into the album folder using the configured
    ALBUM_ART_FORMAT name, honoring the configured umask.

    Fixes: the leading-dot guard called str.replace(0, '_'), which raises
    TypeError; the previous umask is now restored in a finally block so
    a failed write no longer leaks the modified umask.
    """
    logger.info('Adding album art to folder')

    try:
        year = release['ReleaseDate'][:4]
    except TypeError:
        # ReleaseDate can be None for some releases.
        year = ''

    values = {
        '$Artist': release['ArtistName'],
        '$Album': release['AlbumTitle'],
        '$Year': year,
        '$artist': release['ArtistName'].lower(),
        '$album': release['AlbumTitle'].lower(),
        '$year': year
    }

    album_art_name = helpers.replace_all(headphones.ALBUM_ART_FORMAT.strip(), values).replace('/', '_') + ".jpg"
    album_art_name = album_art_name.replace('?', '_').replace(':', '_').encode(headphones.SYS_ENCODING, 'replace')

    if headphones.FILE_UNDERSCORES:
        album_art_name = album_art_name.replace(' ', '_')

    if album_art_name.startswith('.'):
        # Don't create a hidden file: swap the leading dot for an underscore.
        album_art_name = '_' + album_art_name[1:]

    prev = os.umask(headphones.UMASK)
    try:
        with open(os.path.join(albumpath, album_art_name), 'wb') as artwork_file:
            artwork_file.write(artwork)
    finally:
        # Always restore the previous umask, even if the write fails.
        os.umask(prev)
def doPostProcessing(albumid, albumpath, release, tracks, downloaded_track_list, Kind=None):
    """Post-process a completed download.

    For torrent downloads with KEEP_TORRENT_FILES set, the files are first
    copied into a 'headphones-modified' subfolder (so the originals keep
    seeding) and that folder is re-scanned for media files and cue sheets.
    """
    logger.info('Starting post-processing for: %s - %s' % (release['ArtistName'], release['AlbumTitle']))

    # Check to see if we're preserving the torrent dir
    if headphones.KEEP_TORRENT_FILES and Kind == "torrent":
        new_folder = os.path.join(albumpath, 'headphones-modified'.encode(headphones.SYS_ENCODING, 'replace'))
        logger.info("Copying files to 'headphones-modified' subfolder to preserve downloaded files for seeding")
        try:
            shutil.copytree(albumpath, new_folder)
            # Update the album path with the new location
            albumpath = new_folder
        except Exception as e:  # was "except Exception, e" -- Python-2-only syntax
            logger.warn("Cannot copy/move files to temp folder: " + new_folder.decode(headphones.SYS_ENCODING, 'replace') + ". Not continuing. Error: " + str(e))
            return

        # Need to update the downloaded track list with the new location.
        # Could probably just throw in the "headphones-modified" folder,
        # but this is good to make sure we're not counting files that may have failed to move
        downloaded_track_list = []
        downloaded_cuecount = 0
        for r, d, f in os.walk(albumpath):
            for files in f:
                if any(files.lower().endswith('.' + x.lower()) for x in headphones.MEDIA_FORMATS):
                    downloaded_track_list.append(os.path.join(r, files))
                elif files.lower().endswith('.cue'):
                    downloaded_cuecount += 1
def preprocess(resultlist):
    # Return the first result whose NZB can be fetched and parsed and whose
    # files all fall inside the configured usenet retention window, as a
    # (nzb_contents, result) pair; (False, False) when none qualifies.
    if not headphones.USENET_RETENTION:
        # Sensible default window (days) when retention is unset.
        usenet_retention = 2000
    else:
        usenet_retention = int(headphones.USENET_RETENTION)

    for result in resultlist:
        nzb = getresultNZB(result)
        if nzb:
            try:
                d = minidom.parseString(nzb)
                node = d.documentElement
                nzbfiles = d.getElementsByTagName("file")
                skipping = False
                for nzbfile in nzbfiles:
                    # 'date' is a unix timestamp; reject the whole NZB if any
                    # file is older than the retention window.
                    if int(nzbfile.getAttribute("date")) < (time.time() - usenet_retention * 86400):
                        logger.info('NZB contains a file out of your retention. Skipping.')
                        skipping = True
                        break
                if skipping:
                    continue
                #TODO: Do we want rar checking in here to try to keep unknowns out?
                #or at least the option to do so?
            except ExpatError:
                logger.error('Unable to parse the best result NZB. Skipping.')
                continue
            # First acceptable result wins.
            return nzb, result
        else:
            logger.error("Couldn't retrieve the best nzb. Skipping.")
    return (False, False)
def verifyresult(title, artistterm, term):
    """Check that every significant token of the search term appears in a
    result title; returns False (and logs) when a token is missing.

    Fixes: the inner guards were written "if not not re.search(...)",
    which inverted the tests and rejected titles that DID contain the
    cleaned token; and re.split's third positional argument is maxsplit,
    not flags, so the original silently capped splitting at 34 tokens.
    """
    title = re.sub(r'[\.\-\/\_]', ' ', title)

    #if artistterm != 'Various Artists':
    #
    #    if not re.search('^' + re.escape(artistterm), title, re.IGNORECASE):
    #        #logger.info("Removed from results: " + title + " (artist not at string start).")
    #        #return False
    #    elif re.search(re.escape(artistterm) + '\w', title, re.IGNORECASE | re.UNICODE):
    #        logger.info("Removed from results: " + title + " (post substring result).")
    #        return False
    #    elif re.search('\w' + re.escape(artistterm), title, re.IGNORECASE | re.UNICODE):
    #        logger.info("Removed from results: " + title + " (pre substring result).")
    #        return False

    # another attempt to weed out substrings. We don't want "Vol III" when
    # we were looking for "Vol II"
    tokens = re.split(r'\W', term, flags=re.IGNORECASE | re.UNICODE)
    for token in tokens:
        if not token:
            continue
        if token in ('Various', 'Artists', 'VA'):
            continue
        if not re.search(r'(?:\W|^)+' + token + r'(?:\W|$)+', title, re.IGNORECASE | re.UNICODE):
            # Token missing verbatim: retry with punctuation stripped,
            # then with leet-speak substitutions before rejecting.
            cleantoken = ''.join(c for c in token if c not in string.punctuation)
            if not re.search(r'(?:\W|^)+' + cleantoken + r'(?:\W|$)+', title, re.IGNORECASE | re.UNICODE):
                dic = {'!': 'i', '$': 's'}
                dumbtoken = helpers.replace_all(token, dic)
                if not re.search(r'(?:\W|^)+' + dumbtoken + r'(?:\W|$)+', title, re.IGNORECASE | re.UNICODE):
                    logger.info("Removed from results: " + title + " (missing tokens: " + token + " and " + cleantoken + ")")
                    return False

    return True
def pauseArtist(self, ArtistID):
    """Set an artist's status to Paused, then reload the artist page."""
    logger.info(u"Pausing artist: " + ArtistID)
    db.DBConnection().upsert(
        "artists",
        {'Status': 'Paused'},
        {'ArtistID': ArtistID},
    )
    raise cherrypy.HTTPRedirect("artistPage?ArtistID=%s" % ArtistID)
def notify(self, artist, album, albumartpath):
    """Pop up a library-update notification on each configured XBMC host.

    Fixes: the per-host log message said "XMBC" instead of "XBMC"; the
    local variable 'time' shadowed the time module and is renamed.
    """
    hosts = [x.strip() for x in self.hosts.split(',')]

    header = "Headphones"
    message = "%s - %s added to your library" % (artist, album)
    display_time = "3000"  # in ms

    for host in hosts:
        logger.info('Sending notification command to XBMC @ ' + host)
        try:
            version = self._sendjson(host, 'Application.GetProperties',
                                     {'properties': ['version']})['version']['major']

            if version < 12:  # Eden: notification via the builtin command
                notification = header + "," + message + "," + display_time + \
                    "," + albumartpath
                notifycommand = {'command': 'ExecBuiltIn',
                                 'parameter': 'Notification(' + notification + ')'}
                request = self._sendhttp(host, notifycommand)
            else:  # Frodo: JSON-RPC GUI.ShowNotification
                params = {'title': header, 'message': message,
                          'displaytime': int(display_time), 'image': albumartpath}
                request = self._sendjson(host, 'GUI.ShowNotification', params)

            if not request:
                raise Exception
        except Exception:
            logger.error('Error sending notification request to XBMC')
def removeTorrent(torrentid, remove_data=False):
    """Remove a finished Transmission torrent (optionally with its data).

    Returns True only if the torrent had finished seeding and was removed.

    Fix: the bare "except:" hid every possible error; it is narrowed to
    the response-lookup failures it was guarding against.
    """
    method = 'torrent-get'
    arguments = {'ids': torrentid, 'fields': ['isFinished', 'name']}

    response = torrentAction(method, arguments)
    if not response:
        return False

    try:
        finished = response['arguments']['torrents'][0]['isFinished']
        name = response['arguments']['torrents'][0]['name']
    except (KeyError, IndexError):
        # Torrent not present or response malformed.
        return False

    if finished:
        logger.info('%s has finished seeding, removing torrent and data' % name)
        method = 'torrent-remove'
        if remove_data:
            arguments = {'delete-local-data': True, 'ids': torrentid}
        else:
            arguments = {'ids': torrentid}
        torrentAction(method, arguments)
        return True

    logger.info(
        '%s has not finished seeding yet, torrent will not be removed, will try again on next run' % name)
    return False
def notify(self, message, event):
    """Send a notification through the Pushalot HTTP API."""
    if not headphones.CONFIG.PUSHALOT_ENABLED:
        return

    token = headphones.CONFIG.PUSHALOT_APIKEY

    logger.debug(u"Pushalot event: " + event)
    logger.debug(u"Pushalot message: " + message)
    logger.debug(u"Pushalot api: " + token)

    payload = urlencode({
        'AuthorizationToken': token,
        'Title': event.encode('utf-8'),
        'Body': message.encode("utf-8"),
    })

    connection = HTTPSConnection("pushalot.com")
    connection.request("POST", "/api/sendmessage",
                       headers={'Content-type': "application/x-www-form-urlencoded"},
                       body=payload)
    response = connection.getresponse()
    request_status = response.status

    logger.debug(u"Pushalot response status: %r" % request_status)
    logger.debug(u"Pushalot response headers: %r" % response.getheaders())
    logger.debug(u"Pushalot response body: %r" % response.read())

    if request_status == 200:
        logger.info(u"Pushalot notifications sent.")
        return True
    if request_status == 410:
        logger.info(u"Pushalot auth failed: %s" % response.reason)
        return False
    logger.info(u"Pushalot notification failed.")
    return False
def removeTorrent(hash, remove_data=False):
    # Remove a qBittorrent torrent (matched by info-hash) once it has
    # reached its ratio limit. Returns True when removed, False otherwise.
    logger.debug('removeTorrent(%s,%s)' % (hash, remove_data))
    qbclient = qbittorrentclient()
    if qbclient.version == 2:
        # API v2 supports server-side filtering by hash.
        torrentlist = qbclient.qb.torrents(hashes=hash.lower())
    else:
        # Legacy API: fetch everything and filter below.
        status, torrentlist = qbclient._get_list()
    for torrent in torrentlist:
        if torrent['hash'].lower() == hash.lower():
            # Only remove once the seeding goal is reached; a negative
            # ratio_limit is treated as "no limit" (torrent kept).
            if torrent['ratio'] >= torrent['ratio_limit'] and torrent[
                    'ratio_limit'] >= 0:
                if qbclient.version == 2:
                    if remove_data:
                        logger.info(
                            '%s has finished seeding, removing torrent and data. '
                            'Ratio: %s, Ratio Limit: %s' %
                            (torrent['name'], torrent['ratio'], torrent['ratio_limit']))
                        qbclient.qb.delete_permanently(hash)
                    else:
                        logger.info(
                            '%s has finished seeding, removing torrent' % torrent['name'])
                        qbclient.qb.delete(hash)
                else:
                    qbclient.remove(hash, remove_data)
                return True
            else:
                logger.info(
                    '%s has not finished seeding yet, torrent will not be removed, will try again on next run. '
                    'Ratio: %s, Ratio Limit: %s' %
                    (torrent['name'], torrent['ratio'], torrent['ratio_limit']))
                return False
    return False
def schedule_job(function, name, hours=0, minutes=0):
    """
    Start scheduled job if starting or restarting headphones.
    Reschedule job if Interval Settings have changed.
    Remove job if Interval Settings changed to 0
    """
    job = SCHED.get_job(name)
    if job:
        if hours == 0 and minutes == 0:
            # Interval disabled: drop the existing job.
            SCHED.remove_job(name)
            logger.info("Removed background task: %s", name)
        elif job.trigger.interval != datetime.timedelta(hours=hours, minutes=minutes):
            # Interval changed: keep the job, swap in a new trigger.
            SCHED.reschedule_job(name, trigger=IntervalTrigger(
                hours=hours, minutes=minutes))
            logger.info("Re-scheduled background task: %s", name)
    elif hours > 0 or minutes > 0:
        # No existing job and a non-zero interval: create it.
        SCHED.add_job(function, id=name, trigger=IntervalTrigger(
            hours=hours, minutes=minutes))
        logger.info("Scheduled background task: %s", name)
def http_error_default(self, url, fp, errcode, errmsg, headers):
    # Handle Newzbin-specific HTTP 400 responses, which carry the real
    # condition in the X-DNZB-RCode header.
    # if newzbin is throttling us, wait seconds and try again
    if errcode == 400:
        newzbinErrCode = int(headers.getheader('X-DNZB-RCode'))
        if newzbinErrCode == 450:
            # Throttled: X-DNZB-RText contains the wait time,
            # e.g. "... wait 5 seconds".
            rtext = str(headers.getheader('X-DNZB-RText'))
            result = re.search("wait (\d+) seconds", rtext)
            logger.info(
                "Newzbin throttled our NZB downloading, pausing for " + result.group(1) + " seconds")
            time.sleep(int(result.group(1)))
            raise exceptions.NewzbinAPIThrottled()
        elif newzbinErrCode == 401:
            # Authentication problem; currently only logged.
            logger.info("Newzbin error 401")
            #raise exceptions.AuthException("Newzbin username or password incorrect")
        elif newzbinErrCode == 402:
            # Non-premium account; currently only logged.
            #raise exceptions.AuthException("Newzbin account not premium status, can't download NZBs")
            logger.info("Newzbin error 402")
def login(self):
    """ Logs in user """
    loginpage = 'http://rutracker.org/forum/login.php'
    post_params = {
        'login_username': headphones.CONFIG.RUTRACKER_USER,
        'login_password': headphones.CONFIG.RUTRACKER_PASSWORD,
        # Submit-button value the site expects: cp1251-encoded bytes
        # (URL-encoded form shown in the trailing comment).
        'login': b'\xc2\xf5\xee\xe4'  # '%C2%F5%EE%E4'
    }

    logger.info("Attempting to log in to rutracker...")
    try:
        r = self.session.post(loginpage, data=post_params, timeout=self.timeout,
                              allow_redirects=False)
        # try again
        if not self.has_bb_session_cookie(r):
            # First attempt failed; wait, then retry -- sending the user's
            # predefined session cookie if one is configured.
            time.sleep(10)
            if headphones.CONFIG.RUTRACKER_COOKIE:
                logger.info("Attempting to log in using predefined cookie...")
                r = self.session.post(loginpage, data=post_params,
                                      timeout=self.timeout, allow_redirects=False,
                                      cookies={'bb_session': headphones.CONFIG.RUTRACKER_COOKIE})
            else:
                r = self.session.post(loginpage, data=post_params,
                                      timeout=self.timeout, allow_redirects=False)
        if self.has_bb_session_cookie(r):
            self.loggedin = True
            logger.info("Successfully logged in to rutracker")
        else:
            logger.error(
                "Could not login to rutracker, credentials maybe incorrect, site is down or too many attempts. Try again later")
            self.loggedin = False
        return self.loggedin
    except Exception as e:
        logger.error("Unknown error logging in to rutracker: %s" % e)
        self.loggedin = False
        return self.loggedin
def update():
    """Update Headphones in place.

    Git installs pull the configured branch; source installs download and
    unpack the branch tarball from GitHub over the program directory.
    Windows .exe installs are not supported.

    Fix: the final error log had no %s placeholder, so the IOError was
    passed as an unused logging argument and never shown.
    """
    if headphones.INSTALL_TYPE == 'win':
        logger.info('Windows .exe updating not supported yet.')

    elif headphones.INSTALL_TYPE == 'git':
        output, err = runGit('pull origin ' + headphones.GIT_BRANCH)

        if not output:
            logger.error('Couldn\'t download latest version')

        for line in output.split('\n'):
            if 'Already up-to-date.' in line:
                logger.info('No update available, not updating')
                logger.info('Output: ' + str(output))
            elif line.endswith('Aborting.'):
                logger.error('Unable to update from git: ' + line)
                logger.info('Output: ' + str(output))

    else:
        tar_download_url = 'https://github.com/%s/headphones/tarball/%s' % (
            headphones.GIT_USER, headphones.GIT_BRANCH)
        update_dir = os.path.join(headphones.PROG_DIR, 'update')
        version_path = os.path.join(headphones.PROG_DIR, 'version.txt')

        logger.info('Downloading update from: ' + tar_download_url)
        data = request.request_content(tar_download_url)

        if not data:
            logger.error(
                "Unable to retrieve new version from '%s', can't update",
                tar_download_url)
            return

        download_name = headphones.GIT_BRANCH + '-github'
        tar_download_path = os.path.join(headphones.PROG_DIR, download_name)

        # Save tar to disk
        with open(tar_download_path, 'wb') as f:
            f.write(data)

        # Extract the tar to update folder
        logger.info('Extracting file: ' + tar_download_path)
        tar = tarfile.open(tar_download_path)
        tar.extractall(update_dir)
        tar.close()

        # Delete the tar.gz
        logger.info('Deleting file: ' + tar_download_path)
        os.remove(tar_download_path)

        # Find update dir name (the tarball contains one top-level folder)
        update_dir_contents = [
            x for x in os.listdir(update_dir)
            if os.path.isdir(os.path.join(update_dir, x))
        ]
        if len(update_dir_contents) != 1:
            logger.error("Invalid update data, update failed: " +
                         str(update_dir_contents))
            return
        content_dir = os.path.join(update_dir, update_dir_contents[0])

        # walk temp folder and move files to main folder
        for dirname, dirnames, filenames in os.walk(content_dir):
            dirname = dirname[len(content_dir) + 1:]
            for curfile in filenames:
                old_path = os.path.join(content_dir, dirname, curfile)
                new_path = os.path.join(headphones.PROG_DIR, dirname, curfile)

                if os.path.isfile(new_path):
                    os.remove(new_path)
                os.renames(old_path, new_path)

        # Update version.txt
        try:
            with open(version_path, 'w') as f:
                f.write(str(headphones.LATEST_VERSION))
        except IOError as e:
            logger.error("Unable to write current version to version.txt, "
                         "update not complete: %s", e)
            return
def dbcheck(): conn = sqlite3.connect(DB_FILE) c = conn.cursor() c.execute( 'CREATE TABLE IF NOT EXISTS artists (ArtistID TEXT UNIQUE, ArtistName TEXT, ArtistSortName TEXT, DateAdded TEXT, Status TEXT, IncludeExtras INTEGER, LatestAlbum TEXT, ReleaseDate TEXT, AlbumID TEXT, HaveTracks INTEGER, TotalTracks INTEGER, LastUpdated TEXT, ArtworkURL TEXT, ThumbURL TEXT, Extras TEXT, Type TEXT, MetaCritic TEXT)' ) # ReleaseFormat here means CD,Digital,Vinyl, etc. If using the default # Headphones hybrid release, ReleaseID will equal AlbumID (AlbumID is # releasegroup id) c.execute( 'CREATE TABLE IF NOT EXISTS albums (ArtistID TEXT, ArtistName TEXT, AlbumTitle TEXT, AlbumASIN TEXT, ReleaseDate TEXT, DateAdded TEXT, AlbumID TEXT UNIQUE, Status TEXT, Type TEXT, ArtworkURL TEXT, ThumbURL TEXT, ReleaseID TEXT, ReleaseCountry TEXT, ReleaseFormat TEXT, SearchTerm TEXT, CriticScore TEXT, UserScore TEXT)' ) # Format here means mp3, flac, etc. c.execute( 'CREATE TABLE IF NOT EXISTS tracks (ArtistID TEXT, ArtistName TEXT, AlbumTitle TEXT, AlbumASIN TEXT, AlbumID TEXT, TrackTitle TEXT, TrackDuration, TrackID TEXT, TrackNumber INTEGER, Location TEXT, BitRate INTEGER, CleanName TEXT, Format TEXT, ReleaseID TEXT)' ) c.execute( 'CREATE TABLE IF NOT EXISTS allalbums (ArtistID TEXT, ArtistName TEXT, AlbumTitle TEXT, AlbumASIN TEXT, ReleaseDate TEXT, AlbumID TEXT, Type TEXT, ReleaseID TEXT, ReleaseCountry TEXT, ReleaseFormat TEXT)' ) c.execute( 'CREATE TABLE IF NOT EXISTS alltracks (ArtistID TEXT, ArtistName TEXT, AlbumTitle TEXT, AlbumASIN TEXT, AlbumID TEXT, TrackTitle TEXT, TrackDuration, TrackID TEXT, TrackNumber INTEGER, Location TEXT, BitRate INTEGER, CleanName TEXT, Format TEXT, ReleaseID TEXT)' ) c.execute( 'CREATE TABLE IF NOT EXISTS snatched (AlbumID TEXT, Title TEXT, Size INTEGER, URL TEXT, DateAdded TEXT, Status TEXT, FolderName TEXT, Kind TEXT, TorrentHash TEXT)' ) # Matched is a temporary value used to see if there was a match found in # alltracks c.execute( 'CREATE TABLE IF 
NOT EXISTS have (ArtistName TEXT, AlbumTitle TEXT, TrackNumber TEXT, TrackTitle TEXT, TrackLength TEXT, BitRate TEXT, Genre TEXT, Date TEXT, TrackID TEXT, Location TEXT, CleanName TEXT, Format TEXT, Matched TEXT)' ) c.execute( 'CREATE TABLE IF NOT EXISTS lastfmcloud (ArtistName TEXT, ArtistID TEXT, Count INTEGER)' ) c.execute( 'CREATE TABLE IF NOT EXISTS descriptions (ArtistID TEXT, ReleaseGroupID TEXT, ReleaseID TEXT, Summary TEXT, Content TEXT, LastUpdated TEXT)' ) c.execute('CREATE TABLE IF NOT EXISTS blacklist (ArtistID TEXT UNIQUE)') c.execute('CREATE TABLE IF NOT EXISTS newartists (ArtistName TEXT UNIQUE)') c.execute( 'CREATE TABLE IF NOT EXISTS releases (ReleaseID TEXT, ReleaseGroupID TEXT, UNIQUE(ReleaseID, ReleaseGroupID))' ) c.execute( 'CREATE INDEX IF NOT EXISTS tracks_albumid ON tracks(AlbumID ASC)') c.execute( 'CREATE INDEX IF NOT EXISTS album_artistid_reldate ON albums(ArtistID ASC, ReleaseDate DESC)' ) # Below creates indices to speed up Active Artist updating c.execute( 'CREATE INDEX IF NOT EXISTS alltracks_relid ON alltracks(ReleaseID ASC, TrackID ASC)' ) c.execute( 'CREATE INDEX IF NOT EXISTS allalbums_relid ON allalbums(ReleaseID ASC)' ) c.execute('CREATE INDEX IF NOT EXISTS have_location ON have(Location ASC)') # Below creates indices to speed up library scanning & matching c.execute( 'CREATE INDEX IF NOT EXISTS have_Metadata ON have(ArtistName ASC, AlbumTitle ASC, TrackTitle ASC)' ) c.execute( 'CREATE INDEX IF NOT EXISTS have_CleanName ON have(CleanName ASC)') c.execute( 'CREATE INDEX IF NOT EXISTS tracks_Metadata ON tracks(ArtistName ASC, AlbumTitle ASC, TrackTitle ASC)' ) c.execute( 'CREATE INDEX IF NOT EXISTS tracks_CleanName ON tracks(CleanName ASC)') c.execute( 'CREATE INDEX IF NOT EXISTS alltracks_Metadata ON alltracks(ArtistName ASC, AlbumTitle ASC, TrackTitle ASC)' ) c.execute( 'CREATE INDEX IF NOT EXISTS alltracks_CleanName ON alltracks(CleanName ASC)' ) c.execute( 'CREATE INDEX IF NOT EXISTS tracks_Location ON tracks(Location ASC)') 
c.execute( 'CREATE INDEX IF NOT EXISTS alltracks_Location ON alltracks(Location ASC)' ) try: c.execute('SELECT IncludeExtras from artists') except sqlite3.OperationalError: c.execute( 'ALTER TABLE artists ADD COLUMN IncludeExtras INTEGER DEFAULT 0') try: c.execute('SELECT LatestAlbum from artists') except sqlite3.OperationalError: c.execute('ALTER TABLE artists ADD COLUMN LatestAlbum TEXT') try: c.execute('SELECT ReleaseDate from artists') except sqlite3.OperationalError: c.execute('ALTER TABLE artists ADD COLUMN ReleaseDate TEXT') try: c.execute('SELECT AlbumID from artists') except sqlite3.OperationalError: c.execute('ALTER TABLE artists ADD COLUMN AlbumID TEXT') try: c.execute('SELECT HaveTracks from artists') except sqlite3.OperationalError: c.execute( 'ALTER TABLE artists ADD COLUMN HaveTracks INTEGER DEFAULT 0') try: c.execute('SELECT TotalTracks from artists') except sqlite3.OperationalError: c.execute( 'ALTER TABLE artists ADD COLUMN TotalTracks INTEGER DEFAULT 0') try: c.execute('SELECT Type from albums') except sqlite3.OperationalError: c.execute('ALTER TABLE albums ADD COLUMN Type TEXT DEFAULT "Album"') try: c.execute('SELECT TrackNumber from tracks') except sqlite3.OperationalError: c.execute('ALTER TABLE tracks ADD COLUMN TrackNumber INTEGER') try: c.execute('SELECT FolderName from snatched') except sqlite3.OperationalError: c.execute('ALTER TABLE snatched ADD COLUMN FolderName TEXT') try: c.execute('SELECT Location from tracks') except sqlite3.OperationalError: c.execute('ALTER TABLE tracks ADD COLUMN Location TEXT') try: c.execute('SELECT Location from have') except sqlite3.OperationalError: c.execute('ALTER TABLE have ADD COLUMN Location TEXT') try: c.execute('SELECT BitRate from tracks') except sqlite3.OperationalError: c.execute('ALTER TABLE tracks ADD COLUMN BitRate INTEGER') try: c.execute('SELECT CleanName from tracks') except sqlite3.OperationalError: c.execute('ALTER TABLE tracks ADD COLUMN CleanName TEXT') try: c.execute('SELECT CleanName 
from have') except sqlite3.OperationalError: c.execute('ALTER TABLE have ADD COLUMN CleanName TEXT') # Add the Format column try: c.execute('SELECT Format from have') except sqlite3.OperationalError: c.execute('ALTER TABLE have ADD COLUMN Format TEXT DEFAULT NULL') try: c.execute('SELECT Format from tracks') except sqlite3.OperationalError: c.execute('ALTER TABLE tracks ADD COLUMN Format TEXT DEFAULT NULL') try: c.execute('SELECT LastUpdated from artists') except sqlite3.OperationalError: c.execute( 'ALTER TABLE artists ADD COLUMN LastUpdated TEXT DEFAULT NULL') try: c.execute('SELECT ArtworkURL from artists') except sqlite3.OperationalError: c.execute( 'ALTER TABLE artists ADD COLUMN ArtworkURL TEXT DEFAULT NULL') try: c.execute('SELECT ArtworkURL from albums') except sqlite3.OperationalError: c.execute('ALTER TABLE albums ADD COLUMN ArtworkURL TEXT DEFAULT NULL') try: c.execute('SELECT ThumbURL from artists') except sqlite3.OperationalError: c.execute('ALTER TABLE artists ADD COLUMN ThumbURL TEXT DEFAULT NULL') try: c.execute('SELECT ThumbURL from albums') except sqlite3.OperationalError: c.execute('ALTER TABLE albums ADD COLUMN ThumbURL TEXT DEFAULT NULL') try: c.execute('SELECT ArtistID from descriptions') except sqlite3.OperationalError: c.execute( 'ALTER TABLE descriptions ADD COLUMN ArtistID TEXT DEFAULT NULL') try: c.execute('SELECT LastUpdated from descriptions') except sqlite3.OperationalError: c.execute( 'ALTER TABLE descriptions ADD COLUMN LastUpdated TEXT DEFAULT NULL' ) try: c.execute('SELECT ReleaseID from albums') except sqlite3.OperationalError: c.execute('ALTER TABLE albums ADD COLUMN ReleaseID TEXT DEFAULT NULL') try: c.execute('SELECT ReleaseFormat from albums') except sqlite3.OperationalError: c.execute( 'ALTER TABLE albums ADD COLUMN ReleaseFormat TEXT DEFAULT NULL') try: c.execute('SELECT ReleaseCountry from albums') except sqlite3.OperationalError: c.execute( 'ALTER TABLE albums ADD COLUMN ReleaseCountry TEXT DEFAULT NULL') try: 
c.execute('SELECT ReleaseID from tracks') except sqlite3.OperationalError: c.execute('ALTER TABLE tracks ADD COLUMN ReleaseID TEXT DEFAULT NULL') try: c.execute('SELECT Matched from have') except sqlite3.OperationalError: c.execute('ALTER TABLE have ADD COLUMN Matched TEXT DEFAULT NULL') try: c.execute('SELECT Extras from artists') except sqlite3.OperationalError: c.execute('ALTER TABLE artists ADD COLUMN Extras TEXT DEFAULT NULL') # Need to update some stuff when people are upgrading and have 'include # extras' set globally/for an artist if CONFIG.INCLUDE_EXTRAS: CONFIG.EXTRAS = "1,2,3,4,5,6,7,8" logger.info("Copying over current artist IncludeExtras information") artists = c.execute( 'SELECT ArtistID, IncludeExtras from artists').fetchall() for artist in artists: if artist[1]: c.execute('UPDATE artists SET Extras=? WHERE ArtistID=?', ("1,2,3,4,5,6,7,8", artist[0])) try: c.execute('SELECT Kind from snatched') except sqlite3.OperationalError: c.execute('ALTER TABLE snatched ADD COLUMN Kind TEXT DEFAULT NULL') try: c.execute('SELECT SearchTerm from albums') except sqlite3.OperationalError: c.execute('ALTER TABLE albums ADD COLUMN SearchTerm TEXT DEFAULT NULL') try: c.execute('SELECT CriticScore from albums') except sqlite3.OperationalError: c.execute( 'ALTER TABLE albums ADD COLUMN CriticScore TEXT DEFAULT NULL') try: c.execute('SELECT UserScore from albums') except sqlite3.OperationalError: c.execute('ALTER TABLE albums ADD COLUMN UserScore TEXT DEFAULT NULL') try: c.execute('SELECT Type from artists') except sqlite3.OperationalError: c.execute('ALTER TABLE artists ADD COLUMN Type TEXT DEFAULT NULL') try: c.execute('SELECT MetaCritic from artists') except sqlite3.OperationalError: c.execute( 'ALTER TABLE artists ADD COLUMN MetaCritic TEXT DEFAULT NULL') try: c.execute('SELECT TorrentHash from snatched') except sqlite3.OperationalError: c.execute('ALTER TABLE snatched ADD COLUMN TorrentHash TEXT') c.execute( 'UPDATE snatched SET TorrentHash = FolderName WHERE Status 
LIKE "Seed_%"' ) # One off script to set CleanName to lower case clean_name_mixed = c.execute( 'SELECT CleanName FROM have ORDER BY Date Desc').fetchone()[0] if clean_name_mixed != clean_name_mixed.lower(): logger.info("Updating track clean name, this could take some time...") c.execute( 'UPDATE tracks SET CleanName = LOWER(CleanName) WHERE LOWER(CleanName) != CleanName' ) c.execute( 'UPDATE alltracks SET CleanName = LOWER(CleanName) WHERE LOWER(CleanName) != CleanName' ) c.execute( 'UPDATE have SET CleanName = LOWER(CleanName) WHERE LOWER(CleanName) != CleanName' ) conn.commit() c.close()
def extract_metadata(f):
    """
    Scan all files in the given directory and decide on an artist, album and
    year based on the metadata. A decision is based on the number of different
    artists, albums and years found in the media files.

    Returns an ``(artist, album, year)`` tuple, or ``(None, None, None)``
    when no reliable decision can be made.
    """
    from headphones import logger

    # Walk the directory and collect (artist, album, year) from every
    # readable media file.
    results = []
    count = 0

    for root, dirs, files in os.walk(f):
        for file in files:
            # Count the number of potential media files
            extension = os.path.splitext(file)[1].lower()[1:]

            if extension in headphones.MEDIA_FORMATS:
                count += 1

            # Try to read the file info
            try:
                media_file = MediaFile(os.path.join(root, file))
            except (FileTypeError, UnreadableFileError):
                # Probably not a media file
                continue

            # Append metadata to file
            artist = media_file.albumartist or media_file.artist
            album = media_file.album
            year = media_file.year

            if artist and album and year:
                results.append((artist.lower(), album.lower(), year))

    # Verify results
    if len(results) == 0:
        logger.info("No metadata in media files found, ignoring.")
        return (None, None, None)

    # Require that some percentage of the counted media files carry tags.
    # BUGFIX: the old test (count < count_ratio * len(results)) compared in
    # the wrong direction and could effectively never trigger, since tagged
    # results cannot outnumber counted media files in practice.
    count_ratio = 0.75

    if len(results) < count_ratio * count:
        logger.info("Counted %d media files, but only %d have tags, ignoring.",
                    count, len(results))
        return (None, None, None)

    # Count distinct values
    artists = list(set([x[0] for x in results]))
    albums = list(set([x[1] for x in results]))
    years = list(set([x[2] for x in results]))

    # Remove things such as CD2 from album names
    if len(albums) > 1:
        new_albums = list(albums)

        # Replace occurrences of e.g. CD1
        for index, album in enumerate(new_albums):
            if RE_CD_ALBUM.search(album):
                old_album = new_albums[index]
                new_albums[index] = RE_CD_ALBUM.sub("", album).strip()
                logger.debug("Stripped album number identifier: %s -> %s",
                             old_album, new_albums[index])

        # Remove duplicates
        new_albums = list(set(new_albums))

        # Safety check: if nothing has merged, then ignore the work. This can
        # happen if only one CD of a multi part CD is processed.
        if len(new_albums) < len(albums):
            albums = new_albums

    # All files have the same metadata, so it's trivial
    if len(artists) == 1 and len(albums) == 1:
        return (artists[0], albums[0], years[0])

    # (Lots of) different artists. Could be a featuring album, so test for this.
    if len(artists) > 1 and len(albums) == 1:
        split_artists = [RE_FEATURING.split(x) for x in artists]
        featurings = [len(split_artist) - 1 for split_artist in split_artists]

        logger.info("Album seem to feature %d different artists",
                    sum(featurings))

        if sum(featurings) > 0:
            # Find the artist of which the least splits have been generated.
            # Ideally, this should be 0, which should be the album artist
            # itself.
            artist = split_artists[featurings.index(min(featurings))][0]

            # Done
            return (artist, albums[0], years[0])

    # Not sure what to do here.
    logger.info("Found %d artists, %d albums and %d years in metadata, "
                "so ignoring", len(artists), len(albums), len(years))
    logger.debug("Artists: %s, Albums: %s, Years: %s", artists, albums, years)
    return (None, None, None)
def renameFiles(albumpath, downloaded_track_list, release): logger.info('Renaming files') try: year = release['ReleaseDate'][:4] except TypeError: year = '' # Until tagging works better I'm going to rely on the already provided metadata for downloaded_track in downloaded_track_list: try: f = MediaFile(downloaded_track) except: logger.info( "MediaFile couldn't parse: " + downloaded_track.decode(headphones.SYS_ENCODING, 'replace')) continue if not f.disc: discnumber = '' else: discnumber = '%d' % f.disc if not f.track: tracknumber = '' else: tracknumber = '%02d' % f.track if not f.title: basename = os.path.basename( downloaded_track.decode(headphones.SYS_ENCODING, 'replace')) title = os.path.splitext(basename)[0] ext = os.path.splitext(basename)[1] new_file_name = helpers.cleanTitle(title) + ext else: title = f.title if release['ArtistName'] == "Various Artists" and f.artist: artistname = f.artist else: artistname = release['ArtistName'] if artistname.startswith('The '): sortname = artistname[4:] + ", The" else: sortname = artistname values = { '$Disc': discnumber, '$Track': tracknumber, '$Title': title, '$Artist': artistname, '$SortArtist': sortname, '$Album': release['AlbumTitle'], '$Year': year, '$disc': discnumber, '$track': tracknumber, '$title': title.lower(), '$artist': artistname.lower(), '$sortartist': sortname.lower(), '$album': release['AlbumTitle'].lower(), '$year': year } ext = os.path.splitext(downloaded_track)[1] new_file_name = helpers.replace_all(headphones.FILE_FORMAT.strip(), values).replace('/', '_') + ext new_file_name = new_file_name.replace('?', '_').replace( ':', '_').encode(headphones.SYS_ENCODING, 'replace') if new_file_name.startswith('.'): new_file_name = new_file_name.replace(0, '_') new_file = os.path.join(albumpath, new_file_name) if downloaded_track == new_file_name: logger.debug( "Renaming for: " + downloaded_track.decode(headphones.SYS_ENCODING, 'replace') + " is not neccessary") continue logger.debug( 'Renaming %s ---> %s' % 
(downloaded_track.decode(headphones.SYS_ENCODING, 'replace'), new_file_name.decode(headphones.SYS_ENCODING, 'replace'))) try: os.rename(downloaded_track, new_file) except Exception, e: logger.error('Error renaming file: %s. Error: %s' % (downloaded_track.decode(headphones.SYS_ENCODING, 'replace'), e)) continue
def dbcheck():
    """Create the database schema and migrate older databases in place.

    Every table and index is created with IF NOT EXISTS; columns added after
    the original schema are probed with a SELECT and created via ALTER TABLE
    when sqlite reports them missing.
    """
    conn = sqlite3.connect(DB_FILE)
    c = conn.cursor()

    # Base schema. ReleaseFormat means CD, Digital, Vinyl, etc. If using the
    # default Headphones hybrid release, ReleaseID will equal AlbumID (AlbumID
    # is the release group id). Format means mp3, flac, etc. 'Matched' on the
    # have table is a temporary value used to see if there was a match found
    # in alltracks.
    schema_statements = (
        'CREATE TABLE IF NOT EXISTS artists (ArtistID TEXT UNIQUE, ArtistName TEXT, ArtistSortName TEXT, DateAdded TEXT, Status TEXT, IncludeExtras INTEGER, LatestAlbum TEXT, ReleaseDate TEXT, AlbumID TEXT, HaveTracks INTEGER, TotalTracks INTEGER, LastUpdated TEXT, ArtworkURL TEXT, ThumbURL TEXT, Extras TEXT)',
        'CREATE TABLE IF NOT EXISTS albums (ArtistID TEXT, ArtistName TEXT, AlbumTitle TEXT, AlbumASIN TEXT, ReleaseDate TEXT, DateAdded TEXT, AlbumID TEXT UNIQUE, Status TEXT, Type TEXT, ArtworkURL TEXT, ThumbURL TEXT, ReleaseID TEXT, ReleaseCountry TEXT, ReleaseFormat TEXT, SearchTerm TEXT)',
        'CREATE TABLE IF NOT EXISTS tracks (ArtistID TEXT, ArtistName TEXT, AlbumTitle TEXT, AlbumASIN TEXT, AlbumID TEXT, TrackTitle TEXT, TrackDuration, TrackID TEXT, TrackNumber INTEGER, Location TEXT, BitRate INTEGER, CleanName TEXT, Format TEXT, ReleaseID TEXT)',
        'CREATE TABLE IF NOT EXISTS allalbums (ArtistID TEXT, ArtistName TEXT, AlbumTitle TEXT, AlbumASIN TEXT, ReleaseDate TEXT, AlbumID TEXT, Type TEXT, ReleaseID TEXT, ReleaseCountry TEXT, ReleaseFormat TEXT)',
        'CREATE TABLE IF NOT EXISTS alltracks (ArtistID TEXT, ArtistName TEXT, AlbumTitle TEXT, AlbumASIN TEXT, AlbumID TEXT, TrackTitle TEXT, TrackDuration, TrackID TEXT, TrackNumber INTEGER, Location TEXT, BitRate INTEGER, CleanName TEXT, Format TEXT, ReleaseID TEXT)',
        'CREATE TABLE IF NOT EXISTS snatched (AlbumID TEXT, Title TEXT, Size INTEGER, URL TEXT, DateAdded TEXT, Status TEXT, FolderName TEXT, Kind TEXT)',
        'CREATE TABLE IF NOT EXISTS have (ArtistName TEXT, AlbumTitle TEXT, TrackNumber TEXT, TrackTitle TEXT, TrackLength TEXT, BitRate TEXT, Genre TEXT, Date TEXT, TrackID TEXT, Location TEXT, CleanName TEXT, Format TEXT, Matched TEXT)',
        'CREATE TABLE IF NOT EXISTS lastfmcloud (ArtistName TEXT, ArtistID TEXT, Count INTEGER)',
        'CREATE TABLE IF NOT EXISTS descriptions (ArtistID TEXT, ReleaseGroupID TEXT, ReleaseID TEXT, Summary TEXT, Content TEXT, LastUpdated TEXT)',
        'CREATE TABLE IF NOT EXISTS blacklist (ArtistID TEXT UNIQUE)',
        'CREATE TABLE IF NOT EXISTS newartists (ArtistName TEXT UNIQUE)',
        'CREATE TABLE IF NOT EXISTS releases (ReleaseID TEXT, ReleaseGroupID TEXT, UNIQUE(ReleaseID, ReleaseGroupID))',
        'CREATE INDEX IF NOT EXISTS tracks_albumid ON tracks(AlbumID ASC)',
        'CREATE INDEX IF NOT EXISTS album_artistid_reldate ON albums(ArtistID ASC, ReleaseDate DESC)',
    )
    for statement in schema_statements:
        c.execute(statement)

    def add_column(table, column, definition):
        """Add a column to an existing table when it is missing."""
        try:
            c.execute('SELECT %s from %s' % (column, table))
        except sqlite3.OperationalError:
            c.execute('ALTER TABLE %s ADD COLUMN %s %s' %
                      (table, column, definition))

    # Columns added after the original schema, in their historical order.
    for table, column, definition in (
            ('artists', 'IncludeExtras', 'INTEGER DEFAULT 0'),
            ('artists', 'LatestAlbum', 'TEXT'),
            ('artists', 'ReleaseDate', 'TEXT'),
            ('artists', 'AlbumID', 'TEXT'),
            ('artists', 'HaveTracks', 'INTEGER DEFAULT 0'),
            ('artists', 'TotalTracks', 'INTEGER DEFAULT 0'),
            ('albums', 'Type', 'TEXT DEFAULT "Album"'),
            ('tracks', 'TrackNumber', 'INTEGER'),
            ('snatched', 'FolderName', 'TEXT'),
            ('tracks', 'Location', 'TEXT'),
            ('have', 'Location', 'TEXT'),
            ('tracks', 'BitRate', 'INTEGER'),
            ('tracks', 'CleanName', 'TEXT'),
            ('have', 'CleanName', 'TEXT'),
            ('have', 'Format', 'TEXT DEFAULT NULL'),
            ('tracks', 'Format', 'TEXT DEFAULT NULL'),
            ('artists', 'LastUpdated', 'TEXT DEFAULT NULL'),
            ('artists', 'ArtworkURL', 'TEXT DEFAULT NULL'),
            ('albums', 'ArtworkURL', 'TEXT DEFAULT NULL'),
            ('artists', 'ThumbURL', 'TEXT DEFAULT NULL'),
            ('albums', 'ThumbURL', 'TEXT DEFAULT NULL'),
            ('descriptions', 'ArtistID', 'TEXT DEFAULT NULL'),
            ('descriptions', 'LastUpdated', 'TEXT DEFAULT NULL'),
            ('albums', 'ReleaseID', 'TEXT DEFAULT NULL'),
            ('albums', 'ReleaseFormat', 'TEXT DEFAULT NULL'),
            ('albums', 'ReleaseCountry', 'TEXT DEFAULT NULL'),
            ('tracks', 'ReleaseID', 'TEXT DEFAULT NULL'),
            ('have', 'Matched', 'TEXT DEFAULT NULL'),
            ('artists', 'Extras', 'TEXT DEFAULT NULL')):
        add_column(table, column, definition)

    # Need to update some stuff when people are upgrading and have 'include
    # extras' set globally/for an artist
    if INCLUDE_EXTRAS:
        EXTRAS = "1,2,3,4,5,6,7,8"

        logger.info("Copying over current artist IncludeExtras information")
        rows = c.execute(
            'SELECT ArtistID, IncludeExtras from artists').fetchall()

        for row in rows:
            if row[1]:
                c.execute('UPDATE artists SET Extras=? WHERE ArtistID=?',
                          ("1,2,3,4,5,6,7,8", row[0]))

    add_column('snatched', 'Kind', 'TEXT DEFAULT NULL')
    add_column('albums', 'SearchTerm', 'TEXT DEFAULT NULL')

    conn.commit()
    c.close()
def initialize_scheduler():
    """Register the recurring background jobs with the scheduler.

    Safe to call repeatedly: jobs are re-scheduled when interval settings
    changed, and the scheduler itself is only started on the first call.
    """
    from headphones import updater, searcher, librarysync, postprocessor, \
        torrentfinished

    with SCHED_LOCK:
        # An empty job list means the scheduler has not been started yet.
        needs_start = not len(SCHED.get_jobs())

        # Core recurring jobs.
        schedule_job(searcher.searchforalbum, 'Search for Wanted',
                     hours=0, minutes=CONFIG.SEARCH_INTERVAL)
        schedule_job(postprocessor.checkFolder, 'Download Scan',
                     hours=0, minutes=CONFIG.DOWNLOAD_SCAN_INTERVAL)
        schedule_job(librarysync.libraryScan, 'Library Scan',
                     hours=CONFIG.LIBRARYSCAN_INTERVAL, minutes=0)
        schedule_job(updater.dbUpdate, 'MusicBrainz Update',
                     hours=CONFIG.UPDATE_DB_INTERVAL, minutes=0)

        # Periodic update check, when enabled.
        if CONFIG.CHECK_GITHUB:
            github_minutes = CONFIG.CHECK_GITHUB_INTERVAL \
                if CONFIG.CHECK_GITHUB_INTERVAL else 0
            schedule_job(versioncheck.checkGithub,
                         'Check GitHub for updates',
                         hours=0, minutes=github_minutes)

        # Remove torrent + data once post-processed and finished seeding.
        if headphones.CONFIG.TORRENT_DOWNLOADER != 0:
            schedule_job(torrentfinished.checkTorrentFinished,
                         'Torrent removal check',
                         hours=0, minutes=CONFIG.TORRENT_REMOVAL_INTERVAL)

        # Fire up the scheduler if it was idle and jobs are now registered.
        if needs_start and len(SCHED.get_jobs()):
            try:
                SCHED.start()
            except Exception as e:
                logger.info(e)
def addTorrent(link, data=None, name=None):
    """Send a torrent to Deluge.

    Accepts a magnet link, an HTTP(S) URL to a .torrent file, a local
    .torrent path, or raw .torrent bytes in *data*.

    Returns the Deluge torrent id on success, False on failure, or None if
    an unexpected exception escapes (logged with traceback).
    """

    def _torrent_name(torrentfile):
        # Extract the torrent name from the bencoded content; fall back to
        # the last component of the link/path minus its '.torrent' suffix.
        # (Previously this logic was duplicated verbatim in two branches.)
        try:
            logger.debug('Deluge: Getting torrent name length')
            name_length = int(
                re.findall('name([0-9]*)\:.*?\:', str(torrentfile))[0])
            logger.debug('Deluge: Getting torrent name')
            return re.findall('name[0-9]*\:(.*?)\:',
                              str(torrentfile))[0][:name_length]
        except Exception:
            logger.debug(
                'Deluge: Could not get torrent name, getting file name')
            # get last part of link/path (name only)
            fallback = link.split('\\')[-1].split('/')[-1]
            # remove '.torrent' suffix
            if fallback[-len('.torrent'):] == '.torrent':
                fallback = fallback[:-len('.torrent')]
            return fallback

    try:
        # Authenticate anyway
        logger.debug('Deluge: addTorrent Authentication')
        _get_auth()

        result = {}
        retid = False

        url_apollo = ['https://apollo.rip/', 'http://apollo.rip/']
        url_waffles = ['https://waffles.ch/', 'http://waffles.ch/']

        if link.lower().startswith('magnet:'):
            logger.debug('Deluge: Got a magnet link: %s' % _scrubber(link))
            result = {'type': 'magnet', 'url': link}
            retid = _add_torrent_magnet(result)

        elif link.lower().startswith(('http://', 'https://')):
            logger.debug('Deluge: Got a URL: %s' % _scrubber(link))

            if link.lower().startswith(tuple(url_waffles)):
                if 'rss=' not in link:
                    link = link + '&rss=1'

            if link.lower().startswith(tuple(url_apollo)):
                logger.debug(
                    'Deluge: Using different User-Agent for this site')
                user_agent = 'Headphones'
                # This method will make Deluge download the file
                # logger.debug('Deluge: Letting Deluge download this')
                # local_torrent_path = _add_torrent_url({'url': link})
                # logger.debug('Deluge: Returned this local path: %s' % _scrubber(local_torrent_path))
                # return addTorrent(local_torrent_path)
            else:
                user_agent = 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2243.2 Safari/537.36'

            get_headers = {'User-Agent': user_agent}
            torrentfile = ''

            logger.debug('Deluge: Trying to download (GET)')
            try:
                r = requests.get(link, headers=get_headers)
                if r.status_code == 200:
                    logger.debug('Deluge: 200 OK')
                    # .text will ruin the encoding for some torrents
                    torrentfile = r.content
                else:
                    logger.debug(
                        'Deluge: Trying to GET %s returned status %d' %
                        (_scrubber(link), r.status_code))
                    return False
            except Exception as e:
                logger.debug('Deluge: Download failed: %s' % str(e))

            # Sanity check: a bencoded torrent starts with its announce key.
            if 'announce' not in str(torrentfile)[:40]:
                logger.debug(
                    'Deluge: Contents of %s doesn\'t look like a torrent file'
                    % _scrubber(link))
                return False

            if not name:
                name = _torrent_name(torrentfile)

            try:
                logger.debug(
                    'Deluge: Sending Deluge torrent with name %s and content [%s...]'
                    % (name, str(torrentfile)[:40]))
            except Exception:
                # Name may not be printable in the current encoding.
                logger.debug(
                    'Deluge: Sending Deluge torrent with problematic name and some content'
                )

            result = {'type': 'torrent', 'name': name, 'content': torrentfile}
            retid = _add_torrent_file(result)

        # elif link.endswith('.torrent') or data:
        elif not (link.lower().startswith('http://')
                  or link.lower().startswith('https://')):
            if data:
                logger.debug('Deluge: Getting .torrent data')
                torrentfile = data
            else:
                logger.debug('Deluge: Getting .torrent file')
                with open(link, 'rb') as f:
                    torrentfile = f.read()

            if not name:
                name = _torrent_name(torrentfile)

            logger.debug(
                'Deluge: Sending Deluge torrent with name %s and content [%s...]'
                % (name, str(torrentfile)[:40]))
            result = {'type': 'torrent', 'name': name, 'content': torrentfile}
            retid = _add_torrent_file(result)

        else:
            logger.error('Deluge: Unknown file type: %s' % link)

        if retid:
            logger.info(
                'Deluge: Torrent sent to Deluge successfully  (%s)' % retid)
            return retid
        else:
            logger.info('Deluge: Returned status %s' % retid)
            return False

    except Exception as e:
        logger.error(str(e))
        formatted_lines = traceback.format_exc().splitlines()
        logger.error('; '.join(formatted_lines))
except OSError, e:
    raise RuntimeError("2nd fork failed: %s [%d]" % (e.strerror, e.errno))

# NOTE(review): the lines above/below are the tail of daemonize() — the
# enclosing def and the first fork are outside this chunk. Redirect the
# standard streams to /dev/null so the daemon detaches from the terminal.
dev_null = file('/dev/null', 'r')
os.dup2(dev_null.fileno(), sys.stdin.fileno())

si = open('/dev/null', "r")
so = open('/dev/null', "a+")
se = open('/dev/null', "a+")

os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())

pid = str(os.getpid())
logger.info('Daemonized to PID: %s' % pid)

# Persist the daemon's PID when requested, so init scripts can manage it.
if CREATEPID:
    logger.info("Writing PID " + pid + " to " + str(PIDFILE))
    file(PIDFILE, 'w').write("%s\n" % pid)


def launch_browser(host, port, root):
    """Open the local web UI in the default browser.

    A bind-all host of 0.0.0.0 is not reachable as a URL, so it is mapped
    to 'localhost' before building the address. Failures are logged, never
    raised.
    """
    if host == '0.0.0.0':
        host = 'localhost'

    try:
        webbrowser.open('http://%s:%i%s' % (host, port, root))
    except Exception, e:
        logger.error('Could not launch browser: %s' % e)
def search(self, searchurl):
    """
    Parse the search results and return valid torrent list
    """
    try:
        headers = {'Referer': self.search_referer}
        r = self.session.get(url=searchurl, headers=headers,
                             timeout=self.timeout)
        soup = BeautifulSoup(r.content, 'html5lib')

        # Debug
        # logger.debug (soup.prettify())

        # Check if still logged in; if not, re-login and retry the search
        # once before giving up.
        if not self.still_logged_in(soup):
            self.login()
            r = self.session.get(url=searchurl, timeout=self.timeout)
            soup = BeautifulSoup(r.content, 'html5lib')
            if not self.still_logged_in(soup):
                logger.error("Error getting rutracker data")
                return None

        # Process
        rulist = []

        # The results live in the table with id 'tor-tbl'.
        i = soup.find('table', id='tor-tbl')
        if not i:
            logger.info("No valid results found from rutracker")
            return None

        minimumseeders = int(headphones.CONFIG.NUMBEROFSEEDERS) - 1

        # Per result row: tag cell (title), download stub (href + size
        # text) and seeders cell, zipped together one tuple per torrent.
        for item in zip(i.find_all(class_='hl-tags'),
                        i.find_all(class_='dl-stub'),
                        i.find_all(class_='seedmed')):
            title = item[0].get_text()
            url = item[1].get('href')
            size_formatted = item[1].get_text()[:-2]
            seeds = item[2].get_text()

            # Size is rendered as '<value> <unit>'; convert to bytes.
            # Only one of the unit branches can match per row.
            size_parts = size_formatted.split()
            size = float(size_parts[0])
            if size_parts[1] == 'KB':
                size *= 1024
            if size_parts[1] == 'MB':
                size *= 1024**2
            if size_parts[1] == 'GB':
                size *= 1024**3
            if size_parts[1] == 'TB':
                size *= 1024**4

            if size < self.maxsize and minimumseeders < int(seeds):
                logger.info('Found %s. Size: %s' % (title, size_formatted))

                # Torrent topic page
                torrent_id = dict([
                    part.split('=') for part in urlparse(url)[4].split('&')
                ])['t']
                topicurl = 'http://rutracker.org/forum/viewtopic.php?t=' + torrent_id
                rulist.append((title, size, topicurl, 'rutracker.org',
                               'torrent', True))
            else:
                logger.info(
                    "%s is larger than the maxsize or has too little seeders for this category, "
                    "skipping. (Size: %i bytes, Seeders: %i)" %
                    (title, size, int(seeds)))

        if not rulist:
            logger.info("No valid results found from rutracker")
        return rulist
    except Exception as e:
        logger.error(
            "An unknown error occurred in the rutracker parser: %s" % e)
        return None
def main():
    """
    Headphones application entry point. Parses arguments, setups encoding and
    initializes the application.
    """

    # Fixed paths to Headphones
    if hasattr(sys, 'frozen'):
        headphones.FULL_PATH = os.path.abspath(sys.executable)
    else:
        headphones.FULL_PATH = os.path.abspath(__file__)

    headphones.PROG_DIR = os.path.dirname(headphones.FULL_PATH)
    headphones.ARGS = sys.argv[1:]

    # From sickbeard
    headphones.SYS_PLATFORM = sys.platform
    headphones.SYS_ENCODING = None

    # Derive the filesystem/console encoding from the locale.
    try:
        locale.setlocale(locale.LC_ALL, "")
        if headphones.SYS_PLATFORM == 'win32':
            headphones.SYS_ENCODING = sys.getdefaultencoding().upper()
        else:
            headphones.SYS_ENCODING = locale.getpreferredencoding()
    except (locale.Error, IOError):
        pass

    # for OSes that are poorly configured I'll just force UTF-8
    if not headphones.SYS_ENCODING or headphones.SYS_ENCODING in (
            'ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        headphones.SYS_ENCODING = 'UTF-8'

    # Set up and gather command line arguments
    parser = argparse.ArgumentParser(
        description='Music add-on for SABnzbd+, Transmission and more.')

    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Increase console logging verbosity')
    parser.add_argument('-q', '--quiet', action='store_true',
                        help='Turn off console logging')
    parser.add_argument('-d', '--daemon', action='store_true',
                        help='Run as a daemon')
    parser.add_argument('-p', '--port', type=int,
                        help='Force Headphones to run on a specified port')
    parser.add_argument(
        '--datadir',
        help='Specify a directory where to store your data files')
    parser.add_argument('--config', help='Specify a config file to use')
    parser.add_argument('--nolaunch', action='store_true',
                        help='Prevent browser from launching on startup')
    parser.add_argument(
        '--pidfile',
        help='Create a pid file (only relevant when running as a daemon)')
    parser.add_argument('--host', help='Specify a host (default - localhost)')

    args = parser.parse_args()

    if args.verbose:
        headphones.VERBOSE = True
    if args.quiet:
        headphones.QUIET = True

    # Do an intial setup of the logger.
    logger.initLogger(console=not headphones.QUIET, log_dir=False,
                      verbose=headphones.VERBOSE)

    # Daemon mode is POSIX-only; quiet is forced since the console detaches.
    if args.daemon:
        if sys.platform == 'win32':
            sys.stderr.write(
                "Daemonizing not supported under Windows, starting normally\n")
        else:
            headphones.DAEMON = True
            headphones.QUIET = True

    if args.pidfile:
        headphones.PIDFILE = str(args.pidfile)

        # If the pidfile already exists, headphones may still be running, so
        # exit
        if os.path.exists(headphones.PIDFILE):
            raise SystemExit("PID file '%s' already exists. Exiting." %
                             headphones.PIDFILE)

        # The pidfile is only useful in daemon mode, make sure we can write the
        # file properly
        if headphones.DAEMON:
            headphones.CREATEPID = True
            try:
                with open(headphones.PIDFILE, 'w') as fp:
                    fp.write("pid\n")
            except IOError as e:
                # NOTE(review): this passes a tuple to SystemExit — the "%s"
                # is never formatted with e. Verify intent.
                raise SystemExit("Unable to write PID file: %s", e)
        else:
            logger.warn("Not running in daemon mode. PID file creation " \
                        "disabled.")

    # Determine which data directory and config file to use
    if args.datadir:
        headphones.DATA_DIR = args.datadir
    else:
        headphones.DATA_DIR = headphones.PROG_DIR

    if args.config:
        config_file = args.config
    else:
        config_file = os.path.join(headphones.DATA_DIR, 'config.ini')

    # Try to create the DATA_DIR if it doesn't exist
    if not os.path.exists(headphones.DATA_DIR):
        try:
            os.makedirs(headphones.DATA_DIR)
        except OSError:
            raise SystemExit('Could not create data directory: ' +
                             headphones.DATA_DIR + '. Exiting....')

    # Make sure the DATA_DIR is writeable
    if not os.access(headphones.DATA_DIR, os.W_OK):
        raise SystemExit('Cannot write to the data directory: ' +
                         headphones.DATA_DIR + '. Exiting...')

    # Put the database in the DATA_DIR
    headphones.DB_FILE = os.path.join(headphones.DATA_DIR, 'headphones.db')

    # Read config and start logging
    try:
        headphones.initialize(config_file)
    except headphones.exceptions.SoftChrootError as e:
        raise SystemExit('FATAL ERROR')

    if headphones.DAEMON:
        headphones.daemonize()

    # Configure the connection to the musicbrainz database
    headphones.mb.startmb()

    # Force the http port if neccessary
    if args.port:
        http_port = args.port
        logger.info('Using forced web server port: %i', http_port)
    else:
        http_port = int(headphones.CONFIG.HTTP_PORT)

    # Force the http host if neccessary
    if args.host:
        http_host = args.host
        logger.info('Using forced web server host: %s', http_host)
    else:
        http_host = headphones.CONFIG.HTTP_HOST

    # Check if pyOpenSSL is installed. It is required for certificate generation
    # and for CherryPy.
    if headphones.CONFIG.ENABLE_HTTPS:
        try:
            import OpenSSL
        except ImportError:
            logger.warn("The pyOpenSSL module is missing. Install this " \
                        "module to enable HTTPS. HTTPS will be disabled.")
            headphones.CONFIG.ENABLE_HTTPS = False

    # Try to start the server. Will exit here is address is already in use.
    web_config = {
        'http_port': http_port,
        'http_host': http_host,
        'http_root': headphones.CONFIG.HTTP_ROOT,
        'http_proxy': headphones.CONFIG.HTTP_PROXY,
        'enable_https': headphones.CONFIG.ENABLE_HTTPS,
        'https_cert': headphones.CONFIG.HTTPS_CERT,
        'https_key': headphones.CONFIG.HTTPS_KEY,
        'http_username': headphones.CONFIG.HTTP_USERNAME,
        'http_password': headphones.CONFIG.HTTP_PASSWORD,
    }
    webstart.initialize(web_config)

    # Start the background threads
    headphones.start()

    # Open webbrowser
    if headphones.CONFIG.LAUNCH_BROWSER and not args.nolaunch:
        headphones.launch_browser(headphones.CONFIG.HTTP_HOST, http_port,
                                  headphones.CONFIG.HTTP_ROOT)

    # Wait endlessy for a signal to happen
    while True:
        if not headphones.SIGNAL:
            try:
                time.sleep(1)
            except KeyboardInterrupt:
                headphones.SIGNAL = 'shutdown'
        else:
            logger.info('Received signal: %s', headphones.SIGNAL)

            if headphones.SIGNAL == 'shutdown':
                headphones.shutdown()
            elif headphones.SIGNAL == 'restart':
                headphones.shutdown(restart=True)
            else:
                headphones.shutdown(restart=True, update=True)

            headphones.SIGNAL = None
downloaded_track_list.append(os.path.join(r, files)) elif files.lower().endswith('.cue'): downloaded_cuecount += 1 # use xld to split cue if headphones.ENCODER == 'xld' and headphones.MUSIC_ENCODER and downloaded_cuecount and downloaded_cuecount >= len( downloaded_track_list): import getXldProfile (xldProfile, xldFormat, xldBitrate) = getXldProfile.getXldProfile(headphones.XLDPROFILE) if not xldFormat: logger.info( u'Details for xld profile "%s" not found, cannot split cue' % (xldProfile)) else: if headphones.ENCODERFOLDER: xldencoder = os.path.join(headphones.ENCODERFOLDER, 'xld') else: xldencoder = os.path.join('/Applications', 'xld') for r, d, f in os.walk(albumpath): xldfolder = r xldfile = '' xldcue = '' for file in f: if any(file.lower().endswith('.' + x.lower()) for x in headphones.MEDIA_FORMATS) and not xldfile: xldfile = os.path.join(r, file)
def verify(albumid, albumpath, Kind=None):
    """Ensure album `albumid` and its tracks exist in the database.

    Looks up the album and its tracks; if either is missing (e.g. a manual
    post-process of an album that was never added via search/RSS), fetches
    release information from MusicBrainz and inserts a bare-minimum
    artist/album/track set so post-processing can continue.

    albumid   -- MusicBrainz release group id (rgid) of the album
    albumpath -- path to the downloaded album folder
    Kind      -- unused in this portion of the function; presumably a
                 download-kind hint (TODO confirm against callers)
    """
    myDB = db.DBConnection()
    release = myDB.action('SELECT * from albums WHERE AlbumID=?', [albumid]).fetchone()
    tracks = myDB.select('SELECT * from tracks WHERE AlbumID=?', [albumid])

    if not release or not tracks:
        # The result of a manual post-process on an album that hasn't been
        # inserted from an RSS feed or etc.
        # TODO: This should be a call to a class method - copied out of
        # importer with only minor changes.
        # TODO: odd things can happen when there are diacritic characters in
        # the folder name - need to translate them?
        import mb

        release_list = None

        try:
            release_list = mb.getReleaseGroup(albumid)
        # FIX: was the Python2-only `except Exception, e:` form; `as e` is
        # equivalent and consistent with the rest of the codebase.
        except Exception as e:
            logger.info(
                'Unable to get release information for manual album with rgid: %s. Error: %s' % (albumid, e))
            return

        if not release_list:
            logger.info(
                'Unable to get release information for manual album with rgid: %s' % albumid)
            return

        # Since we're just using this to create the bare minimum information
        # to insert an artist/album combo, use the first release
        releaseid = release_list[0]['id']
        release_dict = mb.getRelease(releaseid)

        logger.info(u"Now adding/updating artist: " + release_dict['artist_name'])

        # Strip a leading "The " for the sort name
        if release_dict['artist_name'].startswith('The '):
            sortname = release_dict['artist_name'][4:]
        else:
            sortname = release_dict['artist_name']

        controlValueDict = {"ArtistID": release_dict['artist_id']}
        newValueDict = {
            "ArtistName": release_dict['artist_name'],
            "ArtistSortName": sortname,
            "DateAdded": helpers.today(),
            "Status": "Paused"
        }
        logger.info("ArtistID: " + release_dict['artist_id'] +
                    " , ArtistName: " + release_dict['artist_name'])

        if headphones.INCLUDE_EXTRAS:
            newValueDict['IncludeExtras'] = 1
            newValueDict['Extras'] = headphones.EXTRAS

        myDB.upsert("artists", newValueDict, controlValueDict)

        logger.info(u"Now adding album: " + release_dict['title'])
        controlValueDict = {"AlbumID": albumid}
        newValueDict = {
            "ArtistID": release_dict['artist_id'],
            "ArtistName": release_dict['artist_name'],
            "AlbumTitle": release_dict['title'],
            "AlbumASIN": release_dict['asin'],
            "ReleaseDate": release_dict['date'],
            "DateAdded": helpers.today(),
            "Type": release_dict['rg_type'],
            "Status": "Snatched"
        }
        myDB.upsert("albums", newValueDict, controlValueDict)

        # Delete existing tracks associated with this AlbumID since we're
        # going to replace them and don't want any extras
        myDB.action('DELETE from tracks WHERE AlbumID=?', [albumid])
        for track in release_dict['tracks']:
            controlValueDict = {"TrackID": track['id'], "AlbumID": albumid}
            newValueDict = {
                "ArtistID": release_dict['artist_id'],
                "ArtistName": release_dict['artist_name'],
                "AlbumTitle": release_dict['title'],
                "AlbumASIN": release_dict['asin'],
                "TrackTitle": track['title'],
                "TrackDuration": track['duration'],
                "TrackNumber": track['number']
            }
            myDB.upsert("tracks", newValueDict, controlValueDict)

        controlValueDict = {"ArtistID": release_dict['artist_id']}
        newValueDict = {"Status": "Paused"}
        myDB.upsert("artists", newValueDict, controlValueDict)
        logger.info(u"Addition complete for: " + release_dict['title'] +
                    " - " + release_dict['artist_name'])

        # Re-read now that the album and tracks have been inserted
        release = myDB.action('SELECT * from albums WHERE AlbumID=?', [albumid]).fetchone()
        tracks = myDB.select('SELECT * from tracks WHERE AlbumID=?', [albumid])
def _get_credentials(self, key): request_token = {} request_token['oauth_token'] = headphones.CONFIG.TWITTER_USERNAME request_token[ 'oauth_token_secret'] = headphones.CONFIG.TWITTER_PASSWORD request_token['oauth_callback_confirmed'] = 'true' token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret']) token.set_verifier(key) logger.info( 'Generating and signing request for an access token using key ' + key) oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret) logger.info('oauth_consumer: ' + str(oauth_consumer)) oauth_client = oauth.Client(oauth_consumer, token) logger.info('oauth_client: ' + str(oauth_client)) resp, content = oauth_client.request(self.ACCESS_TOKEN_URL, method='POST', body='oauth_verifier=%s' % key) logger.info('resp, content: ' + str(resp) + ',' + str(content)) access_token = dict(parse_qsl(content)) logger.info('access_token: ' + str(access_token)) logger.info('resp[status] = ' + str(resp['status'])) if resp['status'] != '200': logger.info( 'The request for a token with did not succeed: ' + str(resp['status']), logger.ERROR) return False else: logger.info('Your Twitter Access Token key: %s' % access_token['oauth_token']) logger.info('Access Token secret: %s' % access_token['oauth_token_secret']) headphones.CONFIG.TWITTER_USERNAME = access_token['oauth_token'] headphones.CONFIG.TWITTER_PASSWORD = access_token[ 'oauth_token_secret'] return True
def sig_handler(signum=None, frame=None): if signum is not None: logger.info("Signal %i caught, saving and exiting...", signum) shutdown()
def findArtist(name, limit=1): artistlist = [] artistResults = None chars = set('!?*-') if any((c in chars) for c in name): name = '"' + name + '"' criteria = {'artist': name.lower()} with mb_lock: try: artistResults = musicbrainzngs.search_artists( limit=limit, **criteria)['artist-list'] except musicbrainzngs.WebServiceError as e: logger.warn('Attempt to query MusicBrainz for %s failed (%s)' % (name, str(e))) mb_lock.snooze(5) if not artistResults: return False for result in artistResults: if 'disambiguation' in result: uniquename = unicode(result['sort-name'] + " (" + result['disambiguation'] + ")") else: uniquename = unicode(result['sort-name']) if result['name'] != uniquename and limit == 1: logger.info( 'Found an artist with a disambiguation: %s - doing an album based search' % name) artistdict = findArtistbyAlbum(name) if not artistdict: logger.info( 'Cannot determine the best match from an artist/album search. Using top match instead' ) artistlist.append({ # Just need the artist id if the limit is 1 # 'name': unicode(result['sort-name']), # 'uniquename': uniquename, 'id': unicode(result['id']), # 'url': unicode("http://musicbrainz.org/artist/" + result['id']),#probably needs to be changed # 'score': int(result['ext:score']) }) else: artistlist.append(artistdict) else: artistlist.append({ 'name': unicode(result['sort-name']), 'uniquename': uniquename, 'id': unicode(result['id']), 'url': unicode("http://musicbrainz.org/artist/" + result['id']), #probably needs to be changed 'score': int(result['ext:score']) }) return artistlist
def get_new_releases(rgid, includeExtras=False, forcefull=False):
    """Fetch all releases of a MusicBrainz release group and sync them into
    the allalbums/alltracks tables.

    rgid          -- MusicBrainz release group id
    includeExtras -- if False, only releases whose status is 'Official' are kept
    forcefull     -- re-insert releases even when already cached in allalbums

    Returns the number of new/updated releases, -1 when stale local releases
    were purged (forces a repackage), or False on lookup failure / no results.
    """
    myDB = db.DBConnection()
    results = []
    try:
        limit = 100
        newResults = None
        # Page through the browse endpoint; a short page signals the end.
        while newResults == None or len(newResults) >= limit:
            newResults = musicbrainzngs.browse_releases(release_group=rgid, includes=['artist-credits', 'labels', 'recordings', 'release-groups', 'media'], limit=limit, offset=len(results))
            if 'release-list' not in newResults:
                break  # may want to raise an exception here instead ?
            newResults = newResults['release-list']
            results += newResults
    except musicbrainzngs.WebServiceError as e:
        logger.warn('Attempt to retrieve information from MusicBrainz for release group "%s" failed (%s)' % (rgid, str(e)))
        time.sleep(5)
        return False

    if not results or len(results) == 0:
        return False

    # Clean all references to releases in dB that are no longer referenced
    # in musicbrainz
    release_list = []
    force_repackage1 = 0
    if len(results) != 0:
        for release_mark in results:
            release_list.append(unicode(release_mark['id']))
            # NOTE(review): release_title ends up holding the title of the
            # *last* release in the list; it is only used in the repackage
            # log message at the bottom.
            release_title = release_mark['title']
        remove_missing_releases = myDB.action("SELECT ReleaseID FROM allalbums WHERE AlbumID=?", [rgid])
        if remove_missing_releases:
            for items in remove_missing_releases:
                if items['ReleaseID'] not in release_list and items['ReleaseID'] != rgid:
                    # Remove all from albums/tracks that aren't in release
                    myDB.action("DELETE FROM albums WHERE ReleaseID=?", [items['ReleaseID']])
                    myDB.action("DELETE FROM tracks WHERE ReleaseID=?", [items['ReleaseID']])
                    myDB.action("DELETE FROM allalbums WHERE ReleaseID=?", [items['ReleaseID']])
                    myDB.action("DELETE FROM alltracks WHERE ReleaseID=?", [items['ReleaseID']])
                    logger.info("Removing all references to release %s to reflect MusicBrainz" % items['ReleaseID'])
                    force_repackage1 = 1
    else:
        logger.info("There was either an error pulling data from MusicBrainz or there might not be any releases for this category")

    num_new_releases = 0

    for releasedata in results:
        # releasedata.get will return None if it doesn't have a status;
        # all official releases should have the Official status included
        if not includeExtras and releasedata.get('status') != 'Official':
            continue

        release = {}
        rel_id_check = releasedata['id']
        artistid = unicode(releasedata['artist-credit'][0]['artist']['id'])
        album_checker = myDB.action('SELECT * from allalbums WHERE ReleaseID=?', [rel_id_check]).fetchone()
        if not album_checker or forcefull:
            # DELETE all references to this release since we're updating it anyway.
            myDB.action('DELETE from allalbums WHERE ReleaseID=?', [rel_id_check])
            myDB.action('DELETE from alltracks WHERE ReleaseID=?', [rel_id_check])
            release['AlbumTitle'] = unicode(releasedata['title'])
            release['AlbumID'] = unicode(rgid)
            release['AlbumASIN'] = unicode(releasedata['asin']) if 'asin' in releasedata else None
            release['ReleaseDate'] = unicode(releasedata['date']) if 'date' in releasedata else None
            release['ReleaseID'] = releasedata['id']
            if 'release-group' not in releasedata:
                raise Exception('No release group associated with release id ' + releasedata['id'] + ' album id' + rgid)
            release['Type'] = unicode(releasedata['release-group']['type'])

            # Prefer the secondary type (e.g. Live, Compilation) over the
            # generic 'Album' primary type when one is present.
            if release['Type'] == 'Album' and 'secondary-type-list' in releasedata['release-group']:
                secondary_type = unicode(releasedata['release-group']['secondary-type-list'][0])
                if secondary_type != release['Type']:
                    release['Type'] = secondary_type

            # making the assumption that the most important artist will be
            # first in the list
            if 'artist-credit' in releasedata:
                release['ArtistID'] = unicode(releasedata['artist-credit'][0]['artist']['id'])
                release['ArtistName'] = unicode(releasedata['artist-credit-phrase'])
            else:
                logger.warn('Release ' + releasedata['id'] + ' has no Artists associated.')
                return False

            release['ReleaseCountry'] = unicode(releasedata['country']) if 'country' in releasedata else u'Unknown'
            # assuming that the list will contain media and that the format
            # will be consistent
            try:
                additional_medium = ''
                for position in releasedata['medium-list']:
                    if position['format'] == releasedata['medium-list'][0]['format']:
                        medium_count = int(position['position'])
                    else:
                        additional_medium = additional_medium + ' + ' + position['format']
                if medium_count == 1:
                    disc_number = ''
                else:
                    disc_number = str(medium_count) + 'x'
                packaged_medium = disc_number + releasedata['medium-list'][0]['format'] + additional_medium
                release['ReleaseFormat'] = unicode(packaged_medium)
            except:
                # Best-effort: any missing/odd media metadata -> Unknown
                release['ReleaseFormat'] = u'Unknown'

            release['Tracks'] = getTracksFromRelease(releasedata)

            # What we're doing here now is first updating the allalbums &
            # alltracks table to the most current info, then moving the
            # appropriate release into the album table and its associated
            # tracks into the tracks table
            controlValueDict = {"ReleaseID": release['ReleaseID']}
            newValueDict = {"ArtistID": release['ArtistID'],
                            "ArtistName": release['ArtistName'],
                            "AlbumTitle": release['AlbumTitle'],
                            "AlbumID": release['AlbumID'],
                            "AlbumASIN": release['AlbumASIN'],
                            "ReleaseDate": release['ReleaseDate'],
                            "Type": release['Type'],
                            "ReleaseCountry": release['ReleaseCountry'],
                            "ReleaseFormat": release['ReleaseFormat']
                            }
            myDB.upsert("allalbums", newValueDict, controlValueDict)

            for track in release['Tracks']:
                cleanname = helpers.cleanName(release['ArtistName'] + ' ' + release['AlbumTitle'] + ' ' + track['title'])

                controlValueDict = {"TrackID": track['id'],
                                    "ReleaseID": release['ReleaseID']}
                newValueDict = {"ArtistID": release['ArtistID'],
                                "ArtistName": release['ArtistName'],
                                "AlbumTitle": release['AlbumTitle'],
                                "AlbumID": release['AlbumID'],
                                "AlbumASIN": release['AlbumASIN'],
                                "TrackTitle": track['title'],
                                "TrackDuration": track['duration'],
                                "TrackNumber": track['number'],
                                "CleanName": cleanname
                                }

                # Try to pair the track with a file already on disk (have
                # table): first by CleanName, then by a looser LIKE match.
                match = myDB.action('SELECT Location, BitRate, Format from have WHERE CleanName=?', [cleanname]).fetchone()

                if not match:
                    match = myDB.action('SELECT Location, BitRate, Format from have WHERE ArtistName LIKE ? AND AlbumTitle LIKE ? AND TrackTitle LIKE ?', [release['ArtistName'], release['AlbumTitle'], track['title']]).fetchone()
                # if not match:
                # match = myDB.action('SELECT Location, BitRate, Format from have WHERE TrackID=?', [track['id']]).fetchone()
                if match:
                    newValueDict['Location'] = match['Location']
                    newValueDict['BitRate'] = match['BitRate']
                    newValueDict['Format'] = match['Format']
                    # myDB.action('UPDATE have SET Matched="True" WHERE Location=?', [match['Location']])
                    myDB.action('UPDATE have SET Matched=? WHERE Location=?', (release['AlbumID'], match['Location']))
                myDB.upsert("alltracks", newValueDict, controlValueDict)
            num_new_releases = num_new_releases + 1
            # print releasedata['title']
            # print num_new_releases
            if album_checker:
                logger.info('[%s] Existing release %s (%s) updated' % (release['ArtistName'], release['AlbumTitle'], rel_id_check))
            else:
                logger.info('[%s] New release %s (%s) added' % (release['ArtistName'], release['AlbumTitle'], rel_id_check))

    if force_repackage1 == 1:
        # Local releases were deleted above; signal the caller to repackage
        num_new_releases = -1
        logger.info('[%s] Forcing repackage of %s, since dB releases have been removed' % (release['ArtistName'], release_title))
    else:
        num_new_releases = num_new_releases
    return num_new_releases
def update_album_status(AlbumID=None, ArtistID=None):
    """Flag albums as Downloaded once enough of their tracks have files.

    Selects albums (one album via AlbumID, or all of an artist's albums via
    ArtistID) that are not already Downloaded and whose fraction of tracks
    with a Location meets ALBUM_COMPLETION_PCT, then updates their status.

    Only promotes toward Downloaded; statuses are never demoted here (doing
    so previously caused infinite download loops for partially-matched
    recent albums: Downloaded->Skipped->Wanted->Downloaded->...).
    """
    myDB = db.DBConnection()

    if AlbumID:
        cursor = myDB.action(
            'SELECT'
            ' a.AlbumID, a.ArtistName, a.AlbumTitle, a.Status, AVG(t.Location IS NOT NULL) * 100 AS album_completion '
            'FROM'
            ' albums AS a '
            'JOIN tracks AS t ON t.AlbumID = a.AlbumID '
            'WHERE'
            ' a.AlbumID = ? AND a.Status != "Downloaded" '
            'GROUP BY'
            ' a.AlbumID '
            'HAVING'
            ' AVG(t.Location IS NOT NULL) * 100 >= ?',
            [AlbumID, headphones.CONFIG.ALBUM_COMPLETION_PCT])
    else:
        cursor = myDB.action(
            'SELECT'
            ' a.AlbumID, a.ArtistID, a.ArtistName, a.AlbumTitle, a.Status, AVG(t.Location IS NOT NULL) * 100 AS album_completion '
            'FROM'
            ' albums AS a '
            'JOIN tracks AS t ON t.AlbumID = a.AlbumID '
            'WHERE'
            ' a.ArtistID = ? AND a.Status != "Downloaded" '
            'GROUP BY'
            ' a.AlbumID '
            'HAVING'
            ' AVG(t.Location IS NOT NULL) * 100 >= ?',
            [ArtistID, headphones.CONFIG.ALBUM_COMPLETION_PCT])

    new_album_status = "Downloaded"

    # Materialize the result set before writing, since the loop below
    # mutates the very table being read.
    qualifying = [(row['AlbumID'], row['ArtistName'], row['AlbumTitle'])
                  for row in cursor]

    for album_id, artist_name, album_title in qualifying:
        myDB.action('UPDATE albums SET Status = ? WHERE AlbumID = ?',
                    [new_album_status, album_id])
        logger.info('Album: %s - %s. Status updated to %s' %
                    (artist_name, album_title, new_album_status))
def sig_handler(signum=None, frame=None): if type(signum) != type(None): logger.info("Signal %i caught, saving and exiting..." % int(signum)) shutdown()
def libraryScan(dir=None, append=False, ArtistID=None, ArtistName=None,
                cron=False, artistScan=False):
    """Scan a music directory into the `have` table and match files to tracks.

    dir        -- directory to scan; defaults to the configured MUSIC_DIR
    append     -- True when called from the post processor to append a single
                  album dir (path is then already a bytestring)
    ArtistID / ArtistName -- restrict cleanup and track-count updates to one artist
    cron       -- True when run from the scheduler; bails out unless the
                  LIBRARYSCAN setting is enabled
    artistScan -- True when rescanning a single artist's files

    Side effects: inserts/updates rows in have/tracks/alltracks/artists, may
    import new artists, and may update the preferred bitrate setting.
    """
    if cron and not headphones.CONFIG.LIBRARYSCAN:
        return

    if not dir:
        if not headphones.CONFIG.MUSIC_DIR:
            return
        else:
            dir = headphones.CONFIG.MUSIC_DIR

    # If we're appending a dir, it's coming from the post processor which is
    # already bytestring
    if not append or artistScan:
        dir = dir.encode(headphones.SYS_ENCODING)

    if not os.path.isdir(dir):
        logger.warn('Cannot find directory: %s. Not scanning' % dir.decode(headphones.SYS_ENCODING, 'replace'))
        return

    myDB = db.DBConnection()
    new_artists = []

    logger.info('Scanning music directory: %s' % dir.decode(headphones.SYS_ENCODING, 'replace'))

    if not append:
        # Clean up bad filepaths. Queries can take some time, ensure all
        # results are loaded before processing
        if ArtistID:
            tracks = myDB.action(
                'SELECT Location FROM alltracks WHERE ArtistID = ? AND Location IS NOT NULL UNION SELECT Location FROM tracks WHERE ArtistID = ? AND Location '
                'IS NOT NULL', [ArtistID, ArtistID])
        else:
            tracks = myDB.action(
                'SELECT Location FROM alltracks WHERE Location IS NOT NULL UNION SELECT Location FROM tracks WHERE Location IS NOT NULL'
            )

        locations = []
        for track in tracks:
            locations.append(track['Location'])
        for location in locations:
            encoded_track_string = location.encode(headphones.SYS_ENCODING, 'replace')
            if not os.path.isfile(encoded_track_string):
                # File vanished from disk: clear its location/bitrate/format
                myDB.action(
                    'UPDATE tracks SET Location=?, BitRate=?, Format=? WHERE Location=?',
                    [None, None, None, location])
                myDB.action(
                    'UPDATE alltracks SET Location=?, BitRate=?, Format=? WHERE Location=?',
                    [None, None, None, location])

        if ArtistName:
            del_have_tracks = myDB.select(
                'SELECT Location, Matched, ArtistName FROM have WHERE ArtistName = ? COLLATE NOCASE',
                [ArtistName])
        else:
            del_have_tracks = myDB.select(
                'SELECT Location, Matched, ArtistName FROM have')

        locations = []
        for track in del_have_tracks:
            locations.append([track['Location'], track['ArtistName']])
        for location in locations:
            encoded_track_string = location[0].encode(headphones.SYS_ENCODING, 'replace')
            if not os.path.isfile(encoded_track_string):
                if location[1]:
                    # Make sure deleted files get accounted for when updating
                    # artist track counts
                    new_artists.append(location[1])
                myDB.action('DELETE FROM have WHERE Location=?', [location[0]])
                logger.info(
                    'File %s removed from Headphones, as it is no longer on disk' %
                    encoded_track_string.decode(headphones.SYS_ENCODING, 'replace'))

    bitrates = []
    song_list = []
    latest_subdirectory = []

    new_song_count = 0
    file_count = 0

    for r, d, f in helpers.walk_directory(dir):
        # Filter paths based on config. Note that these methods work directly
        # on the inputs
        helpers.path_filter_patterns(d, headphones.CONFIG.IGNORED_FOLDERS, r)
        helpers.path_filter_patterns(f, headphones.CONFIG.IGNORED_FILES, r)

        for files in f:
            # MEDIA_FORMATS = music file extensions, e.g. mp3, flac, etc
            if any(files.lower().endswith('.' + x.lower()) for x in headphones.MEDIA_FORMATS):
                subdirectory = r.replace(dir, '')
                latest_subdirectory.append(subdirectory)

                # Log once per subdirectory change
                if file_count == 0 and r.replace(dir, '') != '':
                    logger.info(
                        "[%s] Now scanning subdirectory %s" %
                        (dir.decode(headphones.SYS_ENCODING, 'replace'),
                         subdirectory.decode(headphones.SYS_ENCODING, 'replace')))
                elif latest_subdirectory[file_count] != latest_subdirectory[
                        file_count - 1] and file_count != 0:
                    logger.info(
                        "[%s] Now scanning subdirectory %s" %
                        (dir.decode(headphones.SYS_ENCODING, 'replace'),
                         subdirectory.decode(headphones.SYS_ENCODING, 'replace')))

                song = os.path.join(r, files)

                # We need the unicode path to use for logging, inserting into
                # database
                unicode_song_path = song.decode(headphones.SYS_ENCODING, 'replace')

                # Try to read the metadata
                # NOTE(review): this rebinds the walk loop's file-list
                # variable `f`; the ongoing `for files in f` iteration is
                # unaffected, but it is confusing -- confirm before renaming.
                try:
                    f = MediaFile(song)
                except (FileTypeError, UnreadableFileError):
                    logger.warning(
                        "Cannot read media file '%s', skipping. It may be corrupted or not a media file.",
                        unicode_song_path)
                    continue
                except IOError:
                    logger.warning(
                        "Cannnot read media file '%s', skipping. Does the file exists?",
                        unicode_song_path)
                    continue

                # Grab the bitrates for the auto detect bit rate option
                if f.bitrate:
                    bitrates.append(f.bitrate)

                # Use the album artist over the artist if available
                if f.albumartist:
                    f_artist = f.albumartist
                elif f.artist:
                    f_artist = f.artist
                else:
                    f_artist = None

                # Add the song to our song list -
                # TODO: skip adding songs without the minimum requisite
                # information (just a matter of putting together the right if
                # statements)
                if f_artist and f.album and f.title:
                    CleanName = helpers.clean_name(f_artist + ' ' + f.album + ' ' + f.title)
                else:
                    CleanName = None

                controlValueDict = {'Location': unicode_song_path}
                newValueDict = {
                    'TrackID': f.mb_trackid,
                    # 'ReleaseID' : f.mb_albumid,
                    'ArtistName': f_artist,
                    'AlbumTitle': f.album,
                    'TrackNumber': f.track,
                    'TrackLength': f.length,
                    'Genre': f.genre,
                    'Date': f.date,
                    'TrackTitle': f.title,
                    'BitRate': f.bitrate,
                    'Format': f.format,
                    'CleanName': CleanName
                }

                # song_list.append(song_dict)
                check_exist_song = myDB.action(
                    "SELECT * FROM have WHERE Location=?",
                    [unicode_song_path]).fetchone()

                # Only attempt to match songs that are new, haven't yet been
                # matched, or metadata has changed.
                if not check_exist_song:
                    # This is a new track
                    if f_artist:
                        new_artists.append(f_artist)
                    myDB.upsert("have", newValueDict, controlValueDict)
                    new_song_count += 1
                else:
                    if check_exist_song[
                            'ArtistName'] != f_artist or check_exist_song[
                            'AlbumTitle'] != f.album or check_exist_song[
                            'TrackTitle'] != f.title:
                        # Important track metadata has been modified, need to
                        # run matcher again
                        if f_artist and f_artist != check_exist_song[
                                'ArtistName']:
                            new_artists.append(f_artist)
                        elif f_artist and f_artist == check_exist_song['ArtistName'] and \
                                check_exist_song['Matched'] != "Ignored":
                            new_artists.append(f_artist)
                        else:
                            continue
                        newValueDict['Matched'] = None
                        myDB.upsert("have", newValueDict, controlValueDict)
                        myDB.action(
                            'UPDATE tracks SET Location=?, BitRate=?, Format=? WHERE Location=?',
                            [None, None, None, unicode_song_path])
                        myDB.action(
                            'UPDATE alltracks SET Location=?, BitRate=?, Format=? WHERE Location=?',
                            [None, None, None, unicode_song_path])
                        new_song_count += 1
                    else:
                        # This track information hasn't changed
                        if f_artist and check_exist_song[
                                'Matched'] != "Ignored":
                            new_artists.append(f_artist)

                file_count += 1

    # Now we start track matching
    logger.info("%s new/modified songs found and added to the database" % new_song_count)

    song_list = myDB.action(
        "SELECT * FROM have WHERE Matched IS NULL AND LOCATION LIKE ?",
        [dir.decode(headphones.SYS_ENCODING, 'replace') + "%"])
    total_number_of_songs = \
        myDB.action("SELECT COUNT(*) FROM have WHERE Matched IS NULL AND LOCATION LIKE ?",
                    [dir.decode(headphones.SYS_ENCODING, 'replace') + "%"]).fetchone()[0]

    logger.info("Found " + str(total_number_of_songs) + " new/modified tracks in: '" +
                dir.decode(headphones.SYS_ENCODING, 'replace') +
                "'. Matching tracks to the appropriate releases....")

    # Sort the song_list by most vague (e.g. no trackid or releaseid) to most
    # specific (both trackid & releaseid)
    # When we insert into the database, the tracks with the most specific
    # information will overwrite the more general matches
    # song_list = helpers.multikeysort(song_list, ['ReleaseID', 'TrackID'])
    song_list = helpers.multikeysort(song_list, ['ArtistName', 'AlbumTitle'])

    # We'll use this to give a % completion, just because the track matching
    # might take a while
    song_count = 0
    latest_artist = []
    last_completion_percentage = 0
    prev_artist_name = None
    artistid = None

    for song in song_list:
        latest_artist.append(song['ArtistName'])
        if song_count == 0:
            logger.info("Now matching songs by %s" % song['ArtistName'])
        elif latest_artist[song_count] != latest_artist[song_count - 1] and song_count != 0:
            logger.info("Now matching songs by %s" % song['ArtistName'])

        song_count += 1
        completion_percentage = math.floor(
            float(song_count) / total_number_of_songs * 1000) / 10

        # Log progress in ~10% increments
        if completion_percentage >= (last_completion_percentage + 10):
            logger.info("Track matching is " + str(completion_percentage) + "% complete")
            last_completion_percentage = completion_percentage

        # THE "MORE-SPECIFIC" CLAUSES HERE HAVE ALL BEEN REMOVED. WHEN RUNNING A LIBRARY SCAN, THE ONLY CLAUSES THAT
        # EVER GOT HIT WERE [ARTIST/ALBUM/TRACK] OR CLEANNAME. ARTISTID & RELEASEID ARE NEVER PASSED TO THIS FUNCTION,
        # ARE NEVER FOUND, AND THE OTHER CLAUSES WERE NEVER HIT. FURTHERMORE, OTHER MATCHING FUNCTIONS IN THIS PROGRAM
        # (IMPORTER.PY, MB.PY) SIMPLY DO A [ARTIST/ALBUM/TRACK] OR CLEANNAME MATCH, SO IT'S ALL CONSISTENT.

        albumid = None

        if song['ArtistName'] and song['CleanName']:
            artist_name = song['ArtistName']
            clean_name = song['CleanName']

            # Only update if artist is in the db
            if artist_name != prev_artist_name:
                prev_artist_name = artist_name
                artistid = None

                artist_lookup = "\"" + artist_name.replace("\"", "\"\"") + "\""

                try:
                    dbartist = myDB.select(
                        'SELECT DISTINCT ArtistID, ArtistName FROM artists WHERE ArtistName LIKE ' + artist_lookup + '')
                except:
                    dbartist = None
                if not dbartist:
                    dbartist = myDB.select(
                        'SELECT DISTINCT ArtistID, ArtistName FROM tracks WHERE CleanName = ?',
                        [clean_name])
                if not dbartist:
                    dbartist = myDB.select(
                        'SELECT DISTINCT ArtistID, ArtistName FROM alltracks WHERE CleanName = ?',
                        [clean_name])
                if not dbartist:
                    clean_artist = helpers.clean_name(artist_name)
                    if clean_artist:
                        # Prefix range scan over CleanName ('{' is the first
                        # char after 'z' in ASCII) so the index can be used
                        dbartist = myDB.select(
                            'SELECT DISTINCT ArtistID, ArtistName FROM tracks WHERE CleanName >= ? and CleanName < ?',
                            [clean_artist, clean_artist + '{'])
                        if not dbartist:
                            dbartist = myDB.select(
                                'SELECT DISTINCT ArtistID, ArtistName FROM alltracks WHERE CleanName >= ? and CleanName < ?',
                                [clean_artist, clean_artist + '{'])

                if dbartist:
                    artistid = dbartist[0][0]

            if artistid:
                # This was previously using Artist, Album, Title with a SELECT LIKE ? and was not using an index
                # (Possible issue: https://stackoverflow.com/questions/37845854/python-sqlite3-not-using-index-with-like)
                # Now selects/updates using CleanName index (may have to revert if not working)
                # matching on CleanName should be enough, ensure it's the same artist just in case

                # Update tracks
                track = myDB.action(
                    'SELECT AlbumID, ArtistName FROM tracks WHERE CleanName = ? AND ArtistID = ?',
                    [clean_name, artistid]).fetchone()
                if track:
                    albumid = track['AlbumID']
                    myDB.action(
                        'UPDATE tracks SET Location = ?, BitRate = ?, Format = ? WHERE CleanName = ? AND ArtistID = ?',
                        [song['Location'], song['BitRate'], song['Format'], clean_name, artistid])

                # Update alltracks
                alltrack = myDB.action(
                    'SELECT AlbumID, ArtistName FROM alltracks WHERE CleanName = ? AND ArtistID = ?',
                    [clean_name, artistid]).fetchone()
                if alltrack:
                    albumid = alltrack['AlbumID']
                    myDB.action(
                        'UPDATE alltracks SET Location = ?, BitRate = ?, Format = ? WHERE CleanName = ? AND ArtistID = ?',
                        [song['Location'], song['BitRate'], song['Format'], clean_name, artistid])

        # Update have
        controlValueDict2 = {'Location': song['Location']}
        if albumid:
            newValueDict2 = {'Matched': albumid}
        else:
            newValueDict2 = {'Matched': "Failed"}
        myDB.upsert("have", newValueDict2, controlValueDict2)

        # myDB.action('INSERT INTO have (ArtistName, AlbumTitle, TrackNumber, TrackTitle, TrackLength, BitRate, Genre, Date, TrackID, Location, CleanName, Format) VALUES( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', [song['ArtistName'], song['AlbumTitle'], song['TrackNumber'], song['TrackTitle'], song['TrackLength'], song['BitRate'], song['Genre'], song['Date'], song['TrackID'], song['Location'], CleanName, song['Format']])

    logger.info('Completed matching tracks from directory: %s' % dir.decode(headphones.SYS_ENCODING, 'replace'))

    if not append or artistScan:
        logger.info('Updating scanned artist track counts')

        # Clean up the new artist list
        unique_artists = {}.fromkeys(new_artists).keys()

        # (An older commented-out reconciliation of scanned artists against
        # the artists table was removed here for clarity -- see VCS history.
        # The db is checked per-artist below instead; there was also a bug
        # where artists with special characters (-,') showed up as new.)

        new_artist_list = []
        for artist in unique_artists:

            if not artist:
                continue

            logger.info('Processing artist: %s' % artist)

            # check if artist is already in the db
            artist_lookup = "\"" + artist.replace("\"", "\"\"") + "\""

            try:
                dbartist = myDB.select(
                    'SELECT DISTINCT ArtistID, ArtistName FROM artists WHERE ArtistName LIKE ' + artist_lookup + '')
            except:
                dbartist = None
            if not dbartist:
                clean_artist = helpers.clean_name(artist)
                if clean_artist:
                    dbartist = myDB.select(
                        'SELECT DISTINCT ArtistID, ArtistName FROM tracks WHERE CleanName >= ? and CleanName < ?',
                        [clean_artist, clean_artist + '{'])
                    if not dbartist:
                        dbartist = myDB.select(
                            'SELECT DISTINCT ArtistID, ArtistName FROM alltracks WHERE CleanName >= ? and CleanName < ?',
                            [clean_artist, clean_artist + '{'])

            # new artist not in db, add to list
            if not dbartist:
                new_artist_list.append(artist)
            else:
                # artist in db, update have track counts
                artistid = dbartist[0][0]

                # Have tracks are selected from tracks table and not all
                # tracks because of duplicates
                # We update the track count upon an album switch to compliment
                # this
                try:
                    havetracks = (len(
                        myDB.select(
                            'SELECT ArtistID From tracks WHERE ArtistID = ? AND Location IS NOT NULL',
                            [artistid])
                    ) + len(
                        myDB.select(
                            'SELECT ArtistName FROM have WHERE ArtistName LIKE ' + artist_lookup + ' AND Matched = "Failed"')))
                except Exception as e:
                    # NOTE(review): if this raises, `havetracks` is undefined
                    # below -- confirm whether that path can actually occur.
                    logger.warn('Error updating counts for artist: %s: %s' % (artist, e))

                # Note: some people complain about having "artist have tracks" >
                # # of tracks total in artist official releases
                # (can fix by getting rid of second len statement)
                if havetracks:
                    myDB.action(
                        'UPDATE artists SET HaveTracks = ? WHERE ArtistID = ?',
                        [havetracks, artistid])

                    # Update albums to downloaded
                    update_album_status(ArtistID=artistid)

        logger.info('Found %i new artists' % len(new_artist_list))

        # Add scanned artists not in the db
        if new_artist_list:
            if headphones.CONFIG.AUTO_ADD_ARTISTS:
                logger.info('Importing %i new artists' % len(new_artist_list))
                importer.artistlist_to_mbids(new_artist_list)
            else:
                logger.info(
                    'To add these artists, go to Manage->Manage New Artists')
                # myDB.action('DELETE from newartists')
                for artist in new_artist_list:
                    myDB.action('INSERT OR IGNORE INTO newartists VALUES (?)',
                                [artist])

        if headphones.CONFIG.DETECT_BITRATE and bitrates:
            headphones.CONFIG.PREFERRED_BITRATE = sum(bitrates) / len(
                bitrates) / 1000

    else:
        # If we're appending a new album to the database, update the artists
        # total track counts
        logger.info('Updating artist track counts')

        artist_lookup = "\"" + ArtistName.replace("\"", "\"\"") + "\""

        try:
            havetracks = len(
                myDB.select(
                    'SELECT ArtistID FROM tracks WHERE ArtistID = ? AND Location IS NOT NULL',
                    [ArtistID])
            ) + len(
                myDB.select(
                    'SELECT ArtistName FROM have WHERE ArtistName LIKE ' + artist_lookup + ' AND Matched = "Failed"'))
        except Exception as e:
            logger.warn('Error updating counts for artist: %s: %s' % (ArtistName, e))

        if havetracks:
            myDB.action('UPDATE artists SET HaveTracks=? WHERE ArtistID=?',
                        [havetracks, ArtistID])

    # Moved above to call for each artist
    # if not append:
    # update_album_status()

    if not append and not artistScan:
        lastfm.getSimilar()

    if ArtistName:
        logger.info('Scanning complete for artist: %s', ArtistName)
    else:
        logger.info('Library scan complete')
def initialize():
    """Bootstrap Headphones' module-level configuration state (legacy variant).

    Loads every setting from the CFG config object into module globals,
    applies config-version migrations ('0' -> '4'), prepares the log and
    cache directories, verifies the database schema, determines the current
    and latest versions, and snapshots the process umask.

    Returns:
        False if the module was already initialized, True on success.
    """
    with INIT_LOCK:
        # Every configuration value is published as a module-level global;
        # the whole list must be declared here so the assignments below
        # rebind the module attributes rather than create locals.
        global __INITIALIZED__, FULL_PATH, PROG_DIR, VERBOSE, DAEMON, SYS_PLATFORM, DATA_DIR, CONFIG_FILE, CFG, CONFIG_VERSION, LOG_DIR, CACHE_DIR, \
            HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, HTTP_PROXY, LAUNCH_BROWSER, API_ENABLED, API_KEY, GIT_PATH, GIT_USER, GIT_BRANCH, \
            CURRENT_VERSION, LATEST_VERSION, CHECK_GITHUB, CHECK_GITHUB_ON_STARTUP, CHECK_GITHUB_INTERVAL, MUSIC_DIR, DESTINATION_DIR, \
            LOSSLESS_DESTINATION_DIR, PREFERRED_QUALITY, PREFERRED_BITRATE, DETECT_BITRATE, ADD_ARTISTS, CORRECT_METADATA, MOVE_FILES, \
            RENAME_FILES, FOLDER_FORMAT, FILE_FORMAT, CLEANUP_FILES, INCLUDE_EXTRAS, EXTRAS, AUTOWANT_UPCOMING, AUTOWANT_ALL, KEEP_TORRENT_FILES, \
            ADD_ALBUM_ART, ALBUM_ART_FORMAT, EMBED_ALBUM_ART, EMBED_LYRICS, DOWNLOAD_DIR, BLACKHOLE, BLACKHOLE_DIR, USENET_RETENTION, SEARCH_INTERVAL, \
            TORRENTBLACKHOLE_DIR, NUMBEROFSEEDERS, ISOHUNT, KAT, MININOVA, WAFFLES, WAFFLES_UID, WAFFLES_PASSKEY, \
            RUTRACKER, RUTRACKER_USER, RUTRACKER_PASSWORD, WHATCD, WHATCD_USERNAME, WHATCD_PASSWORD, DOWNLOAD_TORRENT_DIR, \
            LIBRARYSCAN, LIBRARYSCAN_INTERVAL, DOWNLOAD_SCAN_INTERVAL, SAB_HOST, SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, \
            NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_HOST, NZBMATRIX, NZBMATRIX_USERNAME, NZBMATRIX_APIKEY, NEWZNAB, NEWZNAB_HOST, NEWZNAB_APIKEY, NEWZNAB_ENABLED, EXTRA_NEWZNABS, \
            NZBSORG, NZBSORG_UID, NZBSORG_HASH, NEWZBIN, NEWZBIN_UID, NEWZBIN_PASSWORD, NZBSRUS, NZBSRUS_UID, NZBSRUS_APIKEY, NZBX, \
            NZB_DOWNLOADER, PREFERRED_WORDS, REQUIRED_WORDS, IGNORED_WORDS, \
            LASTFM_USERNAME, INTERFACE, FOLDER_PERMISSIONS, ENCODERFOLDER, ENCODER_PATH, ENCODER, XLDPROFILE, BITRATE, SAMPLINGFREQUENCY, \
            MUSIC_ENCODER, ADVANCEDENCODER, ENCODEROUTPUTFORMAT, ENCODERQUALITY, ENCODERVBRCBR, ENCODERLOSSLESS, DELETE_LOSSLESS_FILES, \
            PROWL_ENABLED, PROWL_PRIORITY, PROWL_KEYS, PROWL_ONSNATCH, PUSHOVER_ENABLED, PUSHOVER_PRIORITY, PUSHOVER_KEYS, PUSHOVER_ONSNATCH, MIRRORLIST, \
            MIRROR, CUSTOMHOST, CUSTOMPORT, CUSTOMSLEEP, HPUSER, HPPASS, XBMC_ENABLED, XBMC_HOST, XBMC_USERNAME, XBMC_PASSWORD, XBMC_UPDATE, \
            XBMC_NOTIFY, NMA_ENABLED, NMA_APIKEY, NMA_PRIORITY, NMA_ONSNATCH, SYNOINDEX_ENABLED, ALBUM_COMPLETION_PCT, PREFERRED_BITRATE_HIGH_BUFFER, \
            PREFERRED_BITRATE_LOW_BUFFER, PREFERRED_BITRATE_ALLOW_LOSSLESS, CACHE_SIZEMB, \
            UMASK

        # Only ever initialize once per process.
        if __INITIALIZED__:
            return False

        # Make sure all the config sections exist
        CheckSection('General')
        CheckSection('SABnzbd')
        CheckSection('NZBget')
        CheckSection('NZBMatrix')
        CheckSection('Newznab')
        CheckSection('NZBsorg')
        CheckSection('NZBsRus')
        CheckSection('nzbX')
        CheckSection('Newzbin')
        CheckSection('Waffles')
        CheckSection('Rutracker')
        CheckSection('What.cd')
        CheckSection('Prowl')
        CheckSection('Pushover')
        CheckSection('XBMC')
        CheckSection('NMA')
        CheckSection('Synoindex')
        CheckSection('Advanced')

        # Set global variables based on config file or use defaults
        CONFIG_VERSION = check_setting_str(CFG, 'General', 'config_version', '0')

        # Fall back to the default port on any parse failure, then clamp to
        # the usable (non-privileged-ish) range.
        try:
            HTTP_PORT = check_setting_int(CFG, 'General', 'http_port', 8181)
        except:
            HTTP_PORT = 8181

        if HTTP_PORT < 21 or HTTP_PORT > 65535:
            HTTP_PORT = 8181

        # --- Web server / API ---
        HTTP_HOST = check_setting_str(CFG, 'General', 'http_host', '0.0.0.0')
        HTTP_USERNAME = check_setting_str(CFG, 'General', 'http_username', '')
        HTTP_PASSWORD = check_setting_str(CFG, 'General', 'http_password', '')
        HTTP_ROOT = check_setting_str(CFG, 'General', 'http_root', '/')
        HTTP_PROXY = bool(check_setting_int(CFG, 'General', 'http_proxy', 0))
        LAUNCH_BROWSER = bool(check_setting_int(CFG, 'General', 'launch_browser', 1))
        API_ENABLED = bool(check_setting_int(CFG, 'General', 'api_enabled', 0))
        API_KEY = check_setting_str(CFG, 'General', 'api_key', '')

        # --- Git / updater ---
        GIT_PATH = check_setting_str(CFG, 'General', 'git_path', '')
        GIT_USER = check_setting_str(CFG, 'General', 'git_user', 'rembo10')
        GIT_BRANCH = check_setting_str(CFG, 'General', 'git_branch', 'master')
        LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', '')
        CACHE_DIR = check_setting_str(CFG, 'General', 'cache_dir', '')
        CHECK_GITHUB = bool(check_setting_int(CFG, 'General', 'check_github', 1))
        CHECK_GITHUB_ON_STARTUP = bool(check_setting_int(CFG, 'General', 'check_github_on_startup', 1))
        CHECK_GITHUB_INTERVAL = check_setting_int(CFG, 'General', 'check_github_interval', 360)

        # --- Library / post-processing ---
        MUSIC_DIR = check_setting_str(CFG, 'General', 'music_dir', '')
        DESTINATION_DIR = check_setting_str(CFG, 'General', 'destination_dir', '')
        LOSSLESS_DESTINATION_DIR = check_setting_str(CFG, 'General', 'lossless_destination_dir', '')
        PREFERRED_QUALITY = check_setting_int(CFG, 'General', 'preferred_quality', 0)
        PREFERRED_BITRATE = check_setting_str(CFG, 'General', 'preferred_bitrate', '')
        # NOTE(review): int readers below get '' as the default — presumably
        # check_setting_int tolerates a non-int default; confirm.
        PREFERRED_BITRATE_HIGH_BUFFER = check_setting_int(CFG, 'General', 'preferred_bitrate_high_buffer', '')
        PREFERRED_BITRATE_LOW_BUFFER = check_setting_int(CFG, 'General', 'preferred_bitrate_low_buffer', '')
        PREFERRED_BITRATE_ALLOW_LOSSLESS = bool(check_setting_int(CFG, 'General', 'preferred_bitrate_allow_lossless', 0))
        DETECT_BITRATE = bool(check_setting_int(CFG, 'General', 'detect_bitrate', 0))
        ADD_ARTISTS = bool(check_setting_int(CFG, 'General', 'auto_add_artists', 1))
        CORRECT_METADATA = bool(check_setting_int(CFG, 'General', 'correct_metadata', 0))
        MOVE_FILES = bool(check_setting_int(CFG, 'General', 'move_files', 0))
        RENAME_FILES = bool(check_setting_int(CFG, 'General', 'rename_files', 0))
        FOLDER_FORMAT = check_setting_str(CFG, 'General', 'folder_format', 'Artist/Album [Year]')
        FILE_FORMAT = check_setting_str(CFG, 'General', 'file_format', 'Track Artist - Album [Year]- Title')
        CLEANUP_FILES = bool(check_setting_int(CFG, 'General', 'cleanup_files', 0))
        ADD_ALBUM_ART = bool(check_setting_int(CFG, 'General', 'add_album_art', 0))
        ALBUM_ART_FORMAT = check_setting_str(CFG, 'General', 'album_art_format', 'folder')
        EMBED_ALBUM_ART = bool(check_setting_int(CFG, 'General', 'embed_album_art', 0))
        EMBED_LYRICS = bool(check_setting_int(CFG, 'General', 'embed_lyrics', 0))

        # --- Downloaders ---
        NZB_DOWNLOADER = check_setting_int(CFG, 'General', 'nzb_downloader', 0)
        DOWNLOAD_DIR = check_setting_str(CFG, 'General', 'download_dir', '')
        BLACKHOLE = bool(check_setting_int(CFG, 'General', 'blackhole', 0))
        BLACKHOLE_DIR = check_setting_str(CFG, 'General', 'blackhole_dir', '')
        # NOTE(review): default passed as the string '1500' to an int reader.
        USENET_RETENTION = check_setting_int(CFG, 'General', 'usenet_retention', '1500')
        INCLUDE_EXTRAS = bool(check_setting_int(CFG, 'General', 'include_extras', 0))
        EXTRAS = check_setting_str(CFG, 'General', 'extras', '')
        AUTOWANT_UPCOMING = bool(check_setting_int(CFG, 'General', 'autowant_upcoming', 1))
        AUTOWANT_ALL = bool(check_setting_int(CFG, 'General', 'autowant_all', 0))
        KEEP_TORRENT_FILES = bool(check_setting_int(CFG, 'General', 'keep_torrent_files', 0))
        SEARCH_INTERVAL = check_setting_int(CFG, 'General', 'search_interval', 1440)
        LIBRARYSCAN = bool(check_setting_int(CFG, 'General', 'libraryscan', 1))
        LIBRARYSCAN_INTERVAL = check_setting_int(CFG, 'General', 'libraryscan_interval', 300)
        DOWNLOAD_SCAN_INTERVAL = check_setting_int(CFG, 'General', 'download_scan_interval', 5)
        TORRENTBLACKHOLE_DIR = check_setting_str(CFG, 'General', 'torrentblackhole_dir', '')
        NUMBEROFSEEDERS = check_setting_str(CFG, 'General', 'numberofseeders', '10')
        ISOHUNT = bool(check_setting_int(CFG, 'General', 'isohunt', 0))
        KAT = bool(check_setting_int(CFG, 'General', 'kat', 0))
        MININOVA = bool(check_setting_int(CFG, 'General', 'mininova', 0))
        DOWNLOAD_TORRENT_DIR = check_setting_str(CFG, 'General', 'download_torrent_dir', '')

        # --- Torrent trackers ---
        WAFFLES = bool(check_setting_int(CFG, 'Waffles', 'waffles', 0))
        WAFFLES_UID = check_setting_str(CFG, 'Waffles', 'waffles_uid', '')
        WAFFLES_PASSKEY = check_setting_str(CFG, 'Waffles', 'waffles_passkey', '')
        RUTRACKER = bool(check_setting_int(CFG, 'Rutracker', 'rutracker', 0))
        RUTRACKER_USER = check_setting_str(CFG, 'Rutracker', 'rutracker_user', '')
        RUTRACKER_PASSWORD = check_setting_str(CFG, 'Rutracker', 'rutracker_password', '')
        WHATCD = bool(check_setting_int(CFG, 'What.cd', 'whatcd', 0))
        WHATCD_USERNAME = check_setting_str(CFG, 'What.cd', 'whatcd_username', '')
        WHATCD_PASSWORD = check_setting_str(CFG, 'What.cd', 'whatcd_password', '')

        # --- Usenet clients / indexers ---
        SAB_HOST = check_setting_str(CFG, 'SABnzbd', 'sab_host', '')
        SAB_USERNAME = check_setting_str(CFG, 'SABnzbd', 'sab_username', '')
        SAB_PASSWORD = check_setting_str(CFG, 'SABnzbd', 'sab_password', '')
        SAB_APIKEY = check_setting_str(CFG, 'SABnzbd', 'sab_apikey', '')
        SAB_CATEGORY = check_setting_str(CFG, 'SABnzbd', 'sab_category', '')
        NZBGET_USERNAME = check_setting_str(CFG, 'NZBget', 'nzbget_username', 'nzbget')
        NZBGET_PASSWORD = check_setting_str(CFG, 'NZBget', 'nzbget_password', '')
        NZBGET_CATEGORY = check_setting_str(CFG, 'NZBget', 'nzbget_category', '')
        NZBGET_HOST = check_setting_str(CFG, 'NZBget', 'nzbget_host', '')
        NZBMATRIX = bool(check_setting_int(CFG, 'NZBMatrix', 'nzbmatrix', 0))
        NZBMATRIX_USERNAME = check_setting_str(CFG, 'NZBMatrix', 'nzbmatrix_username', '')
        NZBMATRIX_APIKEY = check_setting_str(CFG, 'NZBMatrix', 'nzbmatrix_apikey', '')
        NEWZNAB = bool(check_setting_int(CFG, 'Newznab', 'newznab', 0))
        NEWZNAB_HOST = check_setting_str(CFG, 'Newznab', 'newznab_host', '')
        NEWZNAB_APIKEY = check_setting_str(CFG, 'Newznab', 'newznab_apikey', '')
        NEWZNAB_ENABLED = bool(check_setting_int(CFG, 'Newznab', 'newznab_enabled', 1))

        # Need to pack the extra newznabs back into a list of tuples
        # (stored flat as host, apikey, enabled, host, apikey, enabled, ...;
        # three interleaved islices re-group them into 3-tuples).
        flattened_newznabs = check_setting_str(CFG, 'Newznab', 'extra_newznabs', [], log=False)
        EXTRA_NEWZNABS = list(itertools.izip(*[itertools.islice(flattened_newznabs, i, None, 3) for i in range(3)]))

        NZBSORG = bool(check_setting_int(CFG, 'NZBsorg', 'nzbsorg', 0))
        NZBSORG_UID = check_setting_str(CFG, 'NZBsorg', 'nzbsorg_uid', '')
        NZBSORG_HASH = check_setting_str(CFG, 'NZBsorg', 'nzbsorg_hash', '')
        NEWZBIN = bool(check_setting_int(CFG, 'Newzbin', 'newzbin', 0))
        NEWZBIN_UID = check_setting_str(CFG, 'Newzbin', 'newzbin_uid', '')
        NEWZBIN_PASSWORD = check_setting_str(CFG, 'Newzbin', 'newzbin_password', '')
        NZBSRUS = bool(check_setting_int(CFG, 'NZBsRus', 'nzbsrus', 0))
        NZBSRUS_UID = check_setting_str(CFG, 'NZBsRus', 'nzbsrus_uid', '')
        NZBSRUS_APIKEY = check_setting_str(CFG, 'NZBsRus', 'nzbsrus_apikey', '')
        NZBX = bool(check_setting_int(CFG, 'nzbX', 'nzbx', 0))

        # --- Search filters / UI ---
        PREFERRED_WORDS = check_setting_str(CFG, 'General', 'preferred_words', '')
        IGNORED_WORDS = check_setting_str(CFG, 'General', 'ignored_words', '')
        REQUIRED_WORDS = check_setting_str(CFG, 'General', 'required_words', '')
        LASTFM_USERNAME = check_setting_str(CFG, 'General', 'lastfm_username', '')
        INTERFACE = check_setting_str(CFG, 'General', 'interface', 'default')
        FOLDER_PERMISSIONS = check_setting_str(CFG, 'General', 'folder_permissions', '0755')

        # --- Encoder ---
        ENCODERFOLDER = check_setting_str(CFG, 'General', 'encoderfolder', '')
        ENCODER_PATH = check_setting_str(CFG, 'General', 'encoder_path', '')
        ENCODER = check_setting_str(CFG, 'General', 'encoder', 'ffmpeg')
        XLDPROFILE = check_setting_str(CFG, 'General', 'xldprofile', '')
        BITRATE = check_setting_int(CFG, 'General', 'bitrate', 192)
        SAMPLINGFREQUENCY = check_setting_int(CFG, 'General', 'samplingfrequency', 44100)
        MUSIC_ENCODER = bool(check_setting_int(CFG, 'General', 'music_encoder', 0))
        ADVANCEDENCODER = check_setting_str(CFG, 'General', 'advancedencoder', '')
        ENCODEROUTPUTFORMAT = check_setting_str(CFG, 'General', 'encoderoutputformat', 'mp3')
        ENCODERQUALITY = check_setting_int(CFG, 'General', 'encoderquality', 2)
        ENCODERVBRCBR = check_setting_str(CFG, 'General', 'encodervbrcbr', 'cbr')
        ENCODERLOSSLESS = bool(check_setting_int(CFG, 'General', 'encoderlossless', 1))
        DELETE_LOSSLESS_FILES = bool(check_setting_int(CFG, 'General', 'delete_lossless_files', 1))

        # --- Notifications ---
        PROWL_ENABLED = bool(check_setting_int(CFG, 'Prowl', 'prowl_enabled', 0))
        PROWL_KEYS = check_setting_str(CFG, 'Prowl', 'prowl_keys', '')
        PROWL_ONSNATCH = bool(check_setting_int(CFG, 'Prowl', 'prowl_onsnatch', 0))
        PROWL_PRIORITY = check_setting_int(CFG, 'Prowl', 'prowl_priority', 0)
        XBMC_ENABLED = bool(check_setting_int(CFG, 'XBMC', 'xbmc_enabled', 0))
        XBMC_HOST = check_setting_str(CFG, 'XBMC', 'xbmc_host', '')
        XBMC_USERNAME = check_setting_str(CFG, 'XBMC', 'xbmc_username', '')
        XBMC_PASSWORD = check_setting_str(CFG, 'XBMC', 'xbmc_password', '')
        XBMC_UPDATE = bool(check_setting_int(CFG, 'XBMC', 'xbmc_update', 0))
        XBMC_NOTIFY = bool(check_setting_int(CFG, 'XBMC', 'xbmc_notify', 0))
        NMA_ENABLED = bool(check_setting_int(CFG, 'NMA', 'nma_enabled', 0))
        NMA_APIKEY = check_setting_str(CFG, 'NMA', 'nma_apikey', '')
        NMA_PRIORITY = check_setting_int(CFG, 'NMA', 'nma_priority', 0)
        NMA_ONSNATCH = bool(check_setting_int(CFG, 'NMA', 'nma_onsnatch', 0))
        SYNOINDEX_ENABLED = bool(check_setting_int(CFG, 'Synoindex', 'synoindex_enabled', 0))
        PUSHOVER_ENABLED = bool(check_setting_int(CFG, 'Pushover', 'pushover_enabled', 0))
        PUSHOVER_KEYS = check_setting_str(CFG, 'Pushover', 'pushover_keys', '')
        PUSHOVER_ONSNATCH = bool(check_setting_int(CFG, 'Pushover', 'pushover_onsnatch', 0))
        PUSHOVER_PRIORITY = check_setting_int(CFG, 'Pushover', 'pushover_priority', 0)

        # --- MusicBrainz mirror / advanced ---
        MIRROR = check_setting_str(CFG, 'General', 'mirror', 'musicbrainz.org')
        CUSTOMHOST = check_setting_str(CFG, 'General', 'customhost', 'localhost')
        CUSTOMPORT = check_setting_int(CFG, 'General', 'customport', 5000)
        CUSTOMSLEEP = check_setting_int(CFG, 'General', 'customsleep', 1)
        HPUSER = check_setting_str(CFG, 'General', 'hpuser', '')
        HPPASS = check_setting_str(CFG, 'General', 'hppass', '')
        CACHE_SIZEMB = check_setting_int(CFG, 'Advanced', 'cache_sizemb', 32)
        ALBUM_COMPLETION_PCT = check_setting_int(CFG, 'Advanced', 'album_completion_pct', 80)

        # update folder formats in the config & bump up config version
        # These migrations are sequential: a config at version '0' falls
        # through every stage below and ends at '4'.
        if CONFIG_VERSION == '0':
            from headphones.helpers import replace_all
            file_values = {
                'tracknumber': 'Track',
                'title': 'Title',
                'artist': 'Artist',
                'album': 'Album',
                'year': 'Year'
            }
            folder_values = {
                'artist': 'Artist',
                'album': 'Album',
                'year': 'Year',
                'releasetype': 'Type',
                'first': 'First',
                'lowerfirst': 'first'
            }
            FILE_FORMAT = replace_all(FILE_FORMAT, file_values)
            FOLDER_FORMAT = replace_all(FOLDER_FORMAT, folder_values)
            CONFIG_VERSION = '1'

        if CONFIG_VERSION == '1':
            from headphones.helpers import replace_all
            file_values = {
                'Track': '$Track',
                'Title': '$Title',
                'Artist': '$Artist',
                'Album': '$Album',
                'Year': '$Year',
                'track': '$track',
                'title': '$title',
                'artist': '$artist',
                'album': '$album',
                'year': '$year'
            }
            folder_values = {
                'Artist': '$Artist',
                'Album': '$Album',
                'Year': '$Year',
                'Type': '$Type',
                'First': '$First',
                'artist': '$artist',
                'album': '$album',
                'year': '$year',
                'type': '$type',
                'first': '$first'
            }
            FILE_FORMAT = replace_all(FILE_FORMAT, file_values)
            FOLDER_FORMAT = replace_all(FOLDER_FORMAT, folder_values)
            CONFIG_VERSION = '2'

        if CONFIG_VERSION == '2':
            # Update the config to use direct path to the encoder rather than the encoder folder
            if ENCODERFOLDER:
                ENCODER_PATH = os.path.join(ENCODERFOLDER, ENCODER)
            CONFIG_VERSION = '3'

        if CONFIG_VERSION == '3':
            #Update the BLACKHOLE option to the NZB_DOWNLOADER format
            if BLACKHOLE:
                NZB_DOWNLOADER = 2
            CONFIG_VERSION = '4'

        # Fall back to <datadir>/logs; on failure logging stays console-only.
        if not LOG_DIR:
            LOG_DIR = os.path.join(DATA_DIR, 'logs')

        if not os.path.exists(LOG_DIR):
            try:
                os.makedirs(LOG_DIR)
            except OSError:
                if VERBOSE:
                    print 'Unable to create the log directory. Logging to screen only.'

        # Start the logger, silence console logging if we need to
        logger.headphones_log.initLogger(verbose=VERBOSE)

        if not CACHE_DIR:
            # Put the cache dir in the data dir for now
            CACHE_DIR = os.path.join(DATA_DIR, 'cache')

        if not os.path.exists(CACHE_DIR):
            try:
                os.makedirs(CACHE_DIR)
            except OSError:
                logger.error('Could not create cache dir. Check permissions of datadir: ' + DATA_DIR)

        # Sanity check for search interval. Set it to at least 6 hours
        if SEARCH_INTERVAL < 360:
            logger.info("Search interval too low. Resetting to 6 hour minimum")
            SEARCH_INTERVAL = 360

        # Initialize the database
        logger.info('Checking to see if the database has all tables....')
        try:
            dbcheck()
        except Exception, e:
            logger.error("Can't connect to the database: %s" % e)

        # Get the currently installed version - returns None, 'win32' or the git hash
        # Also sets INSTALL_TYPE variable to 'win', 'git' or 'source'
        CURRENT_VERSION = versioncheck.getVersion()

        # Check for new versions
        if CHECK_GITHUB_ON_STARTUP:
            try:
                LATEST_VERSION = versioncheck.checkGithub()
            except:
                LATEST_VERSION = CURRENT_VERSION
        else:
            LATEST_VERSION = CURRENT_VERSION

        # Store the original umask
        # (os.umask both sets and returns the previous value, so set-and-restore
        # is the only way to read it)
        UMASK = os.umask(0)
        os.umask(UMASK)

        __INITIALIZED__ = True
        return True
def initialize(config_file):
    """Bootstrap Headphones from *config_file* (config-object variant).

    Builds the global CONFIG object, validates port/cert/log/cache settings,
    starts logging, sets up the soft chroot, verifies the database, records
    the working version to a lock file, and checks GitHub for updates.

    Args:
        config_file: path to the configuration file to load.

    Returns:
        False if already initialized, True on success.

    Raises:
        headphones.exceptions.SoftChrootError: if the soft-chroot directory
            cannot be set up (re-raised after logging).
    """
    with INIT_LOCK:
        global CONFIG
        global SOFT_CHROOT
        global _INITIALIZED
        global CURRENT_VERSION
        global LATEST_VERSION
        global UMASK

        CONFIG = headphones.config.Config(config_file)

        assert CONFIG is not None

        # Only ever initialize once per process.
        if _INITIALIZED:
            return False

        # Clamp an out-of-range HTTP port back to the default.
        if CONFIG.HTTP_PORT < 21 or CONFIG.HTTP_PORT > 65535:
            headphones.logger.warn('HTTP_PORT out of bounds: 21 < %s < 65535', CONFIG.HTTP_PORT)
            CONFIG.HTTP_PORT = 8181

        # Default HTTPS cert/key to files inside the data directory.
        if CONFIG.HTTPS_CERT == '':
            CONFIG.HTTPS_CERT = os.path.join(DATA_DIR, 'server.crt')
        if CONFIG.HTTPS_KEY == '':
            CONFIG.HTTPS_KEY = os.path.join(DATA_DIR, 'server.key')

        if not CONFIG.LOG_DIR:
            CONFIG.LOG_DIR = os.path.join(DATA_DIR, 'logs')

        # A log dir of None means console-only logging.
        if not os.path.exists(CONFIG.LOG_DIR):
            try:
                os.makedirs(CONFIG.LOG_DIR)
            except OSError:
                CONFIG.LOG_DIR = None

                if not QUIET:
                    sys.stderr.write("Unable to create the log directory. "
                                     "Logging to screen only.\n")

        # Start the logger, disable console if needed
        logger.initLogger(console=not QUIET, log_dir=CONFIG.LOG_DIR,
                          verbose=VERBOSE)

        try:
            SOFT_CHROOT = SoftChroot(str(CONFIG.SOFT_CHROOT))
            if SOFT_CHROOT.isEnabled():
                logger.info("Soft-chroot enabled for dir: %s", str(CONFIG.SOFT_CHROOT))
        except headphones.exceptions.SoftChrootError as e:
            logger.error("SoftChroot error: %s", e)
            raise e

        if not CONFIG.CACHE_DIR:
            # Put the cache dir in the data dir for now
            CONFIG.CACHE_DIR = os.path.join(DATA_DIR, 'cache')
        if not os.path.exists(CONFIG.CACHE_DIR):
            try:
                os.makedirs(CONFIG.CACHE_DIR)
            except OSError as e:
                logger.error("Could not create cache dir '%s': %s", DATA_DIR, e)

        # Sanity check for search interval. Set it to at least 6 hours
        if CONFIG.SEARCH_INTERVAL and CONFIG.SEARCH_INTERVAL < 360:
            logger.info("Search interval too low. Resetting to 6 hour minimum.")
            CONFIG.SEARCH_INTERVAL = 360

        # Initialize the database
        logger.info('Checking to see if the database has all tables....')
        try:
            dbcheck()
        except Exception as e:
            logger.error("Can't connect to the database: %s", e)

        # Get the currently installed version. Returns None, 'win32' or the git
        # hash.
        CURRENT_VERSION, CONFIG.GIT_BRANCH = versioncheck.getVersion()

        # Write current version to a file, so we know which version did work.
        # This allowes one to restore to that version. The idea is that if we
        # arrive here, most parts of Headphones seem to work.
        if CURRENT_VERSION:
            version_lock_file = os.path.join(DATA_DIR, "version.lock")

            try:
                with open(version_lock_file, "w") as fp:
                    fp.write(CURRENT_VERSION)
            except IOError as e:
                logger.error("Unable to write current version to file '%s': %s",
                             version_lock_file, e)

        # Check for new versions
        if CONFIG.CHECK_GITHUB and CONFIG.CHECK_GITHUB_ON_STARTUP:
            try:
                LATEST_VERSION = versioncheck.checkGithub()
            except:
                logger.exception("Unhandled exception")
                LATEST_VERSION = CURRENT_VERSION
        else:
            LATEST_VERSION = CURRENT_VERSION

        # Store the original umask
        # (os.umask both sets and returns the previous value, so set-and-restore
        # is the only way to read it)
        UMASK = os.umask(0)
        os.umask(UMASK)

        _INITIALIZED = True
        return True
except OSError, e: raise RuntimeError("2nd fork failed: %s [%d]", e.strerror, e.errno) dev_null = file('/dev/null', 'r') os.dup2(dev_null.fileno(), sys.stdin.fileno()) si = open('/dev/null', "r") so = open('/dev/null', "a+") se = open('/dev/null', "a+") os.dup2(si.fileno(), sys.stdin.fileno()) os.dup2(so.fileno(), sys.stdout.fileno()) os.dup2(se.fileno(), sys.stderr.fileno()) pid = os.getpid() logger.info('Daemonized to PID: %d', pid) if CREATEPID: logger.info("Writing PID %d to %s", pid, PIDFILE) with file(PIDFILE, 'w') as fp: fp.write("%s\n" % pid) def launch_browser(host, port, root): if host == '0.0.0.0': host = 'localhost' if CONFIG.ENABLE_HTTPS: protocol = 'https' else: protocol = 'http'
def checkGithub():
    """Query GitHub for the newest commit on the configured branch and work
    out how many commits the running install is behind it.

    Side effects: updates headphones.LATEST_VERSION and
    headphones.COMMITS_BEHIND. Returns the version string callers should
    treat as the latest known version.
    """
    headphones.COMMITS_BEHIND = 0

    # Get the latest version available from github
    logger.info('Retrieving latest version information from GitHub')
    commit_url = 'https://api.github.com/repos/%s/headphones/commits/%s' % (
        headphones.GIT_USER, headphones.GIT_BRANCH)
    version = request.request_json(commit_url, timeout=20,
                                   validator=lambda payload: type(payload) == dict)

    if version is None:
        logger.warn(
            'Could not get the latest version from GitHub. Are you running a local development version?'
        )
        return headphones.CURRENT_VERSION

    headphones.LATEST_VERSION = version['sha']
    logger.debug("Latest version is %s", headphones.LATEST_VERSION)

    # Without a known local version there is nothing to compare against.
    if not headphones.CURRENT_VERSION:
        logger.info(
            'You are running an unknown version of Headphones. Run the updater to identify your version'
        )
        return headphones.LATEST_VERSION

    # Identical hashes: nothing more to do.
    if headphones.LATEST_VERSION == headphones.CURRENT_VERSION:
        logger.info('Headphones is up to date')
        return headphones.LATEST_VERSION

    logger.info(
        'Comparing currently installed version with latest GitHub version')
    compare_url = 'https://api.github.com/repos/%s/headphones/compare/%s...%s' % (
        headphones.GIT_USER, headphones.LATEST_VERSION,
        headphones.CURRENT_VERSION)
    commits = request.request_json(compare_url, timeout=20,
                                   whitelist_status_code=404,
                                   validator=lambda payload: type(payload) == dict)

    if commits is None:
        logger.warn('Could not get commits behind from GitHub.')
        return headphones.LATEST_VERSION

    # behind_by = commits CURRENT_VERSION (head) lags LATEST_VERSION (base).
    try:
        headphones.COMMITS_BEHIND = int(commits['behind_by'])
        logger.debug("In total, %d commits behind", headphones.COMMITS_BEHIND)
    except KeyError:
        logger.info(
            'Cannot compare versions. Are you running a local development version?'
        )
        headphones.COMMITS_BEHIND = 0

    if headphones.COMMITS_BEHIND > 0:
        logger.info('New version is available. You are %s commits behind' %
                    headphones.COMMITS_BEHIND)
    elif headphones.COMMITS_BEHIND == 0:
        logger.info('Headphones is up to date')

    return headphones.LATEST_VERSION
def forcePostProcess():
    """Scan the configured download directories and post-process every
    album folder found there.

    Each folder is matched against, in order: the snatched table, a parsed
    'Artist - Album [Year]' folder name against the albums table, a
    MusicBrainz lookup, and finally a trailing release-group UUID in the
    folder name. Matches are handed off to verify() for processing.
    """
    download_dirs = []
    # Paths are byte strings in SYS_ENCODING here; folder names are decoded
    # back to unicode per item below (Python 2 str/unicode handling).
    if headphones.DOWNLOAD_DIR:
        download_dirs.append(
            headphones.DOWNLOAD_DIR.encode(headphones.SYS_ENCODING, 'replace'))
    if headphones.DOWNLOAD_TORRENT_DIR:
        download_dirs.append(
            headphones.DOWNLOAD_TORRENT_DIR.encode(headphones.SYS_ENCODING,
                                                   'replace'))

    # If DOWNLOAD_DIR and DOWNLOAD_TORRENT_DIR are the same, remove the duplicate to prevent us from trying to process the same folder twice.
    download_dirs = list(set(download_dirs))

    logger.info(
        'Checking to see if there are any folders to process in download_dir(s): %s'
        % str(download_dirs).decode(headphones.SYS_ENCODING, 'replace'))

    # Get a list of folders in the download_dir
    folders = []

    for download_dir in download_dirs:
        for folder in os.listdir(download_dir):
            path_to_folder = os.path.join(download_dir, folder)
            if os.path.isdir(path_to_folder):
                folders.append(path_to_folder)

    if len(folders):
        logger.info('Found %i folders to process' % len(folders))
    else:
        logger.info(
            'Found no folders to process in: %s'
            % str(download_dirs).decode(headphones.SYS_ENCODING, 'replace'))

    # Parse the folder names to get artist album info
    myDB = db.DBConnection()

    for folder in folders:
        folder_basename = os.path.basename(folder).decode(
            headphones.SYS_ENCODING, 'replace')
        logger.info('Processing: %s' % folder_basename)

        # First try to see if there's a match in the snatched table, then we'll try to parse the foldername
        # TODO: Iterate through underscores -> spaces, spaces -> dots, underscores -> dots (this might be hit or miss since it assumes
        # all spaces/underscores came from sab replacing values
        snatched = myDB.action(
            'SELECT AlbumID, Title, Kind, Status from snatched WHERE FolderName LIKE ?',
            [folder_basename]).fetchone()

        if snatched:
            # Seeding torrents stay on disk and must not be reprocessed.
            if headphones.KEEP_TORRENT_FILES and snatched[
                    'Kind'] == 'torrent' and snatched['Status'] == 'Processed':
                logger.info(
                    folder_basename +
                    ' is a torrent folder being preserved for seeding and has already been processed. Skipping.'
                )
                continue
            else:
                logger.info(
                    'Found a match in the database: %s. Verifying to make sure it is the correct album'
                    % snatched['Title'])
                verify(snatched['AlbumID'], folder, snatched['Kind'])
                continue

        # Try to parse the folder name into a valid format
        # TODO: Add metadata lookup
        try:
            name, album, year = helpers.extract_data(folder_basename)
        except:
            # Parse failure: fall through to the UUID-suffix attempt below.
            name = None

        if name and album and year:
            # Look the parsed artist/album pair up in the local database first.
            release = myDB.action(
                'SELECT AlbumID, ArtistName, AlbumTitle from albums WHERE ArtistName LIKE ? and AlbumTitle LIKE ?',
                [name, album]).fetchone()
            if release:
                logger.info(
                    'Found a match in the database: %s - %s. Verifying to make sure it is the correct album'
                    % (release['ArtistName'], release['AlbumTitle']))
                verify(release['AlbumID'], folder)
            else:
                logger.info(
                    'Querying MusicBrainz for the release group id for: %s - %s'
                    % (name, album))
                from headphones import mb
                try:
                    rgid = mb.findAlbumID(helpers.latinToAscii(name),
                                          helpers.latinToAscii(album))
                except:
                    logger.error(
                        'Can not get release information for this album')
                    continue
                if rgid:
                    verify(rgid, folder)
                else:
                    logger.info('No match found on MusicBrainz for: %s - %s'
                                % (name, album))
                    continue
        else:
            # Last resort: the folder name may end with a MusicBrainz
            # release-group UUID (36 chars); uuid.UUID() validates it.
            try:
                possible_rgid = folder_basename[-36:]
                # re pattern match: [0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}
                rgid = uuid.UUID(possible_rgid)
            except:
                logger.info(
                    "Couldn't parse " + folder_basename +
                    " into any valid format. If adding albums from another source, they must be in an 'Artist - Album [Year]' format, or end with the musicbrainz release group id"
                )
                continue
            if rgid:
                # Keep the original string form, not the UUID object, for the
                # database lookup and verify() call.
                rgid = possible_rgid
                release = myDB.action(
                    'SELECT ArtistName, AlbumTitle, AlbumID from albums WHERE AlbumID=?',
                    [rgid]).fetchone()
                if release:
                    logger.info(
                        'Found a match in the database: %s - %s. Verifying to make sure it is the correct album'
                        % (release['ArtistName'], release['AlbumTitle']))
                    verify(release['AlbumID'], folder)
                else:
                    logger.info(
                        'Found a (possibly) valid Musicbrainz identifier in album folder name - continuing post-processing'
                    )
                    verify(rgid, folder)
def getXldProfile(xldProfile):
    """Look up an XLD (X Lossless Decoder, macOS) encoder profile by name.

    Reads ~/Library/Preferences/jp.tmkk.XLD.plist and searches its
    'Profiles' list case-insensitively for *xldProfile*, then maps the
    profile's output format to a file extension and an (estimated)
    bitrate in kbps.

    Args:
        xldProfile: profile name as configured in XLD.

    Returns:
        (profile_name_for_cmd, format, bitrate) on a match; on failure the
        original name with (None, None) for format/bitrate.
    """
    xldProfileNotFound = xldProfile
    expandedPath = os.path.expanduser(
        '~/Library/Preferences/jp.tmkk.XLD.plist')
    try:
        preferences = plistlib.Plist.fromFile(expandedPath)
    except (expat.ExpatError):
        # Binary plist: convert to XML in place with plutil and retry.
        os.system("/usr/bin/plutil -convert xml1 %s" % expandedPath)
        try:
            preferences = plistlib.Plist.fromFile(expandedPath)
        except (ImportError):
            # Restore the binary form before giving up.
            os.system("/usr/bin/plutil -convert binary1 %s" % expandedPath)
            logger.info(
                'The plist at "%s" has a date in it, and therefore is not useable.'
                % expandedPath)
            return (xldProfileNotFound, None, None)
    except (ImportError):
        # NOTE(review): ImportError as the "date in plist" signal looks
        # suspect, and this branch falls through with `preferences` unset —
        # confirm the intended exception type.
        logger.info(
            'The plist at "%s" has a date in it, and therefore is not useable.'
            % expandedPath)
    except:
        # NOTE(review): logger.info is given a second positional argument
        # here; with no %s in the message it is likely dropped — verify.
        logger.info('Unexpected error:', sys.exc_info()[0])
        return (xldProfileNotFound, None, None)

    # Profile names are compared case-insensitively.
    xldProfile = xldProfile.lower()

    profiles = preferences.get('Profiles')
    for profile in profiles:
        profilename = profile.get('XLDProfileManager_ProfileName')
        # Keep the original-cased name for the eventual XLD command line.
        xldProfileForCmd = profilename
        profilename = profilename.lower()
        xldFormat = None
        xldBitrate = None

        if profilename == xldProfile:
            OutputFormatName = profile.get('OutputFormatName')
            ShortDesc = profile.get('ShortDesc')

            # Determine format and bitrate
            if OutputFormatName == 'WAV':
                xldFormat = 'wav'
            elif OutputFormatName == 'AIFF':
                xldFormat = 'aiff'
            elif 'PCM' in OutputFormatName:
                xldFormat = 'pcm'
            elif OutputFormatName == 'Wave64':
                xldFormat = 'w64'

            elif OutputFormatName == 'MPEG-4 AAC':
                xldFormat = 'm4a'
                if 'CBR' in ShortDesc or 'ABR' in ShortDesc or 'CVBR' in ShortDesc:
                    xldBitrate = int(profile.get('XLDAacOutput2_Bitrate'))
                elif 'TVBR' in ShortDesc:
                    # Map the TVBR quality index (0-127) onto an approximate
                    # bitrate in kbps.
                    XLDAacOutput2_VBRQuality = int(
                        profile.get('XLDAacOutput2_VBRQuality'))
                    if XLDAacOutput2_VBRQuality > 122:
                        xldBitrate = 320
                    elif XLDAacOutput2_VBRQuality > 113 and XLDAacOutput2_VBRQuality <= 122:
                        xldBitrate = 285
                    elif XLDAacOutput2_VBRQuality > 104 and XLDAacOutput2_VBRQuality <= 113:
                        xldBitrate = 255
                    elif XLDAacOutput2_VBRQuality > 95 and XLDAacOutput2_VBRQuality <= 104:
                        xldBitrate = 225
                    elif XLDAacOutput2_VBRQuality > 86 and XLDAacOutput2_VBRQuality <= 95:
                        xldBitrate = 195
                    elif XLDAacOutput2_VBRQuality > 77 and XLDAacOutput2_VBRQuality <= 86:
                        xldBitrate = 165
                    elif XLDAacOutput2_VBRQuality > 68 and XLDAacOutput2_VBRQuality <= 77:
                        xldBitrate = 150
                    elif XLDAacOutput2_VBRQuality > 58 and XLDAacOutput2_VBRQuality <= 68:
                        xldBitrate = 135
                    elif XLDAacOutput2_VBRQuality > 49 and XLDAacOutput2_VBRQuality <= 58:
                        xldBitrate = 115
                    elif XLDAacOutput2_VBRQuality > 40 and XLDAacOutput2_VBRQuality <= 49:
                        xldBitrate = 105
                    elif XLDAacOutput2_VBRQuality > 31 and XLDAacOutput2_VBRQuality <= 40:
                        xldBitrate = 95
                    elif XLDAacOutput2_VBRQuality > 22 and XLDAacOutput2_VBRQuality <= 31:
                        xldBitrate = 80
                    elif XLDAacOutput2_VBRQuality > 13 and XLDAacOutput2_VBRQuality <= 22:
                        xldBitrate = 75
                    elif XLDAacOutput2_VBRQuality > 4 and XLDAacOutput2_VBRQuality <= 13:
                        xldBitrate = 45
                    elif XLDAacOutput2_VBRQuality >= 0 and XLDAacOutput2_VBRQuality <= 4:
                        xldBitrate = 40

            elif OutputFormatName == 'Apple Lossless':
                xldFormat = 'm4a'
            elif OutputFormatName == 'FLAC':
                if 'ogg' in ShortDesc:
                    xldFormat = 'oga'
                else:
                    xldFormat = 'flac'
            elif OutputFormatName == 'MPEG-4 HE-AAC':
                xldFormat = 'm4a'
                xldBitrate = int(profile.get('Bitrate'))

            elif OutputFormatName == 'LAME MP3':
                xldFormat = 'mp3'
                if 'VBR' in ShortDesc:
                    # LAME -V quality (0 best .. 9 worst) -> approximate kbps.
                    VbrQuality = float(profile.get('VbrQuality'))
                    if VbrQuality < 1:
                        xldBitrate = 260
                    elif VbrQuality >= 1 and VbrQuality < 2:
                        xldBitrate = 250
                    elif VbrQuality >= 2 and VbrQuality < 3:
                        xldBitrate = 210
                    elif VbrQuality >= 3 and VbrQuality < 4:
                        xldBitrate = 195
                    elif VbrQuality >= 4 and VbrQuality < 5:
                        xldBitrate = 185
                    elif VbrQuality >= 5 and VbrQuality < 6:
                        xldBitrate = 150
                    elif VbrQuality >= 6 and VbrQuality < 7:
                        xldBitrate = 130
                    elif VbrQuality >= 7 and VbrQuality < 8:
                        xldBitrate = 120
                    elif VbrQuality >= 8 and VbrQuality < 9:
                        xldBitrate = 105
                    elif VbrQuality >= 9:
                        xldBitrate = 85
                elif 'CBR' in ShortDesc:
                    xldBitrate = int(profile.get('Bitrate'))
                elif 'ABR' in ShortDesc:
                    xldBitrate = int(profile.get('AbrBitrate'))

            elif OutputFormatName == 'Opus':
                xldFormat = 'opus'
                xldBitrate = int(profile.get('XLDOpusOutput_Bitrate'))

            elif OutputFormatName == 'Ogg Vorbis':
                xldFormat = 'ogg'
                # Vorbis quality (-2 .. 10) -> approximate kbps.
                XLDVorbisOutput_Quality = float(
                    profile.get('XLDVorbisOutput_Quality'))
                if XLDVorbisOutput_Quality <= -2:
                    xldBitrate = 32
                elif XLDVorbisOutput_Quality > -2 and XLDVorbisOutput_Quality <= -1:
                    xldBitrate = 48
                elif XLDVorbisOutput_Quality > -1 and XLDVorbisOutput_Quality <= 0:
                    xldBitrate = 64
                elif XLDVorbisOutput_Quality > 0 and XLDVorbisOutput_Quality <= 1:
                    xldBitrate = 80
                elif XLDVorbisOutput_Quality > 1 and XLDVorbisOutput_Quality <= 2:
                    xldBitrate = 96
                elif XLDVorbisOutput_Quality > 2 and XLDVorbisOutput_Quality <= 3:
                    xldBitrate = 112
                elif XLDVorbisOutput_Quality > 3 and XLDVorbisOutput_Quality <= 4:
                    xldBitrate = 128
                elif XLDVorbisOutput_Quality > 4 and XLDVorbisOutput_Quality <= 5:
                    xldBitrate = 160
                elif XLDVorbisOutput_Quality > 5 and XLDVorbisOutput_Quality <= 6:
                    xldBitrate = 192
                elif XLDVorbisOutput_Quality > 6 and XLDVorbisOutput_Quality <= 7:
                    xldBitrate = 224
                elif XLDVorbisOutput_Quality > 7 and XLDVorbisOutput_Quality <= 8:
                    xldBitrate = 256
                elif XLDVorbisOutput_Quality > 8 and XLDVorbisOutput_Quality <= 9:
                    xldBitrate = 320
                elif XLDVorbisOutput_Quality > 9:
                    xldBitrate = 500

            elif OutputFormatName == 'WavPack':
                xldFormat = 'wv'
                if ShortDesc != 'normal':
                    xldBitrate = int(profile.get('XLDWavpackOutput_BitRate'))

            # Lossless
            # (a recognized format with no bitrate is treated as lossless
            # and pegged at 500)
            if xldFormat and not xldBitrate:
                xldBitrate = 500

            return (xldProfileForCmd, xldFormat, xldBitrate)

    return (xldProfileNotFound, None, None)