def fetchURL(URL, headers=None, retry=True):
    """ Return the result of fetching a URL and True if success
        Otherwise return error message and False
        Allow one retry on timeout by default"""
    if headers is None:
        # some sites insist on having a user-agent, default is to add one
        # if you don't want any headers, send headers=[]
        headers = {'User-Agent': USER_AGENT}
    proxies = proxyList()
    try:
        timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
        r = requests.get(URL, headers=headers, timeout=timeout, proxies=proxies)
        if str(r.status_code).startswith('2'):  # (200 OK etc)
            return r.content, True
        # noinspection PyBroadException
        try:
            # noinspection PyProtectedMember
            msg = requests.status_codes._codes[r.status_code][0]
        except Exception:
            msg = str(r.content)
        return "Response status %s: %s" % (r.status_code, msg), False
    except requests.exceptions.Timeout as e:
        if not retry:
            logger.error(u"fetchURL: Timeout getting response from %s" % URL)
            return "Timeout %s" % str(e), False
        logger.debug(u"fetchURL: retrying - got timeout on %s" % URL)
        result, success = fetchURL(URL, headers=headers, retry=False)
        return result, success
    except Exception as e:
        if hasattr(e, 'reason'):
            return "Exception %s: Reason: %s" % (type(e).__name__, str(e.reason)), False
        return "Exception %s: %s" % (type(e).__name__, str(e)), False
def getCommitDifferenceFromGit():
    # See how many commits behind we are
    # Takes current latest version value and tries to diff it with the latest version in the current branch.
    commit_list = ''
    commits = -1
    if lazylibrarian.CONFIG['LATEST_VERSION'] == 'Not_Available_From_GitHUB':
        commits = 0  # don't report a commit diff as we don't know anything
        commit_list = 'Unable to get latest version from GitHub'
        logmsg('info', commit_list)
    elif lazylibrarian.CONFIG['CURRENT_VERSION'] and commits != 0:
        url = 'https://api.github.com/repos/%s/LazyLibrarian/compare/%s...%s' % (
            lazylibrarian.CONFIG['GIT_USER'], lazylibrarian.CONFIG['CURRENT_VERSION'],
            lazylibrarian.CONFIG['LATEST_VERSION'])
        logmsg('debug', 'Check for differences between local & repo by [%s]' % url)
        try:
            headers = {'User-Agent': USER_AGENT}
            proxies = proxyList()
            timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
            r = requests.get(url, timeout=timeout, headers=headers, proxies=proxies)
            git = r.json()
            if 'total_commits' in git:
                commits = int(git['total_commits'])
                msg = 'Github: Status [%s] - Ahead [%s] - Behind [%s] - Total Commits [%s]' % (
                    git['status'], git['ahead_by'], git['behind_by'], git['total_commits'])
                logmsg('debug', msg)
            else:
                logmsg('warn', 'Could not get difference status from GitHub: %s' % str(git))
            if commits > 0:
                for item in git['commits']:
                    commit_list = "%s\n%s" % (item['commit']['message'], commit_list)
        except Exception as e:
            logmsg('warn', 'Could not get difference status from GitHub: %s' % type(e).__name__)

    if commits > 1:
        logmsg('info', 'New version is available. You are %s commits behind' % commits)
    elif commits == 1:
        logmsg('info', 'New version is available. You are one commit behind')
    elif commits == 0:
        logmsg('info', 'Lazylibrarian is up to date')
    else:
        logmsg('info', 'Unknown version of lazylibrarian. Run the updater to identify your version')
    return commits, commit_list
def _sendAndroidPN(self, title, msg, url, username, broadcast):
    # build up the URL and parameters
    msg = msg.strip()
    if PY2:
        msg = msg.encode(lazylibrarian.SYS_ENCODING)
    data = {
        'action': "send",
        'broadcast': broadcast,
        'uri': "",
        'title': title,
        'username': username,
        'message': msg,
    }
    proxies = proxyList()
    # send the request
    try:
        timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
        r = requests.get(url, params=data, timeout=timeout, proxies=proxies)
        status = str(r.status_code)
        if status.startswith('2'):
            logger.debug("ANDROIDPN: Notification successful.")
            return True

        # HTTP status 404 if the provided email address isn't an AndroidPN user.
        if status == '404':
            logger.warn("ANDROIDPN: Username is wrong/not an AndroidPN email. AndroidPN will send an email to it")
        # For HTTP status code 401's, it is because you are passing in either an
        # invalid token, or the user has not added your service.
        elif status == '401':
            subscribeNote = self._sendAndroidPN(title, msg, url, username, broadcast)
            if subscribeNote:
                logger.debug("ANDROIDPN: Subscription sent")
                return True
            else:
                logger.error("ANDROIDPN: Subscription could not be sent")
        # If you receive an HTTP status code of 400, it is because you failed to send the proper parameters
        elif status == '400':
            logger.error("ANDROIDPN: Wrong data sent to AndroidPN")
        else:
            logger.error("ANDROIDPN: Got error code %s" % status)
        return False
    except Exception as e:
        # URLError only returns a reason, not a code. HTTPError gives a code
        # FIXME: Python 2.5 hack, it wrongly reports 201 as an error
        if hasattr(e, 'code') and e.code == 201:
            logger.debug("ANDROIDPN: Notification successful.")
            return True
        # if we get an error back that doesn't have an error code then who knows what's really happening
        if not hasattr(e, 'code'):
            logger.error("ANDROIDPN: Notification failed.")
        else:
            # noinspection PyUnresolvedReferences
            logger.error("ANDROIDPN: Notification failed. Error code: " + str(e.code))
        return False
def getLatestVersion_FromGit():
    # Don't call directly, use getLatestVersion as wrapper.
    # Also removed reference to global variable setting.
    latest_version = 'Unknown'

    # Can only work for non Windows driven installs, so check install type
    if lazylibrarian.CONFIG['INSTALL_TYPE'] == 'win':
        logmsg('debug', '(getLatestVersion_FromGit) Error - should not be called under a windows install')
        latest_version = 'WINDOWS INSTALL'
    else:
        # check current branch value of the local git repo as folks may pull from a branch not master
        branch = lazylibrarian.CONFIG['GIT_BRANCH']

        if branch == 'InvalidBranch':
            logmsg('debug', '(getLatestVersion_FromGit) - Failed to get a valid branch name from local repo')
        else:
            if branch == 'Package':  # check packages against master
                branch = 'master'
            # Get the latest commit available from github
            url = 'https://api.github.com/repos/%s/%s/commits/%s' % (
                lazylibrarian.CONFIG['GIT_USER'], lazylibrarian.CONFIG['GIT_REPO'], branch)
            logmsg('debug',
                   '(getLatestVersion_FromGit) Retrieving latest version information from github command=[%s]' % url)
            timestamp = check_int(lazylibrarian.CONFIG['GIT_UPDATED'], 0)
            age = ''
            if timestamp:
                # timestring for 'If-Modified-Since' needs to be english short day/month names and in gmt
                # we already have english month names stored in MONTHNAMES[] but need capitalising
                # so use hard coded versions here instead
                DAYNAMES = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
                MONNAMES = ['', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
                tm = time.gmtime(timestamp)
                age = "%s, %02d %s %04d %02d:%02d:%02d GMT" % (DAYNAMES[tm.tm_wday], tm.tm_mday,
                                                               MONNAMES[tm.tm_mon], tm.tm_year,
                                                               tm.tm_hour, tm.tm_min, tm.tm_sec)
            try:
                headers = {'User-Agent': USER_AGENT}
                if age:
                    logmsg('debug', '(getLatestVersion_FromGit) Checking if modified since %s' % age)
                    headers.update({'If-Modified-Since': age})
                proxies = proxyList()
                timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
                r = requests.get(url, timeout=timeout, headers=headers, proxies=proxies)

                if str(r.status_code).startswith('2'):
                    git = r.json()
                    latest_version = git['sha']
                    logmsg('debug', '(getLatestVersion_FromGit) Branch [%s] Latest Version has been set to [%s]' % (
                        branch, latest_version))
                elif str(r.status_code) == '304':
                    latest_version = lazylibrarian.CONFIG['CURRENT_VERSION']
                    logmsg('debug', '(getLatestVersion_FromGit) Not modified, currently on Latest Version')
            except Exception as e:
                logmsg('warn', '(getLatestVersion_FromGit) Could not get the latest commit from github')
                logmsg('debug', 'git %s for %s: %s' % (type(e).__name__, url, str(e)))
                latest_version = 'Not_Available_From_GitHUB'

    return latest_version
def fetchURL(URL, headers=None, retry=True, raw=None):
    """ Return the result of fetching a URL and True if success
        Otherwise return error message and False
        Return data as raw/bytes in python2 or if raw == True
        On python3 default to unicode, need to set raw=True for images/data
        Allow one retry on timeout by default"""
    if raw is None:
        if PY2:
            raw = True
        else:
            raw = False
    if headers is None:
        # some sites insist on having a user-agent, default is to add one
        # if you don't want any headers, send headers=[]
        headers = {'User-Agent': USER_AGENT}
    proxies = proxyList()
    try:
        # jackett query all indexers needs a longer timeout
        # /torznab/all/api?q= or v2.0/indexers/all/results/torznab/api?q=
        if '/torznab/' in URL and ('/all/' in URL or '/aggregate/' in URL):
            timeout = check_int(lazylibrarian.CONFIG['HTTP_EXT_TIMEOUT'], 90)
        else:
            timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
        r = requests.get(URL, headers=headers, timeout=timeout, proxies=proxies)
        if str(r.status_code).startswith('2'):  # (200 OK etc)
            if raw:
                return r.content, True
            try:
                result = r.content.decode('utf-8')
            except UnicodeDecodeError:
                result = r.content.decode('latin-1')
            return result, True
        # noinspection PyBroadException
        try:
            # noinspection PyProtectedMember
            msg = requests.status_codes._codes[r.status_code][0]
        except Exception:
            msg = str(r.content)
        return "Response status %s: %s" % (r.status_code, msg), False
    except requests.exceptions.Timeout as e:
        if not retry:
            logger.error("fetchURL: Timeout getting response from %s" % URL)
            return "Timeout %s" % str(e), False
        logger.debug("fetchURL: retrying - got timeout on %s" % URL)
        result, success = fetchURL(URL, headers=headers, retry=False, raw=False)
        return result, success
    except Exception as e:
        if hasattr(e, 'reason'):
            return "Exception %s: Reason: %s" % (type(e).__name__, str(e.reason)), False
        return "Exception %s: %s" % (type(e).__name__, str(e)), False
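# A minimal usage sketch for fetchURL, kept as a comment so nothing runs at import time.
# The (result, success) return convention comes from the docstring above; the URL and the
# process() call are illustrative placeholders only, not names from this codebase.
#
#     result, success = fetchURL('https://example.org/rss')
#     if success:
#         process(result)                      # decoded text, or bytes if called with raw=True
#     else:
#         logger.warn("Fetch failed: %s" % result)   # on failure, result holds the error message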
def DirectDownloadMethod(bookid=None, tor_title=None, tor_url=None, bookname=None, library='eBook'):
    myDB = database.DBConnection()
    downloadID = False
    Source = "DIRECT"

    logger.debug("Starting Direct Download for [%s]" % bookname)
    proxies = proxyList()
    headers = {'Accept-encoding': 'gzip', 'User-Agent': USER_AGENT}
    try:
        r = requests.get(tor_url, headers=headers, timeout=90, proxies=proxies)
    except requests.exceptions.Timeout:
        logger.warn('Timeout fetching file from url: %s' % tor_url)
        return False
    except Exception as e:
        if hasattr(e, 'reason'):
            logger.warn('%s fetching file from url: %s, %s' % (type(e).__name__, tor_url, e.reason))
        else:
            logger.warn('%s fetching file from url: %s, %s' % (type(e).__name__, tor_url, str(e)))
        return False

    bookname = '.'.join(bookname.rsplit(' ', 1))  # last word is the extension
    logger.debug("File download got %s bytes for %s/%s" % (len(r.content), tor_title, bookname))

    destdir = os.path.join(lazylibrarian.DIRECTORY('Download'), tor_title)
    try:
        os.makedirs(destdir)
        setperm(destdir)
    except OSError as e:
        if not os.path.isdir(destdir):
            logger.debug("Error creating directory %s, %s" % (destdir, e))

    destfile = os.path.join(destdir, bookname)
    try:
        with open(destfile, 'wb') as bookfile:
            bookfile.write(r.content)
        setperm(destfile)
        downloadID = True
    except Exception as e:
        logger.error("%s writing book to %s, %s" % (type(e).__name__, destfile, e))

    if downloadID:
        logger.debug('File %s has been downloaded from %s' % (tor_title, tor_url))
        if library == 'eBook':
            myDB.action('UPDATE books SET status="Snatched" WHERE BookID=?', (bookid,))
        elif library == 'AudioBook':
            myDB.action('UPDATE books SET audiostatus="Snatched" WHERE BookID=?', (bookid,))
        myDB.action('UPDATE wanted SET status="Snatched", Source=?, DownloadID=? WHERE NZBurl=?',
                    (Source, downloadID, tor_url))
        return True
    else:
        logger.error('Failed to download file @ <a href="%s">%s</a>' % (tor_url, tor_url))
        myDB.action('UPDATE wanted SET status="Failed" WHERE NZBurl=?', (tor_url,))
        return False
def getLatestVersion_FromGit():
    # Don't call directly, use getLatestVersion as wrapper.
    # Also removed reference to global variable setting.
    latest_version = 'Unknown'

    # Can only work for non Windows driven installs, so check install type
    if lazylibrarian.CONFIG['INSTALL_TYPE'] == 'win':
        logmsg('debug', '(getLatestVersion_FromGit) Error - should not be called under a windows install')
        latest_version = 'WINDOWS INSTALL'
    else:
        # check current branch value of the local git repo as folks may pull from a branch not master
        branch = lazylibrarian.CONFIG['GIT_BRANCH']

        if branch == 'InvalidBranch':
            logmsg('debug', '(getLatestVersion_FromGit) - Failed to get a valid branch name from local repo')
        else:
            if branch == 'Package':  # check packages against master
                branch = 'master'
            # Get the latest commit available from github
            url = 'https://api.github.com/repos/%s/%s/commits/%s' % (
                lazylibrarian.CONFIG['GIT_USER'], lazylibrarian.CONFIG['GIT_REPO'], branch)
            logmsg('debug',
                   '(getLatestVersion_FromGit) Retrieving latest version information from github command=[%s]' % url)
            age = lazylibrarian.CONFIG['GIT_UPDATED']
            try:
                headers = {'User-Agent': USER_AGENT}
                if age:
                    logmsg('debug', '(getLatestVersion_FromGit) Checking if modified since %s' % age)
                    headers.update({'If-Modified-Since': age})
                proxies = proxyList()
                timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
                r = requests.get(url, timeout=timeout, headers=headers, proxies=proxies)

                if str(r.status_code).startswith('2'):
                    git = r.json()
                    latest_version = git['sha']
                    logmsg('debug', '(getLatestVersion_FromGit) Branch [%s] Latest Version has been set to [%s]' % (
                        branch, latest_version))
                elif str(r.status_code) == '304':
                    latest_version = lazylibrarian.CONFIG['CURRENT_VERSION']
                    logmsg('debug', '(getLatestVersion_FromGit) Not modified, currently on Latest Version')
            except Exception as e:
                logmsg('warn', '(getLatestVersion_FromGit) Could not get the latest commit from github')
                logmsg('debug', 'git %s for %s: %s' % (type(e).__name__, url, str(e)))
                latest_version = 'Not_Available_From_GitHUB'

    return latest_version
def getCommitDifferenceFromGit():
    # See how many commits behind we are
    # Takes current latest version value and tries to diff it with the latest version in the current branch.
    commit_list = ''
    commits = -1
    if lazylibrarian.CONFIG['LATEST_VERSION'] == 'Not_Available_From_GitHUB':
        commits = 0  # don't report a commit diff as we don't know anything
        commit_list = 'Unable to get latest version from GitHub'
        logmsg('info', commit_list)
    elif lazylibrarian.CONFIG['CURRENT_VERSION'] and commits != 0:
        url = 'https://api.github.com/repos/%s/LazyLibrarian/compare/%s...%s' % (
            lazylibrarian.CONFIG['GIT_USER'], lazylibrarian.CONFIG['CURRENT_VERSION'],
            lazylibrarian.CONFIG['LATEST_VERSION'])
        logmsg('debug', 'Check for differences between local & repo by [%s]' % url)
        try:
            headers = {'User-Agent': USER_AGENT}
            proxies = proxyList()
            timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
            r = requests.get(url, timeout=timeout, headers=headers, proxies=proxies)
            git = r.json()
            if 'total_commits' in git:
                commits = int(git['total_commits'])
                msg = 'Github: Status [%s] - Ahead [%s] - Behind [%s] - Total Commits [%s]' % (
                    git['status'], git['ahead_by'], git['behind_by'], git['total_commits'])
                logmsg('debug', msg)
            else:
                logmsg('warn', 'Could not get difference status from GitHub: %s' % str(git))
            if commits > 0:
                for item in git['commits']:
                    commit_list = "%s\n%s" % (item['commit']['message'], commit_list)
        except Exception as e:
            logmsg('warn', 'Could not get difference status from GitHub: %s' % type(e).__name__)

    if commits > 1:
        logmsg('info', 'New version is available. You are %s commits behind' % commits)
    elif commits == 1:
        logmsg('info', 'New version is available. You are one commit behind')
    elif commits == 0:
        logmsg('info', 'Lazylibrarian is up to date')
    else:
        logmsg('info', 'Unknown version of lazylibrarian. Run the updater to identify your version')
    return commits, commit_list
def thingLang(isbn):
    # try searching librarything for a language code using the isbn
    # if no language found, librarything return value is "invalid" or "unknown"
    # librarything returns plain text, not xml
    BOOK_URL = 'http://www.librarything.com/api/thingLang.php?isbn=' + isbn
    proxies = proxyList()
    booklang = ''
    try:
        librarything_wait()
        timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
        r = requests.get(BOOK_URL, timeout=timeout, proxies=proxies)
        resp = r.text
        logger.debug("LibraryThing reports language [%s] for %s" % (resp, isbn))
        if resp != 'invalid' and resp != 'unknown':
            booklang = resp
    except Exception as e:
        logger.error("%s finding language: %s" % (type(e).__name__, str(e)))
    finally:
        return booklang
def thingLang(isbn):
    # try searching librarything for a language code using the isbn
    # if no language found, librarything return value is "invalid" or "unknown"
    # librarything returns plain text, not xml
    BOOK_URL = 'http://www.librarything.com/api/thingLang.php?isbn=' + isbn
    proxies = proxyList()
    booklang = ''
    try:
        librarything_wait()
        timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
        r = requests.get(BOOK_URL, timeout=timeout, proxies=proxies)
        resp = r.text
        logger.debug("LibraryThing reports language [%s] for %s" % (resp, isbn))
        if 'invalid' not in resp and 'unknown' not in resp and '<' not in resp:
            booklang = resp
    except Exception as e:
        logger.error("%s finding language: %s" % (type(e).__name__, str(e)))
    finally:
        return booklang
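# Usage sketch for thingLang, kept as a comment only. The ISBN shown is an arbitrary example,
# not a value taken from this codebase; an empty return means the lookup failed or was rejected.
#
#     booklang = thingLang('9780140449136')
#     if booklang:
#         logger.debug("Language for this ISBN is %s" % booklang)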
def torrentAction(method, arguments):
    host = lazylibrarian.CONFIG['TRANSMISSION_HOST']
    port = check_int(lazylibrarian.CONFIG['TRANSMISSION_PORT'], 0)

    if not host or not port:
        logger.error('Invalid transmission host or port, check your config')
        return False

    username = lazylibrarian.CONFIG['TRANSMISSION_USER']
    password = lazylibrarian.CONFIG['TRANSMISSION_PASS']

    if not host.startswith('http'):
        host = 'http://' + host

    if host.endswith('/'):
        host = host[:-1]

    # Fix the URL. We assume that the user does not point to the RPC endpoint,
    # so add it if it is missing.
    parts = list(urlparse.urlparse(host))

    if parts[0] not in ("http", "https"):
        parts[0] = "http"

    if ':' not in parts[1]:
        parts[1] += ":%s" % port

    if not parts[2].endswith("/rpc"):
        parts[2] += "/transmission/rpc"

    host = urlparse.urlunparse(parts)

    # Retrieve session id
    auth = (username, password) if username and password else None
    proxies = proxyList()
    timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
    response = requests.get(host, auth=auth, proxies=proxies, timeout=timeout)

    if response is None:
        logger.error("Error getting Transmission session ID")
        return

    # Parse response
    session_id = ''
    if response.status_code == 401:
        if auth:
            logger.error("Username and/or password not accepted by Transmission")
        else:
            logger.error("Transmission authorization required")
        return
    elif response.status_code == 409:
        session_id = response.headers['x-transmission-session-id']

    if not session_id:
        logger.error("Expected a Session ID from Transmission")
        return

    # Prepare next request
    headers = {'x-transmission-session-id': session_id}
    data = {'method': method, 'arguments': arguments}
    proxies = proxyList()
    timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
    try:
        response = requests.post(host, data=json.dumps(data), headers=headers, proxies=proxies,
                                 auth=auth, timeout=timeout)
        response = response.json()
    except Exception as e:
        logger.debug('Transmission %s: %s' % (type(e).__name__, str(e)))
        response = ''

    if not response:
        logger.error("Error sending torrent to Transmission")
        return

    return response
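# Usage sketch for torrentAction, kept as a comment only. 'torrent-get' and its 'ids'/'fields'
# arguments follow the Transmission RPC spec; the torrent id 4 is an arbitrary example. As coded
# above, torrentAction returns the decoded JSON response, or None/False on error.
#
#     response = torrentAction('torrent-get', {'ids': 4, 'fields': ['name', 'percentDone']})
#     if response and response.get('result') == 'success':
#         torrents = response['arguments']['torrents']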
def TORDownloadMethod(bookid=None, tor_title=None, tor_url=None, library='eBook'):
    myDB = database.DBConnection()
    downloadID = False
    Source = ''
    full_url = tor_url  # keep the url as stored in "wanted" table

    if tor_url and tor_url.startswith('magnet'):
        torrent = tor_url  # allow magnet link to write to blackhole and hash to utorrent/rtorrent
    else:
        # h = HTMLParser()
        # tor_url = h.unescape(tor_url)
        # HTMLParser is probably overkill, we only seem to get &amp;
        # tor_url = tor_url.replace('&amp;', '&')

        if '&file=' in tor_url:
            # torznab results need to be re-encoded
            # had a problem with torznab utf-8 encoded strings not matching
            # our utf-8 strings because of long/short form differences
            url, value = tor_url.split('&file=', 1)
            value = makeUnicode(value)  # ensure unicode
            value = unicodedata.normalize('NFC', value)  # normalize to short form
            value = value.encode('unicode-escape')  # then escape the result
            value = value.replace(' ', '%20')  # and encode any spaces
            tor_url = url + '&file=' + value

        # strip url back to the .torrent as some sites add parameters
        if not tor_url.endswith('.torrent'):
            if '.torrent' in tor_url:
                tor_url = tor_url.split('.torrent')[0] + '.torrent'

        headers = {'Accept-encoding': 'gzip', 'User-Agent': USER_AGENT}
        proxies = proxyList()
        try:
            r = requests.get(tor_url, headers=headers, timeout=90, proxies=proxies)
        except requests.exceptions.Timeout:
            logger.warn('Timeout fetching file from url: %s' % tor_url)
            return False
        except Exception as e:
            if hasattr(e, 'reason'):
                logger.warn('%s fetching file from url: %s, %s' % (type(e).__name__, tor_url, e.reason))
            else:
                logger.warn('%s fetching file from url: %s, %s' % (type(e).__name__, tor_url, str(e)))
            return False

        torrent = r.content

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_BLACKHOLE']:
        Source = "BLACKHOLE"
        logger.debug("Sending %s to blackhole" % tor_title)
        tor_name = cleanName(tor_title).replace(' ', '_')
        if tor_url and tor_url.startswith('magnet'):
            if lazylibrarian.CONFIG['TOR_CONVERT_MAGNET']:
                hashid = CalcTorrentHash(tor_url)
                tor_name = 'meta-' + hashid + '.torrent'
                tor_path = os.path.join(lazylibrarian.CONFIG['TORRENT_DIR'], tor_name)
                result = magnet2torrent(tor_url, tor_path)
                if result is not False:
                    logger.debug('Magnet file saved as: %s' % tor_path)
                    downloadID = Source
            else:
                tor_name += '.magnet'
                tor_path = os.path.join(lazylibrarian.CONFIG['TORRENT_DIR'], tor_name)
                msg = ''
                try:
                    msg = 'Opening '
                    with open(tor_path, 'wb') as torrent_file:
                        msg += 'Writing '
                        if isinstance(torrent, unicode):
                            torrent = torrent.encode('iso-8859-1')
                        torrent_file.write(torrent)
                    msg += 'SettingPerm'
                    setperm(tor_path)
                    msg += 'Saved'
                    logger.debug('Magnet file saved: %s' % tor_path)
                    downloadID = Source
                except Exception as e:
                    logger.debug("Failed to write magnet to file: %s %s" % (type(e).__name__, str(e)))
                    logger.debug("Progress: %s" % msg)
                    logger.debug("Filename [%s]" % (repr(tor_path)))
                    return False
        else:
            tor_name += '.torrent'
            tor_path = os.path.join(lazylibrarian.CONFIG['TORRENT_DIR'], tor_name)
            msg = ''
            try:
                msg = 'Opening '
                with open(tor_path, 'wb') as torrent_file:
                    msg += 'Writing '
                    if isinstance(torrent, unicode):
                        torrent = torrent.encode('iso-8859-1')
                    torrent_file.write(torrent)
                msg += 'SettingPerm '
                setperm(tor_path)
                msg += 'Saved'
                logger.debug('Torrent file saved: %s' % tor_name)
                downloadID = Source
            except Exception as e:
                logger.debug("Failed to write torrent to file: %s %s" % (type(e).__name__, str(e)))
                logger.debug("Progress: %s" % msg)
                logger.debug("Filename [%s]" % (repr(tor_path)))
                return False

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_UTORRENT'] and lazylibrarian.CONFIG['UTORRENT_HOST']:
        logger.debug("Sending %s to Utorrent" % tor_title)
        Source = "UTORRENT"
        hashid = CalcTorrentHash(torrent)
        downloadID = utorrent.addTorrent(tor_url, hashid)  # returns hash or False
        if downloadID:
            tor_title = utorrent.nameTorrent(downloadID)

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_RTORRENT'] and lazylibrarian.CONFIG['RTORRENT_HOST']:
        logger.debug("Sending %s to rTorrent" % tor_title)
        Source = "RTORRENT"
        hashid = CalcTorrentHash(torrent)
        downloadID = rtorrent.addTorrent(tor_url, hashid)  # returns hash or False
        if downloadID:
            tor_title = rtorrent.getName(downloadID)

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_QBITTORRENT'] and lazylibrarian.CONFIG['QBITTORRENT_HOST']:
        logger.debug("Sending %s to qbittorrent" % tor_title)
        Source = "QBITTORRENT"
        hashid = CalcTorrentHash(torrent)
        status = qbittorrent.addTorrent(tor_url, hashid)  # returns True or False
        if status:
            downloadID = hashid
            tor_title = qbittorrent.getName(hashid)

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_TRANSMISSION'] and lazylibrarian.CONFIG['TRANSMISSION_HOST']:
        logger.debug("Sending %s to Transmission" % tor_title)
        Source = "TRANSMISSION"
        downloadID = transmission.addTorrent(tor_url)  # returns id or False
        if downloadID:
            # transmission returns it's own int, but we store hashid instead
            downloadID = CalcTorrentHash(torrent)
            tor_title = transmission.getTorrentFolder(downloadID)

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_SYNOLOGY'] and lazylibrarian.CONFIG['USE_SYNOLOGY'] and \
            lazylibrarian.CONFIG['SYNOLOGY_HOST']:
        logger.debug("Sending %s to Synology" % tor_title)
        Source = "SYNOLOGY_TOR"
        downloadID = synology.addTorrent(tor_url)  # returns id or False
        if downloadID:
            tor_title = synology.getName(downloadID)

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_DELUGE'] and lazylibrarian.CONFIG['DELUGE_HOST']:
        logger.debug("Sending %s to Deluge" % tor_title)
        if not lazylibrarian.CONFIG['DELUGE_USER']:
            # no username, talk to the webui
            Source = "DELUGEWEBUI"
            downloadID = deluge.addTorrent(tor_url)  # returns hash or False
            if downloadID:
                tor_title = deluge.getTorrentFolder(downloadID)
        else:
            # have username, talk to the daemon
            Source = "DELUGERPC"
            client = DelugeRPCClient(lazylibrarian.CONFIG['DELUGE_HOST'],
                                     lazylibrarian.CONFIG['DELUGE_URL_BASE'],
                                     int(lazylibrarian.CONFIG['DELUGE_PORT']),
                                     lazylibrarian.CONFIG['DELUGE_USER'],
                                     lazylibrarian.CONFIG['DELUGE_PASS'])
            try:
                client.connect()
                args = {"name": tor_title}
                if tor_url.startswith('magnet'):
                    downloadID = client.call('core.add_torrent_magnet', tor_url, args)
                else:
                    downloadID = client.call('core.add_torrent_url', tor_url, args)
                if downloadID:
                    if lazylibrarian.CONFIG['DELUGE_LABEL']:
                        _ = client.call('label.set_torrent', downloadID, lazylibrarian.CONFIG['DELUGE_LABEL'])
                    result = client.call('core.get_torrent_status', downloadID, {})
                    # for item in result:
                    #     logger.debug('Deluge RPC result %s: %s' % (item, result[item]))
                    if 'name' in result:
                        tor_title = result['name']
            except Exception as e:
                logger.debug('DelugeRPC failed %s %s' % (type(e).__name__, str(e)))
                return False

    if not Source:
        logger.warn('No torrent download method is enabled, check config.')
        return False

    if downloadID:
        if tor_title:
            if downloadID.upper() in tor_title.upper():
                logger.warn('%s: name contains hash, probably unresolved magnet' % Source)
            else:
                tor_title = unaccented_str(tor_title)
                # need to check against reject words list again as the name may have changed
                # library = magazine eBook AudioBook to determine which reject list
                # but we can't easily do the per-magazine rejects
                if library == 'magazine':
                    reject_list = getList(lazylibrarian.CONFIG['REJECT_MAGS'])
                elif library == 'eBook':
                    reject_list = getList(lazylibrarian.CONFIG['REJECT_WORDS'])
                elif library == 'AudioBook':
                    reject_list = getList(lazylibrarian.CONFIG['REJECT_AUDIO'])
                else:
                    logger.debug("Invalid library [%s] in TORDownloadMethod" % library)
                    reject_list = []

                rejected = False
                lower_title = tor_title.lower()
                for word in reject_list:
                    if word in lower_title:
                        rejected = True
                        logger.debug("Rejecting torrent name %s, contains %s" % (tor_title, word))
                        break
                if rejected:
                    myDB.action('UPDATE wanted SET status="Failed" WHERE NZBurl=?', (full_url,))
                    delete_task(Source, downloadID, True)
                    return False
                else:
                    logger.debug('%s setting torrent name to [%s]' % (Source, tor_title))
                    myDB.action('UPDATE wanted SET NZBtitle=? WHERE NZBurl=?', (tor_title, full_url))

        if library == 'eBook':
            myDB.action('UPDATE books SET status="Snatched" WHERE BookID=?', (bookid,))
        elif library == 'AudioBook':
            myDB.action('UPDATE books SET audiostatus="Snatched" WHERE BookID=?', (bookid,))
        myDB.action('UPDATE wanted SET status="Snatched", Source=?, DownloadID=? WHERE NZBurl=?',
                    (Source, downloadID, full_url))
        return True

    logger.error('Failed to download torrent from %s, %s' % (Source, tor_url))
    myDB.action('UPDATE wanted SET status="Failed" WHERE NZBurl=?', (full_url,))
    return False
def TORDownloadMethod(bookid=None, tor_title=None, tor_url=None, library='eBook'):
    myDB = database.DBConnection()
    downloadID = False
    Source = ''
    torrent = ''
    full_url = tor_url  # keep the url as stored in "wanted" table

    if 'magnet:?' in tor_url:
        # discard any other parameters and just use the magnet link
        tor_url = 'magnet:?' + tor_url.split('magnet:?')[1]
    else:
        # h = HTMLParser()
        # tor_url = h.unescape(tor_url)
        # HTMLParser is probably overkill, we only seem to get &amp;
        # tor_url = tor_url.replace('&amp;', '&')

        if '&file=' in tor_url:
            # torznab results need to be re-encoded
            # had a problem with torznab utf-8 encoded strings not matching
            # our utf-8 strings because of long/short form differences
            url, value = tor_url.split('&file=', 1)
            value = makeUnicode(value)  # ensure unicode
            value = unicodedata.normalize('NFC', value)  # normalize to short form
            value = value.encode('unicode-escape')  # then escape the result
            value = makeUnicode(value)  # ensure unicode
            value = value.replace(' ', '%20')  # and encode any spaces
            tor_url = url + '&file=' + value

        # strip url back to the .torrent as some sites add extra parameters
        if not tor_url.endswith('.torrent') and '.torrent' in tor_url:
            tor_url = tor_url.split('.torrent')[0] + '.torrent'

        headers = {'Accept-encoding': 'gzip', 'User-Agent': getUserAgent()}
        proxies = proxyList()
        try:
            logger.debug("Fetching %s" % tor_url)
            r = requests.get(tor_url, headers=headers, timeout=90, proxies=proxies)
            if str(r.status_code).startswith('2'):
                torrent = r.content
                if not len(torrent):
                    res = "Got empty response for %s" % tor_url
                    logger.warn(res)
                    return False, res
                elif len(torrent) < 100:
                    res = "Only got %s bytes for %s" % (len(torrent), tor_url)
                    logger.warn(res)
                    return False, res
                else:
                    logger.debug("Got %s bytes for %s" % (len(torrent), tor_url))
            else:
                res = "Got a %s response for %s" % (r.status_code, tor_url)
                logger.warn(res)
                return False, res
        except requests.exceptions.Timeout:
            res = 'Timeout fetching file from url: %s' % tor_url
            logger.warn(res)
            return False, res
        except Exception as e:
            # some jackett providers redirect internally using http 301 to a magnet link
            # which requests can't handle, so throws an exception
            logger.debug("Requests exception: %s" % str(e))
            if "magnet:?" in str(e):
                tor_url = 'magnet:?' + str(e).split('magnet:?')[1].strip("'")
                logger.debug("Redirecting to %s" % tor_url)
            else:
                if hasattr(e, 'reason'):
                    res = '%s fetching file from url: %s, %s' % (type(e).__name__, tor_url, e.reason)
                else:
                    res = '%s fetching file from url: %s, %s' % (type(e).__name__, tor_url, str(e))
                logger.warn(res)
                return False, res

    if not torrent and not tor_url.startswith('magnet:?'):
        res = "No magnet or data, cannot continue"
        logger.warn(res)
        return False, res

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_BLACKHOLE']:
        Source = "BLACKHOLE"
        logger.debug("Sending %s to blackhole" % tor_title)
        tor_name = cleanName(tor_title).replace(' ', '_')
        if tor_url and tor_url.startswith('magnet'):
            if lazylibrarian.CONFIG['TOR_CONVERT_MAGNET']:
                hashid = calculate_torrent_hash(tor_url)
                if not hashid:
                    hashid = tor_name
                tor_name = 'meta-' + hashid + '.torrent'
                tor_path = os.path.join(lazylibrarian.CONFIG['TORRENT_DIR'], tor_name)
                result = magnet2torrent(tor_url, tor_path)
                if result is not False:
                    logger.debug('Magnet file saved as: %s' % tor_path)
                    downloadID = Source
            else:
                tor_name += '.magnet'
                tor_path = os.path.join(lazylibrarian.CONFIG['TORRENT_DIR'], tor_name)
                msg = ''
                try:
                    msg = 'Opening '
                    with open(tor_path, 'wb') as torrent_file:
                        msg += 'Writing '
                        if isinstance(torrent, text_type):
                            torrent = torrent.encode('iso-8859-1')
                        torrent_file.write(torrent)
                    msg += 'SettingPerm '
                    setperm(tor_path)
                    msg += 'Saved '
                    logger.debug('Magnet file saved: %s' % tor_path)
                    downloadID = Source
                except Exception as e:
                    res = "Failed to write magnet to file: %s %s" % (type(e).__name__, str(e))
                    logger.warn(res)
                    logger.debug("Progress: %s Filename [%s]" % (msg, repr(tor_path)))
                    return False, res
        else:
            tor_name += '.torrent'
            tor_path = os.path.join(lazylibrarian.CONFIG['TORRENT_DIR'], tor_name)
            msg = ''
            try:
                msg = 'Opening '
                with open(tor_path, 'wb') as torrent_file:
                    msg += 'Writing '
                    if isinstance(torrent, text_type):
                        torrent = torrent.encode('iso-8859-1')
                    torrent_file.write(torrent)
                msg += 'SettingPerm '
                setperm(tor_path)
                msg += 'Saved '
                logger.debug('Torrent file saved: %s' % tor_name)
                downloadID = Source
            except Exception as e:
                res = "Failed to write torrent to file: %s %s" % (type(e).__name__, str(e))
                logger.warn(res)
                logger.debug("Progress: %s Filename [%s]" % (msg, repr(tor_path)))
                return False, res

    hashid = calculate_torrent_hash(tor_url, torrent)
    if not hashid:
        res = "Unable to calculate torrent hash from url/data"
        logger.error(res)
        logger.debug("url: %s" % tor_url)
        logger.debug("data: %s" % makeUnicode(str(torrent[:50])))
        return False, res

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_UTORRENT'] and lazylibrarian.CONFIG['UTORRENT_HOST']:
        logger.debug("Sending %s to Utorrent" % tor_title)
        Source = "UTORRENT"
        downloadID, res = utorrent.addTorrent(tor_url, hashid)  # returns hash or False
        if downloadID:
            tor_title = utorrent.nameTorrent(downloadID)

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_RTORRENT'] and lazylibrarian.CONFIG['RTORRENT_HOST']:
        logger.debug("Sending %s to rTorrent" % tor_title)
        Source = "RTORRENT"
        if torrent:
            logger.debug("Sending %s data to rTorrent" % tor_title)
            downloadID, res = rtorrent.addTorrent(tor_title, hashid, data=torrent)
        else:
            logger.debug("Sending %s url to rTorrent" % tor_title)
            downloadID, res = rtorrent.addTorrent(tor_url, hashid)  # returns hash or False
        if downloadID:
            tor_title = rtorrent.getName(downloadID)

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_QBITTORRENT'] and lazylibrarian.CONFIG['QBITTORRENT_HOST']:
        Source = "QBITTORRENT"
        if torrent:
            logger.debug("Sending %s data to qBittorrent" % tor_title)
            status, res = qbittorrent.addFile(torrent, hashid, tor_title)
        else:
            logger.debug("Sending %s url to qBittorrent" % tor_title)
            status, res = qbittorrent.addTorrent(tor_url, hashid)  # returns True or False
        if status:
            downloadID = hashid
            tor_title = qbittorrent.getName(hashid)

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_TRANSMISSION'] and lazylibrarian.CONFIG['TRANSMISSION_HOST']:
        Source = "TRANSMISSION"
        if torrent:
            logger.debug("Sending %s data to Transmission" % tor_title)
            # transmission needs b64encoded metainfo to be unicode, not bytes
            downloadID, res = transmission.addTorrent(None, metainfo=makeUnicode(b64encode(torrent)))
        else:
            logger.debug("Sending %s url to Transmission" % tor_title)
            downloadID, res = transmission.addTorrent(tor_url)  # returns id or False
        if downloadID:
            # transmission returns it's own int, but we store hashid instead
            downloadID = hashid
            tor_title = transmission.getTorrentFolder(downloadID)

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_SYNOLOGY'] and lazylibrarian.CONFIG['USE_SYNOLOGY'] and \
            lazylibrarian.CONFIG['SYNOLOGY_HOST']:
        logger.debug("Sending %s url to Synology" % tor_title)
        Source = "SYNOLOGY_TOR"
        downloadID, res = synology.addTorrent(tor_url)  # returns id or False
        if downloadID:
            tor_title = synology.getName(downloadID)

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_DELUGE'] and lazylibrarian.CONFIG['DELUGE_HOST']:
        if not lazylibrarian.CONFIG['DELUGE_USER']:
            # no username, talk to the webui
            Source = "DELUGEWEBUI"
            if torrent:
                logger.debug("Sending %s data to Deluge" % tor_title)
                downloadID, res = deluge.addTorrent(tor_title, data=b64encode(torrent))
            else:
                logger.debug("Sending %s url to Deluge" % tor_title)
                downloadID, res = deluge.addTorrent(tor_url)  # can be link or magnet, returns hash or False
            if downloadID:
                tor_title = deluge.getTorrentFolder(downloadID)
            else:
                return False, res
        else:
            # have username, talk to the daemon
            Source = "DELUGERPC"
            client = DelugeRPCClient(lazylibrarian.CONFIG['DELUGE_HOST'],
                                     int(lazylibrarian.CONFIG['DELUGE_PORT']),
                                     lazylibrarian.CONFIG['DELUGE_USER'],
                                     lazylibrarian.CONFIG['DELUGE_PASS'])
            try:
                client.connect()
                args = {"name": tor_title}
                if tor_url.startswith('magnet'):
                    res = "Sending %s magnet to DelugeRPC" % tor_title
                    logger.debug(res)
                    downloadID = client.call('core.add_torrent_magnet', tor_url, args)
                elif torrent:
                    res = "Sending %s data to DelugeRPC" % tor_title
                    logger.debug(res)
                    downloadID = client.call('core.add_torrent_file', tor_title, b64encode(torrent), args)
                else:
                    res = "Sending %s url to DelugeRPC" % tor_title
                    logger.debug(res)
                    downloadID = client.call('core.add_torrent_url', tor_url, args)
                if downloadID:
                    if lazylibrarian.CONFIG['DELUGE_LABEL']:
                        _ = client.call('label.set_torrent', downloadID,
                                        lazylibrarian.CONFIG['DELUGE_LABEL'].lower())
                    result = client.call('core.get_torrent_status', downloadID, {})
                    if 'name' in result:
                        tor_title = result['name']
                else:
                    res += ' failed'
                    logger.error(res)
                    return False, res
            except Exception as e:
                res = 'DelugeRPC failed %s %s' % (type(e).__name__, str(e))
                logger.error(res)
                return False, res

    if not Source:
        res = 'No torrent download method is enabled, check config.'
        logger.warn(res)
        return False, res

    if downloadID:
        if tor_title:
            if downloadID.upper() in tor_title.upper():
                logger.warn('%s: name contains hash, probably unresolved magnet' % Source)
            else:
                tor_title = unaccented_str(tor_title)
                # need to check against reject words list again as the name may have changed
                # library = magazine eBook AudioBook to determine which reject list
                # but we can't easily do the per-magazine rejects
                if library == 'Magazine':
                    reject_list = getList(lazylibrarian.CONFIG['REJECT_MAGS'], ',')
                elif library == 'eBook':
                    reject_list = getList(lazylibrarian.CONFIG['REJECT_WORDS'], ',')
                elif library == 'AudioBook':
                    reject_list = getList(lazylibrarian.CONFIG['REJECT_AUDIO'], ',')
                else:
                    logger.debug("Invalid library [%s] in TORDownloadMethod" % library)
                    reject_list = []

                rejected = False
                lower_title = tor_title.lower()
                for word in reject_list:
                    if word in lower_title:
                        rejected = "Rejecting torrent name %s, contains %s" % (tor_title, word)
                        logger.debug(rejected)
                        break
                if not rejected:
                    rejected = check_contents(Source, downloadID, library, tor_title)
                if rejected:
                    myDB.action('UPDATE wanted SET status="Failed",DLResult=? WHERE NZBurl=?',
                                (rejected, full_url))
                    delete_task(Source, downloadID, True)
                    return False
                else:
                    logger.debug('%s setting torrent name to [%s]' % (Source, tor_title))
                    myDB.action('UPDATE wanted SET NZBtitle=? WHERE NZBurl=?', (tor_title, full_url))

        if library == 'eBook':
            myDB.action('UPDATE books SET status="Snatched" WHERE BookID=?', (bookid,))
        elif library == 'AudioBook':
            myDB.action('UPDATE books SET audiostatus="Snatched" WHERE BookID=?', (bookid,))
        myDB.action('UPDATE wanted SET status="Snatched", Source=?, DownloadID=? WHERE NZBurl=?',
                    (Source, downloadID, full_url))
        return True, ''

    res = 'Failed to send torrent to %s' % Source
    logger.error(res)
    return False, res
def update():
    if lazylibrarian.CONFIG['INSTALL_TYPE'] == 'win':
        logmsg('debug', '(update) Windows install - no update available')
        logmsg('info', '(update) Windows .exe updating not supported yet.')
        return False
    elif lazylibrarian.CONFIG['INSTALL_TYPE'] == 'package':
        logmsg('debug', '(update) Package install - no update available')
        logmsg('info', '(update) Please use your package manager to update')
        return False

    elif lazylibrarian.CONFIG['INSTALL_TYPE'] == 'git':
        branch = getCurrentGitBranch()
        _, _ = runGit('stash clear')
        output, err = runGit('pull origin ' + branch)

        success = True
        if not output:
            logmsg('error', '(update) Couldn\'t download latest version')
            success = False
        for line in output.split('\n'):
            if 'Already up-to-date.' in line:
                logmsg('info', '(update) No update available, not updating')
                logmsg('info', '(update) Output: ' + str(output))
                success = False
            elif 'Aborting' in line or 'local changes' in line:
                logmsg('error', '(update) Unable to update from git: ' + line)
                logmsg('info', '(update) Output: ' + str(output))
                success = False
        if success:
            lazylibrarian.CONFIG['GIT_UPDATED'] = str(int(time.time()))
            return True

    elif lazylibrarian.CONFIG['INSTALL_TYPE'] == 'source':
        tar_download_url = 'https://github.com/%s/%s/tarball/%s' % (
            lazylibrarian.CONFIG['GIT_USER'], lazylibrarian.CONFIG['GIT_REPO'], lazylibrarian.CONFIG['GIT_BRANCH'])
        update_dir = os.path.join(lazylibrarian.PROG_DIR, 'update')

        try:
            logmsg('info', '(update) Downloading update from: ' + tar_download_url)
            headers = {'User-Agent': USER_AGENT}
            proxies = proxyList()
            timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
            r = requests.get(tar_download_url, timeout=timeout, headers=headers, proxies=proxies)
        except requests.exceptions.Timeout:
            logmsg('error', "(update) Timeout retrieving new version from " + tar_download_url)
            return False
        except Exception as e:
            if hasattr(e, 'reason'):
                errmsg = e.reason
            else:
                errmsg = str(e)
            logmsg('error', "(update) Unable to retrieve new version from " + tar_download_url +
                   ", can't update: %s" % errmsg)
            return False

        download_name = r.url.split('/')[-1]
        tar_download_path = os.path.join(lazylibrarian.PROG_DIR, download_name)

        # Save tar to disk
        with open(tar_download_path, 'wb') as f:
            f.write(r.content)

        # Extract the tar to update folder
        logmsg('info', '(update) Extracting file ' + tar_download_path)
        try:
            with tarfile.open(tar_download_path) as tar:
                tar.extractall(update_dir)
        except Exception as e:
            logger.error('Failed to unpack tarfile %s (%s): %s' %
                         (type(e).__name__, tar_download_path, str(e)))
            return False

        # Delete the tar.gz
        logmsg('info', '(update) Deleting file ' + tar_download_path)
        os.remove(tar_download_path)

        # Find update dir name
        update_dir_contents = [x for x in os.listdir(update_dir) if os.path.isdir(os.path.join(update_dir, x))]
        if len(update_dir_contents) != 1:
            logmsg('error', "(update) Invalid update data, update failed: " + str(update_dir_contents))
            return False
        content_dir = os.path.join(update_dir, update_dir_contents[0])

        # walk temp folder and move files to main folder
        for rootdir, dirnames, filenames in os.walk(content_dir):
            rootdir = rootdir[len(content_dir) + 1:]
            for curfile in filenames:
                old_path = os.path.join(content_dir, rootdir, curfile)
                new_path = os.path.join(lazylibrarian.PROG_DIR, rootdir, curfile)

                if os.path.isfile(new_path):
                    os.remove(new_path)
                os.renames(old_path, new_path)

        # Update version.txt and timestamp
        updateVersionFile(lazylibrarian.CONFIG['LATEST_VERSION'])
        lazylibrarian.CONFIG['GIT_UPDATED'] = str(int(time.time()))
        return True
    else:
        logmsg('error', "(update) Cannot perform update - Install Type not set")
        return False
def get_author_books(self, authorid=None, authorname=None, bookstatus="Skipped", entrystatus='Active', refresh=False):
    # noinspection PyBroadException
    try:
        logger.debug('[%s] Now processing books with Google Books API' % authorname)
        # google doesn't like accents in author names
        set_url = self.url + urllib.quote('inauthor:"%s"' % unaccented_str(authorname))
        api_hits = 0
        gr_lang_hits = 0
        lt_lang_hits = 0
        gb_lang_change = 0
        cache_hits = 0
        not_cached = 0
        startindex = 0
        resultcount = 0
        removedResults = 0
        duplicates = 0
        ignored = 0
        added_count = 0
        updated_count = 0
        book_ignore_count = 0
        total_count = 0
        number_results = 1

        valid_langs = getList(lazylibrarian.CONFIG['IMP_PREFLANG'])
        # Artist is loading
        myDB = database.DBConnection()
        controlValueDict = {"AuthorID": authorid}
        newValueDict = {"Status": "Loading"}
        myDB.upsert("authors", newValueDict, controlValueDict)

        try:
            while startindex < number_results:
                self.params['startIndex'] = startindex
                URL = set_url + '&' + urllib.urlencode(self.params)

                try:
                    jsonresults, in_cache = get_json_request(URL, useCache=not refresh)
                    if jsonresults is None:
                        number_results = 0
                    else:
                        if not in_cache:
                            api_hits += 1
                        number_results = jsonresults['totalItems']
                except Exception as err:
                    if hasattr(err, 'reason'):
                        errmsg = err.reason
                    else:
                        errmsg = str(err)
                    logger.warn('Google Books API Error [%s]: Check your API key or wait a while' % errmsg)
                    break

                if number_results == 0:
                    logger.warn('Found no results for %s' % authorname)
                    break
                else:
                    logger.debug('Found %s result%s for %s' % (number_results, plural(number_results), authorname))

                startindex += 40

                for item in jsonresults['items']:
                    total_count += 1

                    # skip if no author, no author is no book.
                    try:
                        _ = item['volumeInfo']['authors'][0]
                    except KeyError:
                        logger.debug('Skipped a result without authorfield.')
                        continue

                    try:
                        if item['volumeInfo']['industryIdentifiers'][0]['type'] == 'ISBN_10':
                            bookisbn = item['volumeInfo']['industryIdentifiers'][0]['identifier']
                        else:
                            bookisbn = ""
                    except KeyError:
                        bookisbn = ""

                    isbnhead = ""
                    if len(bookisbn) == 10:
                        isbnhead = bookisbn[0:3]
                    elif len(bookisbn) == 13:
                        isbnhead = bookisbn[3:6]

                    try:
                        booklang = item['volumeInfo']['language']
                    except KeyError:
                        booklang = "Unknown"

                    # do we care about language?
                    if "All" not in valid_langs:
                        if bookisbn != "":
                            # seems google lies to us, sometimes tells us books are in english when they are not
                            if booklang == "Unknown" or booklang == "en":
                                googlelang = booklang
                                match = False
                                lang = myDB.match('SELECT lang FROM languages where isbn=?', (isbnhead,))
                                if lang:
                                    booklang = lang['lang']
                                    cache_hits += 1
                                    logger.debug("Found cached language [%s] for [%s]" % (booklang, isbnhead))
                                    match = True
                                if not match:  # no match in cache, try lookup dict
                                    if isbnhead:
                                        if len(bookisbn) == 13 and bookisbn.startswith('979'):
                                            for lang in lazylibrarian.isbn_979_dict:
                                                if isbnhead.startswith(lang):
                                                    booklang = lazylibrarian.isbn_979_dict[lang]
                                                    logger.debug("ISBN979 returned %s for %s" % (booklang, isbnhead))
                                                    match = True
                                                    break
                                        elif (len(bookisbn) == 10) or \
                                                (len(bookisbn) == 13 and bookisbn.startswith('978')):
                                            for lang in lazylibrarian.isbn_978_dict:
                                                if isbnhead.startswith(lang):
                                                    booklang = lazylibrarian.isbn_978_dict[lang]
                                                    logger.debug("ISBN978 returned %s for %s" % (booklang, isbnhead))
                                                    match = True
                                                    break
                                        if match:
                                            myDB.action('insert into languages values (?, ?)', (isbnhead, booklang))
                                            logger.debug("GB language: " + booklang)
                                if not match:
                                    # try searching librarything for a language code using the isbn
                                    # if no language found, librarything return value is "invalid" or "unknown"
                                    # librarything returns plain text, not xml
                                    BOOK_URL = 'http://www.librarything.com/api/thingLang.php?isbn=' + bookisbn
                                    proxies = proxyList()
                                    try:
                                        librarything_wait()
                                        timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
                                        r = requests.get(BOOK_URL, timeout=timeout, proxies=proxies)
                                        resp = r.text
                                        lt_lang_hits += 1
                                        logger.debug("LibraryThing reports language [%s] for %s" % (resp, isbnhead))
                                        if resp != 'invalid' and resp != 'unknown':
                                            booklang = resp  # found a language code
                                            match = True
                                            myDB.action('insert into languages values (?, ?)', (isbnhead, booklang))
                                            logger.debug("LT language: " + booklang)
                                    except Exception as e:
                                        booklang = ""
                                        logger.error("%s finding language: %s" % (type(e).__name__, str(e)))

                                if match:
                                    # We found a better language match
                                    if googlelang == "en" and booklang not in ["en-US", "en-GB", "eng"]:
                                        # these are all english, may need to expand this list
                                        booknamealt = item['volumeInfo']['title']
                                        logger.debug("%s Google thinks [%s], we think [%s]" %
                                                     (booknamealt, googlelang, booklang))
                                        gb_lang_change += 1
                                else:  # No match anywhere, accept google language
                                    booklang = googlelang

                        # skip if language is in ignore list
                        if booklang not in valid_langs:
                            booknamealt = item['volumeInfo']['title']
                            logger.debug('Skipped [%s] with language %s' % (booknamealt, booklang))
                            ignored += 1
                            continue

                    try:
                        bookpub = item['volumeInfo']['publisher']
                    except KeyError:
                        bookpub = ""

                    try:
                        booksub = item['volumeInfo']['subtitle']
                    except KeyError:
                        booksub = ""

                    if not booksub:
                        series = ""
                        seriesNum = ""
                    else:
                        try:
                            series = booksub.split('(')[1].split(' Series ')[0]
                        except IndexError:
                            series = ""
                        if series.endswith(')'):
                            series = series[:-1]
                        try:
                            seriesNum = booksub.split('(')[1].split(' Series ')[1].split(')')[0]
                            if seriesNum[0] == '#':
                                seriesNum = seriesNum[1:]
                        except IndexError:
                            seriesNum = ""

                        if not seriesNum and '#' in series:
                            words = series.rsplit('#', 1)
                            series = words[0].strip()
                            seriesNum = words[1].strip()
                        if not seriesNum and ' ' in series:
                            words = series.rsplit(' ', 1)
                            # has to be unicode for isnumeric()
                            if (u"%s" % words[1]).isnumeric():
                                series = words[0]
                                seriesNum = words[1]

                    try:
                        bookdate = item['volumeInfo']['publishedDate']
                    except KeyError:
                        bookdate = '0000-00-00'

                    try:
                        bookimg = item['volumeInfo']['imageLinks']['thumbnail']
                    except KeyError:
                        bookimg = 'images/nocover.png'

                    try:
                        bookrate = item['volumeInfo']['averageRating']
                    except KeyError:
                        bookrate = 0

                    try:
                        bookpages = item['volumeInfo']['pageCount']
                    except KeyError:
                        bookpages = 0

                    try:
                        bookgenre = item['volumeInfo']['categories'][0]
                    except KeyError:
                        bookgenre = ""

                    try:
                        bookdesc = item['volumeInfo']['description']
                    except KeyError:
                        bookdesc = ""

                    rejected = check_status = False
                    bookname = item['volumeInfo']['title']
                    if not bookname:
                        logger.debug('Rejecting bookid %s for %s, no bookname' % (bookid, authorname))
                        removedResults += 1
                        rejected = True
                    else:
                        bookname = replace_all(unaccented(bookname), {':': '.', '"': '', '\'': ''}).strip()

                    booklink = item['volumeInfo']['canonicalVolumeLink']
                    bookrate = float(bookrate)
                    bookid = item['id']

                    # GoodReads sometimes has multiple bookids for the same book (same author/title, different
                    # editions) and sometimes uses the same bookid if the book is the same but the title is
                    # slightly different. Not sure if googlebooks does too, but we only want one...
                    existing_book = myDB.match('SELECT Status,Manual,BookAdded FROM books WHERE BookID=?', (bookid,))
                    if existing_book:
                        book_status = existing_book['Status']
                        locked = existing_book['Manual']
                        added = existing_book['BookAdded']
                        if locked is None:
                            locked = False
                        elif locked.isdigit():
                            locked = bool(int(locked))
                    else:
                        book_status = bookstatus  # new_book status, or new_author status
                        added = today()
                        locked = False

                    if not rejected and re.match('[^\w-]', bookname):
                        # remove books with bad characters in title
                        logger.debug("[%s] removed book for bad characters" % bookname)
                        removedResults += 1
                        rejected = True

                    if not rejected and lazylibrarian.CONFIG['NO_FUTURE']:
                        # googlebooks sometimes gives yyyy, sometimes yyyy-mm, sometimes yyyy-mm-dd
                        if bookdate > today()[:len(bookdate)]:
                            logger.debug('Rejecting %s, future publication date %s' % (bookname, bookdate))
                            removedResults += 1
                            rejected = True

                    if not rejected:
                        cmd = 'SELECT BookID FROM books,authors WHERE books.AuthorID = authors.AuthorID'
                        cmd += ' and BookName=? COLLATE NOCASE and AuthorName=? COLLATE NOCASE'
                        match = myDB.match(cmd, (bookname.replace('"', '""'), authorname.replace('"', '""')))
                        if match:
                            if match['BookID'] != bookid:
                                # we have a different book with this author/title already
                                logger.debug('Rejecting bookid %s for [%s][%s] already got %s' %
                                             (match['BookID'], authorname, bookname, bookid))
                                rejected = True
                                duplicates += 1

                    if not rejected:
                        cmd = 'SELECT AuthorName,BookName FROM books,authors'
                        cmd += ' WHERE authors.AuthorID = books.AuthorID AND BookID=?'
                        match = myDB.match(cmd, (bookid,))
                        if match:
                            # we have a book with this bookid already
                            if bookname != match['BookName'] or authorname != match['AuthorName']:
                                logger.debug('Rejecting bookid %s for [%s][%s] already got bookid for [%s][%s]' %
                                             (bookid, authorname, bookname, match['AuthorName'], match['BookName']))
                            else:
                                logger.debug('Rejecting bookid %s for [%s][%s] already got this book in database' %
                                             (bookid, authorname, bookname))
                                check_status = True
                            duplicates += 1
                            rejected = True

                    if check_status or not rejected:
                        if book_status != "Ignored" and not locked:
                            controlValueDict = {"BookID": bookid}
                            newValueDict = {
                                "AuthorID": authorid,
                                "BookName": bookname,
                                "BookSub": booksub,
                                "BookDesc": bookdesc,
                                "BookIsbn": bookisbn,
                                "BookPub": bookpub,
                                "BookGenre": bookgenre,
                                "BookImg": bookimg,
                                "BookLink": booklink,
                                "BookRate": bookrate,
                                "BookPages": bookpages,
                                "BookDate": bookdate,
                                "BookLang": booklang,
                                "Status": book_status,
                                "AudioStatus": lazylibrarian.CONFIG['NEWAUDIO_STATUS'],
                                "BookAdded": added
                            }
                            resultcount += 1

                            myDB.upsert("books", newValueDict, controlValueDict)
                            logger.debug("Book found: " + bookname + " " + bookdate)
                            updated = False

                            if 'nocover' in bookimg or 'nophoto' in bookimg:
                                # try to get a cover from librarything
                                workcover = getBookCover(bookid)
                                if workcover:
                                    logger.debug('Updated cover for %s to %s' % (bookname, workcover))
                                    controlValueDict = {"BookID": bookid}
                                    newValueDict = {"BookImg": workcover}
                                    myDB.upsert("books", newValueDict, controlValueDict)
                                    updated = True
                            elif bookimg and bookimg.startswith('http'):
                                link, success = cache_img("book", bookid, bookimg, refresh=refresh)
                                if success:
                                    controlValueDict = {"BookID": bookid}
                                    newValueDict = {"BookImg": link}
                                    myDB.upsert("books", newValueDict, controlValueDict)
                                    updated = True
                                else:
                                    logger.debug('Failed to cache image for %s' % bookimg)

                            seriesdict = {}
                            if lazylibrarian.CONFIG['ADD_SERIES']:
                                # prefer series info from librarything
                                seriesdict = getWorkSeries(bookid)
                                if seriesdict:
                                    logger.debug('Updated series: %s [%s]' % (bookid, seriesdict))
                                    updated = True
                                # librarything doesn't have series info. Any in the title?
                                elif series:
                                    seriesdict = {cleanName(unaccented(series)): seriesNum}
                                setSeries(seriesdict, bookid)

                            new_status = setStatus(bookid, seriesdict, bookstatus)
                            if not new_status == book_status:
                                book_status = new_status
                                updated = True

                            worklink = getWorkPage(bookid)
                            if worklink:
                                controlValueDict = {"BookID": bookid}
                                newValueDict = {"WorkPage": worklink}
                                myDB.upsert("books", newValueDict, controlValueDict)

                            if not existing_book:
                                logger.debug("[%s] Added book: %s [%s] status %s" %
                                             (authorname, bookname, booklang, book_status))
                                added_count += 1
                            elif updated:
                                logger.debug("[%s] Updated book: %s [%s] status %s" %
                                             (authorname, bookname, booklang, book_status))
                                updated_count += 1
                        else:
                            book_ignore_count += 1
        except KeyError:
            pass

        deleteEmptySeries()
        logger.debug('[%s] The Google Books API was hit %s time%s to populate book list' %
                     (authorname, api_hits, plural(api_hits)))

        cmd = 'SELECT BookName, BookLink, BookDate, BookImg from books WHERE AuthorID=?'
        cmd += ' AND Status != "Ignored" order by BookDate DESC'
        lastbook = myDB.match(cmd, (authorid,))

        if lastbook:  # maybe there are no books [remaining] for this author
            lastbookname = lastbook['BookName']
            lastbooklink = lastbook['BookLink']
            lastbookdate = lastbook['BookDate']
            lastbookimg = lastbook['BookImg']
        else:
            lastbookname = ""
            lastbooklink = ""
            lastbookdate = ""
            lastbookimg = ""

        controlValueDict = {"AuthorID": authorid}
        newValueDict = {
            "Status": entrystatus,
            "LastBook": lastbookname,
            "LastLink": lastbooklink,
            "LastDate": lastbookdate,
            "LastBookImg": lastbookimg
        }
        myDB.upsert("authors", newValueDict, controlValueDict)

        logger.debug("Found %s total book%s for author" % (total_count, plural(total_count)))
        logger.debug("Removed %s unwanted language result%s for author" % (ignored, plural(ignored)))
        logger.debug("Removed %s bad character or no-name result%s for author" %
                     (removedResults, plural(removedResults)))
        logger.debug("Removed %s duplicate result%s for author" % (duplicates, plural(duplicates)))
        logger.debug("Found %s book%s by author marked as Ignored" %
                     (book_ignore_count, plural(book_ignore_count)))
        logger.debug("Imported/Updated %s book%s for author" % (resultcount, plural(resultcount)))

        myDB.action('insert into stats values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
                    (authorname.replace('"', '""'), api_hits, gr_lang_hits, lt_lang_hits, gb_lang_change,
                     cache_hits, ignored, removedResults, not_cached, duplicates))

        if refresh:
            logger.info("[%s] Book processing complete: Added %s book%s / Updated %s book%s" %
                        (authorname, added_count, plural(added_count), updated_count, plural(updated_count)))
        else:
            logger.info("[%s] Book processing complete: Added %s book%s to the database" %
                        (authorname, added_count, plural(added_count)))

    except Exception:
        logger.error('Unhandled exception in GB.get_author_books: %s' % traceback.format_exc())
def update(): if lazylibrarian.CONFIG['INSTALL_TYPE'] == 'win': logmsg('info', 'Windows .exe updating not supported yet.') return False elif lazylibrarian.CONFIG['INSTALL_TYPE'] == 'package': logmsg('info', 'Please use your package manager to update') return False elif lazylibrarian.CONFIG['INSTALL_TYPE'] == 'git': branch = getCurrentGitBranch() _, _ = runGit('stash clear') output, err = runGit('pull origin ' + branch) if not output: logmsg('error', 'Couldn\'t download latest version') return False for line in output.split('\n'): if 'Already up-to-date.' in line: logmsg('info', 'No update available: ' + str(output)) return False elif 'Aborting' in line or 'local changes' in line: logmsg('error', 'Unable to update: ' + str(output)) return False # Update version.txt and timestamp updateVersionFile(lazylibrarian.CONFIG['LATEST_VERSION']) lazylibrarian.CONFIG['GIT_UPDATED'] = str(int(time.time())) return True elif lazylibrarian.CONFIG['INSTALL_TYPE'] == 'source': if 'gitlab' in lazylibrarian.CONFIG['GIT_HOST']: tar_download_url = 'https://%s/%s/%s/-/archive/%s/%s-%s.tar.gz' % ( lazylibrarian.GITLAB_TOKEN, lazylibrarian.CONFIG['GIT_USER'], lazylibrarian.CONFIG['GIT_REPO'], lazylibrarian.CONFIG['GIT_BRANCH'], lazylibrarian.CONFIG['GIT_REPO'], lazylibrarian.CONFIG['GIT_BRANCH']) else: tar_download_url = 'https://%s/%s/%s/tarball/%s' % ( lazylibrarian.CONFIG['GIT_HOST'], lazylibrarian.CONFIG['GIT_USER'], lazylibrarian.CONFIG['GIT_REPO'], lazylibrarian.CONFIG['GIT_BRANCH']) update_dir = os.path.join(lazylibrarian.PROG_DIR, 'update') try: logmsg('info', 'Downloading update from: ' + tar_download_url) headers = {'User-Agent': getUserAgent()} proxies = proxyList() timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30) r = requests.get(tar_download_url, timeout=timeout, headers=headers, proxies=proxies) except requests.exceptions.Timeout: logmsg('error', "Timeout retrieving new version from " + tar_download_url) return False except Exception as e: if hasattr(e, 'reason'): errmsg = e.reason else: errmsg = str(e) logmsg('error', "Unable to retrieve new version from " + tar_download_url + ", can't update: %s" % errmsg) return False download_name = r.url.split('/')[-1] tar_download_path = os.path.join(lazylibrarian.PROG_DIR, download_name) # Save tar to disk with open(tar_download_path, 'wb') as f: f.write(r.content) # Extract the tar to update folder logmsg('info', 'Extracting file ' + tar_download_path) try: with tarfile.open(tar_download_path) as tar: tar.extractall(update_dir) except Exception as e: logger.error('Failed to unpack tarfile %s (%s): %s' % (type(e).__name__, tar_download_path, str(e))) return False # Delete the tar.gz logmsg('info', 'Deleting file ' + tar_download_path) os.remove(tar_download_path) # Find update dir name update_dir_contents = [x for x in os.listdir(update_dir) if os.path.isdir(os.path.join(update_dir, x))] if len(update_dir_contents) != 1: logmsg('error', "Invalid update data, update failed: " + str(update_dir_contents)) return False content_dir = os.path.join(update_dir, update_dir_contents[0]) # walk temp folder and move files to main folder for rootdir, dirnames, filenames in os.walk(content_dir): rootdir = rootdir[len(content_dir) + 1:] for curfile in filenames: old_path = os.path.join(content_dir, rootdir, curfile) new_path = os.path.join(lazylibrarian.PROG_DIR, rootdir, curfile) if os.path.isfile(new_path): os.remove(new_path) os.renames(old_path, new_path) # Update version.txt and timestamp updateVersionFile(lazylibrarian.CONFIG['LATEST_VERSION']) 
lazylibrarian.CONFIG['GIT_UPDATED'] = str(int(time.time())) return True else: logmsg('error', "Cannot perform update - Install Type not set") return False
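A minimal sketch of how a caller might chain the version check and the updater together (illustrative only; getLatestVersion() is the wrapper referred to in the docstring of getLatestVersion_FromGit() below and is not shown in this extract):

def check_and_update():
    # refresh the stored latest version, then see how far behind we are
    lazylibrarian.CONFIG['LATEST_VERSION'] = getLatestVersion()
    commits, _ = getCommitDifferenceFromGit()
    if commits > 0:
        return update()  # True on success, False otherwise
    return False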
def getLatestVersion_FromGit(): # Don't call directly, use getLatestVersion as wrapper. latest_version = 'Unknown' # Can only work for non Windows driven installs, so check install type if lazylibrarian.CONFIG['INSTALL_TYPE'] == 'win': logmsg('debug', 'Error - should not be called under a windows install') latest_version = 'WINDOWS INSTALL' else: # check current branch value of the local git repo as folks may pull from a branch not master branch = lazylibrarian.CONFIG['GIT_BRANCH'] if branch == 'InvalidBranch': logmsg('debug', 'Failed to get a valid branch name from local repo') else: if branch == 'Package': # check packages against master branch = 'master' # Get the latest commit available from git if 'gitlab' in lazylibrarian.CONFIG['GIT_HOST']: url = 'https://%s/api/v4/projects/%s%%2F%s/repository/branches/%s' % ( lazylibrarian.GITLAB_TOKEN, lazylibrarian.CONFIG['GIT_USER'], lazylibrarian.CONFIG['GIT_REPO'], branch) else: url = 'https://api.%s/repos/%s/%s/commits/%s' % ( lazylibrarian.CONFIG['GIT_HOST'], lazylibrarian.CONFIG['GIT_USER'], lazylibrarian.CONFIG['GIT_REPO'], branch) logmsg('debug', 'Retrieving latest version information from git command=[%s]' % url) timestamp = check_int(lazylibrarian.CONFIG['GIT_UPDATED'], 0) age = '' if timestamp: # timestring for 'If-Modified-Since' needs to be english short day/month names and in gmt # we already have english month names stored in MONTHNAMES[] but need capitalising # so use hard coded versions here instead DAYNAMES = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] MONNAMES = ['', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] tm = time.gmtime(timestamp) age = "%s, %02d %s %04d %02d:%02d:%02d GMT" % (DAYNAMES[tm.tm_wday], tm.tm_mday, MONNAMES[tm.tm_mon], tm.tm_year, tm.tm_hour, tm.tm_min, tm.tm_sec) try: headers = {'User-Agent': getUserAgent()} if age: logmsg('debug', 'Checking if modified since %s' % age) headers.update({'If-Modified-Since': age}) proxies = proxyList() timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30) r = requests.get(url, timeout=timeout, headers=headers, proxies=proxies) if str(r.status_code).startswith('2'): git = r.json() if 'gitlab' in lazylibrarian.CONFIG['GIT_HOST']: latest_version = git['commit']['id'] else: latest_version = git['sha'] logmsg('debug', 'Branch [%s] Latest Version has been set to [%s]' % ( branch, latest_version)) elif str(r.status_code) == '304': latest_version = lazylibrarian.CONFIG['CURRENT_VERSION'] logmsg('debug', 'Not modified, currently on Latest Version') else: logmsg('warn', 'Could not get the latest commit from git') logmsg('debug', 'git latest version returned %s' % r.status_code) latest_version = 'Not_Available_From_Git' except Exception as e: logmsg('warn', 'Could not get the latest commit from git') logmsg('debug', 'git %s for %s: %s' % (type(e).__name__, url, str(e))) latest_version = 'Not_Available_From_Git' return latest_version
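The hard-coded DAYNAMES/MONNAMES tables above exist because strftime('%a') and strftime('%b') are locale dependent, while an If-Modified-Since header must use English names in GMT. For comparison, the standard library can build the same RFC 1123 style timestamp directly; a small sketch (an equivalent, not how the code above does it):

from email.utils import formatdate

timestamp = 1672656245                    # example: seconds since the epoch
age = formatdate(timestamp, usegmt=True)  # "Mon, 02 Jan 2023 10:44:05 GMT"
headers = {'If-Modified-Since': age}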
def _sendBoxcar(self, msg, title, token, subscribe=False): """ Sends a boxcar notification to the address provided msg: The message to send (unicode) title: The title of the message email: The email address to send the message to (or to subscribe with) subscribe: If true then instead of sending a message this function will send a subscription notification (optional, default is False) returns: True if the message succeeded, False otherwise """ logger.debug('Boxcar notification: %s' % msg) logger.debug('Title: %s' % title) logger.debug('Token: %s' % token) logger.debug('Subscribe: %s' % subscribe) # build up the URL and parameters msg = msg.strip() if PY2: msg = msg.encode(lazylibrarian.SYS_ENCODING) title = title.encode(lazylibrarian.SYS_ENCODING) curUrl = API_URL # if this is a subscription notification then act accordingly if subscribe: data = {'email': token} curUrl += "/subscribe" # for normal requests we need all these parameters else: data = { 'user_credentials': token, 'notification[title]': title, 'notification[long_message]': msg, 'notification[sound]': "done" } proxies = proxyList() # send the request to boxcar try: timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30) r = requests.get(curUrl, params=data, timeout=timeout, proxies=proxies) status = str(r.status_code) if status.startswith('2'): logger.debug("BOXCAR: Notification successful.") return True # HTTP status 404 if the provided email address isn't a Boxcar user. if status == '404': logger.warn("BOXCAR: Username is wrong/not a boxcar email. Boxcar will send an email to it") # For HTTP status code 401's, it is because you are passing in either an # invalid token, or the user has not added your service. elif status == '401': # If the user has already added your service, we'll return an HTTP status code of 401. if subscribe: logger.error("BOXCAR: Already subscribed to service") # HTTP status 401 if the user doesn't have the service added else: subscribeNote = self._sendBoxcar(msg, title, token, True) if subscribeNote: logger.debug("BOXCAR: Subscription sent.") return True else: logger.error("BOXCAR: Subscription could not be sent.") # If you receive an HTTP status code of 400, it is because you failed to send the proper parameters elif status == '400': logger.error("BOXCAR: Wrong data send to boxcar.") else: logger.error("BOXCAR: Got error code %s" % status) return False except Exception as e: # if we get an error back that doesn't have an error code then who knows what's really happening # URLError doesn't return a code, just a reason. HTTPError gives a code if not hasattr(e, 'code'): logger.error("BOXCAR: Boxcar notification failed: %s" % str(e)) else: # noinspection PyUnresolvedReferences logger.error("BOXCAR: Boxcar notification failed. Error code: %s" % str(e.code)) return False
def getCommitDifferenceFromGit(): # See how many commits behind we are # Takes current latest version value and tries to diff it with the latest version in the current branch. commit_list = '' commits = -1 if lazylibrarian.CONFIG['LATEST_VERSION'] == 'Not_Available_From_Git': commits = 0 # don't report a commit diff as we don't know anything commit_list = 'Unable to get latest version from %s' % lazylibrarian.CONFIG[ 'GIT_HOST'] logmsg('info', commit_list) elif lazylibrarian.CONFIG['CURRENT_VERSION'] and commits != 0: if 'gitlab' in lazylibrarian.CONFIG['GIT_HOST']: url = 'https://%s/api/v4/projects/%s%%2F%s/repository/compare?from=%s&to=%s' % ( lazylibrarian.CONFIG['GIT_HOST'], lazylibrarian.CONFIG['GIT_USER'], lazylibrarian.CONFIG['GIT_REPO'], lazylibrarian.CONFIG['CURRENT_VERSION'], lazylibrarian.CONFIG['LATEST_VERSION']) else: url = 'https://api.%s/repos/%s/%s/compare/%s...%s' % ( lazylibrarian.CONFIG['GIT_HOST'], lazylibrarian.CONFIG['GIT_USER'], lazylibrarian.CONFIG['GIT_REPO'], lazylibrarian.CONFIG['CURRENT_VERSION'], lazylibrarian.CONFIG['LATEST_VERSION']) logmsg('debug', 'Check for differences between local & repo by [%s]' % url) try: headers = {'User-Agent': getUserAgent()} if 'gitlab' in lazylibrarian.CONFIG['GIT_HOST']: headers['Private-Token'] = '_G8Shnw1-xEWsXPi8fB_' proxies = proxyList() timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30) r = requests.get(url, timeout=timeout, headers=headers, proxies=proxies) git = r.json() # for gitlab, commits = len(git['commits']) no status/ahead/behind if 'gitlab' in lazylibrarian.CONFIG['GIT_HOST']: if 'commits' in git: commits = len(git['commits']) msg = 'Git: Total Commits [%s]' % commits logmsg('debug', msg) else: logmsg( 'warn', 'Could not get difference status from git: %s' % str(git)) if commits > 0: for item in git['commits']: commit_list = "%s\n%s" % (item['title'], commit_list) else: if 'total_commits' in git: commits = int(git['total_commits']) msg = 'Git: Status [%s] - Ahead [%s] - Behind [%s] - Total Commits [%s]' % ( git['status'], git['ahead_by'], git['behind_by'], git['total_commits']) logmsg('debug', msg) else: logmsg( 'warn', 'Could not get difference status from git: %s' % str(git)) if commits > 0: for item in git['commits']: commit_list = "%s\n%s" % (item['commit']['message'], commit_list) except Exception as e: logmsg( 'warn', 'Could not get difference status from git: %s' % type(e).__name__) if commits > 1: logmsg('info', 'New version is available. You are %s commits behind' % commits) elif commits == 1: logmsg('info', 'New version is available. You are one commit behind') elif commits == 0: logmsg('info', 'Lazylibrarian is up to date') else: logmsg( 'info', 'Unknown version of lazylibrarian. Run the updater to identify your version' ) return commits, commit_list
def torrentAction(method, arguments): global session_id, host_url username = lazylibrarian.CONFIG['TRANSMISSION_USER'] password = lazylibrarian.CONFIG['TRANSMISSION_PASS'] if host_url: if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms: logger.debug("Using existing host %s" % host_url) else: host = lazylibrarian.CONFIG['TRANSMISSION_HOST'] port = check_int(lazylibrarian.CONFIG['TRANSMISSION_PORT'], 0) if not host or not port: logger.error( 'Invalid transmission host or port, check your config') return False if not host.startswith("http://") and not host.startswith("https://"): host = 'http://' + host if host.endswith('/'): host = host[:-1] # Fix the URL. We assume that the user does not point to the RPC endpoint, # so add it if it is missing. parts = list(urlparse(host)) if parts[0] not in ("http", "https"): parts[0] = "http" if ':' not in parts[1]: parts[1] += ":%s" % port if not parts[2].endswith("/rpc"): parts[2] += "/transmission/rpc" host_url = urlunparse(parts) auth = (username, password) if username and password else None proxies = proxyList() timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30) # Retrieve session id if session_id: if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms: logger.debug('Using existing session_id %s' % session_id) else: response = requests.get(host_url, auth=auth, proxies=proxies, timeout=timeout) if response is None: logger.error("Error getting Transmission session ID") return # Parse response if response.status_code == 401: if auth: logger.error( "Username and/or password not accepted by Transmission") else: logger.error("Transmission authorization required") return elif response.status_code == 409: session_id = response.headers['x-transmission-session-id'] if not session_id: logger.error("Expected a Session ID from Transmission, got %s" % response.status_code) return # Prepare next request headers = {'x-transmission-session-id': session_id} data = {'method': method, 'arguments': arguments} try: response = requests.post(host_url, json=data, headers=headers, proxies=proxies, auth=auth, timeout=timeout) if response.status_code == 409: session_id = response.headers['x-transmission-session-id'] logger.debug("Retrying with new session_id %s" % session_id) headers = {'x-transmission-session-id': session_id} response = requests.post(host_url, json=data, headers=headers, proxies=proxies, auth=auth, timeout=timeout) if not str(response.status_code).startswith('2'): logger.error("Expected a response from Transmission, got %s" % response.status_code) return try: res = response.json() except ValueError: logger.error("Expected json, Transmission returned %s" % response.text) res = '' return res except Exception as e: logger.error('Transmission %s: %s' % (type(e).__name__, str(e))) return
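torrentAction() wraps the Transmission RPC handshake: the first request is expected to come back 409 so the x-transmission-session-id header can be captured, after which every call is a JSON POST of {method, arguments}. A hedged usage sketch (the method name and response fields follow the public Transmission RPC spec; other LazyLibrarian call sites are not shown here):

# add a torrent by URL or magnet link and read back its id and name
res = torrentAction('torrent-add', {'filename': 'magnet:?xt=urn:btih:<infohash>'})
if res and res.get('result') == 'success':
    args = res['arguments']
    added = args.get('torrent-added') or args.get('torrent-duplicate')
    if added:
        logger.debug("Added torrent %s: %s" % (added['id'], added['name']))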
def get_author_books(self, authorid=None, authorname=None, bookstatus="Skipped", entrystatus='Active', refresh=False): try: api_hits = 0 gr_lang_hits = 0 lt_lang_hits = 0 gb_lang_change = 0 cache_hits = 0 not_cached = 0 URL = 'https://www.goodreads.com/author/list/' + authorid + '.xml?' + urllib.urlencode(self.params) # Artist is loading myDB = database.DBConnection() controlValueDict = {"AuthorID": authorid} newValueDict = {"Status": "Loading"} myDB.upsert("authors", newValueDict, controlValueDict) try: rootxml, in_cache = get_xml_request(URL, useCache=not refresh) except Exception as e: logger.error("%s fetching author books: %s" % (type(e).__name__, str(e))) return if rootxml is None: logger.debug("Error requesting author books") return if not in_cache: api_hits += 1 resultxml = rootxml.getiterator('book') valid_langs = getList(lazylibrarian.CONFIG['IMP_PREFLANG']) resultsCount = 0 removedResults = 0 duplicates = 0 ignored = 0 added_count = 0 updated_count = 0 book_ignore_count = 0 total_count = 0 if resultxml is None: logger.warn('[%s] No books found for author with ID: %s' % (authorname, authorid)) else: logger.debug("[%s] Now processing books with GoodReads API" % authorname) logger.debug("url " + URL) authorNameResult = rootxml.find('./author/name').text # Goodreads sometimes puts extra whitespace in the author names! authorNameResult = ' '.join(authorNameResult.split()) logger.debug("GoodReads author name [%s]" % authorNameResult) loopCount = 1 while resultxml: for book in resultxml: total_count += 1 if book.find('publication_year').text is None: pubyear = "0000" else: pubyear = book.find('publication_year').text try: bookimg = book.find('image_url').text if 'nocover' in bookimg: bookimg = 'images/nocover.png' except (KeyError, AttributeError): bookimg = 'images/nocover.png' bookLanguage = "Unknown" find_field = "id" isbn = "" isbnhead = "" if "All" not in valid_langs: # do we care about language if book.find('isbn').text: find_field = "isbn" isbn = book.find('isbn').text isbnhead = isbn[0:3] else: if book.find('isbn13').text: find_field = "isbn13" isbn = book.find('isbn13').text isbnhead = isbn[3:6] # Try to use shortcut of ISBN identifier codes described here... 
# http://en.wikipedia.org/wiki/List_of_ISBN_identifier_groups if isbnhead: if find_field == "isbn13" and isbn.startswith('979'): for item in lazylibrarian.isbn_979_dict: if isbnhead.startswith(item): bookLanguage = lazylibrarian.isbn_979_dict[item] break if bookLanguage != "Unknown": logger.debug("ISBN979 returned %s for %s" % (bookLanguage, isbnhead)) elif (find_field == "isbn") or (find_field == "isbn13" and isbn.startswith('978')): for item in lazylibrarian.isbn_978_dict: if isbnhead.startswith(item): bookLanguage = lazylibrarian.isbn_978_dict[item] break if bookLanguage != "Unknown": logger.debug("ISBN978 returned %s for %s" % (bookLanguage, isbnhead)) if bookLanguage == "Unknown" and isbnhead: # Nothing in the isbn dictionary, try any cached results match = myDB.match('SELECT lang FROM languages where isbn=?', (isbnhead,)) if match: bookLanguage = match['lang'] cache_hits += 1 logger.debug("Found cached language [%s] for %s [%s]" % (bookLanguage, find_field, isbnhead)) else: # no match in cache, try searching librarything for a language code using the isbn # if no language found, librarything return value is "invalid" or "unknown" # returns plain text, not xml BOOK_URL = 'http://www.librarything.com/api/thingLang.php?isbn=' + isbn proxies = proxyList() try: librarything_wait() timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30) r = requests.get(BOOK_URL, timeout=timeout, proxies=proxies) resp = r.text lt_lang_hits += 1 logger.debug("LibraryThing reports language [%s] for %s" % (resp, isbnhead)) if 'invalid' in resp or 'Unknown' in resp: bookLanguage = "Unknown" else: bookLanguage = resp # found a language code myDB.action('insert into languages values (?, ?)', (isbnhead, bookLanguage)) logger.debug("LT language %s: %s" % (isbnhead, bookLanguage)) except Exception as e: logger.error("%s finding LT language result for [%s], %s" % (type(e).__name__, isbn, str(e))) if bookLanguage == "Unknown": # still no earlier match, we'll have to search the goodreads api try: if book.find(find_field).text: BOOK_URL = 'https://www.goodreads.com/book/show?id=' + \ book.find(find_field).text + '&' + urllib.urlencode(self.params) logger.debug("Book URL: " + BOOK_URL) time_now = int(time.time()) if time_now <= lazylibrarian.LAST_GOODREADS: time.sleep(1) bookLanguage = "" try: BOOK_rootxml, in_cache = get_xml_request(BOOK_URL) if BOOK_rootxml is None: logger.debug('Error requesting book language code') else: if not in_cache: # only update last_goodreads if the result wasn't found in the cache lazylibrarian.LAST_GOODREADS = time_now try: bookLanguage = BOOK_rootxml.find('./book/language_code').text except Exception as e: logger.debug("%s finding language_code in book xml: %s" % (type(e).__name__, str(e))) except Exception as e: logger.debug("%s getting book xml: %s" % (type(e).__name__, str(e))) if not in_cache: gr_lang_hits += 1 if not bookLanguage: bookLanguage = "Unknown" # At this point, give up? # WhatWork on author/title doesn't give us a language. # It might give us the "original language" of the book (but not always) # and our copy might not be in the original language anyway # eg "The Girl With the Dragon Tattoo" original language Swedish # If we have an isbn, try WhatISBN to get alternatives # in case any of them give us a language, but it seems if thinglang doesn't # have a language for the first isbn code, it doesn't for any of the # alternatives either # Goodreads search results don't include the language. 
Although sometimes # it's in the html page, it's not in the xml results if isbnhead != "": # if GR didn't give an isbn we can't cache it # just use language for this book myDB.action('insert into languages values (?, ?)', (isbnhead, bookLanguage)) logger.debug("GoodReads reports language [%s] for %s" % (bookLanguage, isbnhead)) else: not_cached += 1 logger.debug("GR language: " + bookLanguage) else: logger.debug("No %s provided for [%s]" % (find_field, book.find('title').text)) # continue except Exception as e: logger.debug("Goodreads language search failed: %s %s" % (type(e).__name__, str(e))) if bookLanguage not in valid_langs: logger.debug('Skipped %s with language %s' % (book.find('title').text, bookLanguage)) ignored += 1 continue rejected = False check_status = False bookname = book.find('title').text bookid = book.find('id').text bookdesc = book.find('description').text bookisbn = book.find('isbn').text bookpub = book.find('publisher').text booklink = book.find('link').text bookrate = float(book.find('average_rating').text) bookpages = book.find('num_pages').text booksub = series = seriesNum = '' if not bookname: logger.debug('Rejecting bookid %s for %s, no bookname' % (bookid, authorNameResult)) removedResults += 1 rejected = True else: bookname = unaccented(bookname) bookname, booksub = split_title(authorNameResult, bookname) dic = {':': '.', '"': ''} # do we need to strip apostrophes , '\'': ''} bookname = replace_all(bookname, dic) bookname = bookname.strip() # strip whitespace booksub = replace_all(booksub, dic) booksub = booksub.strip() # strip whitespace if booksub: series, seriesNum = bookSeries(booksub) else: series, seriesNum = bookSeries(bookname) if not rejected and re.match('[^\w-]', bookname): # reject books with bad characters in title logger.debug("removed result [" + bookname + "] for bad characters") removedResults += 1 rejected = True if not rejected and lazylibrarian.CONFIG['NO_FUTURE']: if pubyear > today()[:4]: logger.debug('Rejecting %s, future publication date %s' % (bookname, pubyear)) removedResults += 1 rejected = True if not rejected: anames = book.find('authors') amatch = False alist = '' for aname in anames: aid = aname.find('id').text anm = aname.find('name').text if alist: alist += ', ' alist += anm if aid == authorid: role = aname.find('role').text if role is None or 'author' in role.lower() \ or 'pseudonym' in role.lower() or 'pen name' in role.lower(): amatch = True else: logger.debug('Ignoring %s for %s, role is %s' % (bookname, authorNameResult, role)) # else: # multiple authors or wrong author # logger.debug('Ignoring %s for %s, authorid %s' % # (bookname, authorNameResult, aid)) if not amatch: logger.debug('Ignoring %s for %s, wrong author? (got %s)' % (bookname, authorNameResult, alist)) removedResults += 1 rejected = not amatch if not rejected: cmd = 'SELECT BookID FROM books,authors WHERE books.AuthorID = authors.AuthorID' cmd += ' and BookName=? COLLATE NOCASE and AuthorName=? COLLATE NOCASE' match = myDB.match(cmd, (bookname, authorNameResult.replace('"', '""'))) if match: if match['BookID'] != bookid: # we have a different book with this author/title already logger.debug('Rejecting bookid %s for [%s][%s] already got %s' % (match['BookID'], authorNameResult, bookname, bookid)) duplicates += 1 rejected = True if not rejected: cmd = 'SELECT AuthorName,BookName FROM books,authors' cmd += ' WHERE authors.AuthorID = books.AuthorID AND BookID=?' 
match = myDB.match(cmd, (bookid,)) if match: # we have a book with this bookid already if match['BookName'] == 'Untitled' and bookname != 'Untitled': # goodreads has updated the name logger.debug('Renaming bookid %s for [%s][%s] to [%s]' % (bookid, authorNameResult, match['BookName'], bookname)) check_status = True elif bookname != match['BookName'] or authorNameResult != match['AuthorName']: logger.debug('Rejecting bookid %s for [%s][%s] already got bookid for [%s][%s]' % (bookid, authorNameResult, bookname, match['AuthorName'], match['BookName'])) check_status = False else: logger.debug('Rejecting bookid %s for [%s][%s] already got this book in database' % (bookid, authorNameResult, bookname)) check_status = True duplicates += 1 rejected = True if check_status or not rejected: updated = False existing = myDB.match('SELECT Status,Manual,BookAdded,BookName FROM books WHERE BookID=?', (bookid,)) if existing: book_status = existing['Status'] locked = existing['Manual'] added = existing['BookAdded'] if bookname != existing['BookName']: updated = True if locked is None: locked = False elif locked.isdigit(): locked = bool(int(locked)) else: book_status = bookstatus # new_book status, or new_author status added = today() locked = False # Is the book already in the database? # Leave alone if locked or status "ignore" # don't update 'bookadded' if already there if not locked and book_status != "Ignored": controlValueDict = {"BookID": bookid} newValueDict = { "AuthorID": authorid, "BookName": bookname, "BookSub": booksub, "BookDesc": bookdesc, "BookIsbn": bookisbn, "BookPub": bookpub, "BookGenre": "", "BookImg": bookimg, "BookLink": booklink, "BookRate": bookrate, "BookPages": bookpages, "BookDate": pubyear, "BookLang": bookLanguage, "Status": book_status, "AudioStatus": lazylibrarian.CONFIG['NEWAUDIO_STATUS'], "BookAdded": added } resultsCount += 1 myDB.upsert("books", newValueDict, controlValueDict) logger.debug("Book found: " + book.find('title').text + " " + pubyear) if 'nocover' in bookimg or 'nophoto' in bookimg: # try to get a cover from librarything workcover = getBookCover(bookid) if workcover: logger.debug('Updated cover for %s to %s' % (bookname, workcover)) controlValueDict = {"BookID": bookid} newValueDict = {"BookImg": workcover} myDB.upsert("books", newValueDict, controlValueDict) updated = True elif bookimg and bookimg.startswith('http'): link, success = cache_img("book", bookid, bookimg, refresh=refresh) if success: controlValueDict = {"BookID": bookid} newValueDict = {"BookImg": link} myDB.upsert("books", newValueDict, controlValueDict) updated = True else: logger.debug('Failed to cache image for %s' % bookimg) seriesdict = {} if lazylibrarian.CONFIG['ADD_SERIES']: # prefer series info from librarything seriesdict = getWorkSeries(bookid) if seriesdict: logger.debug('Updated series: %s [%s]' % (bookid, seriesdict)) updated = True else: if series: seriesdict = {cleanName(unaccented(series)): seriesNum} setSeries(seriesdict, bookid) new_status = setStatus(bookid, seriesdict, bookstatus) if not new_status == book_status: book_status = new_status updated = True worklink = getWorkPage(bookid) if worklink: controlValueDict = {"BookID": bookid} newValueDict = {"WorkPage": worklink} myDB.upsert("books", newValueDict, controlValueDict) if not existing: logger.debug("[%s] Added book: %s [%s] status %s" % (authorname, bookname, bookLanguage, book_status)) added_count += 1 elif updated: logger.debug("[%s] Updated book: %s [%s] status %s" % (authorname, bookname, bookLanguage, book_status)) 
updated_count += 1 else: book_ignore_count += 1 loopCount += 1 if 0 < lazylibrarian.CONFIG['MAX_BOOKPAGES'] < loopCount: resultxml = None logger.warn('Maximum books page search reached, still more results available') else: URL = 'https://www.goodreads.com/author/list/' + authorid + '.xml?' + \ urllib.urlencode(self.params) + '&page=' + str(loopCount) resultxml = None try: rootxml, in_cache = get_xml_request(URL, useCache=not refresh) if rootxml is None: logger.debug('Error requesting next page of results') else: resultxml = rootxml.getiterator('book') if not in_cache: api_hits += 1 except Exception as e: resultxml = None logger.error("%s finding next page of results: %s" % (type(e).__name__, str(e))) if resultxml: if all(False for _ in resultxml): # returns True if iterator is empty resultxml = None deleteEmptySeries() cmd = 'SELECT BookName, BookLink, BookDate, BookImg from books WHERE AuthorID=?' cmd += ' AND Status != "Ignored" order by BookDate DESC' lastbook = myDB.match(cmd, (authorid,)) if lastbook: lastbookname = lastbook['BookName'] lastbooklink = lastbook['BookLink'] lastbookdate = lastbook['BookDate'] lastbookimg = lastbook['BookImg'] else: lastbookname = "" lastbooklink = "" lastbookdate = "" lastbookimg = "" controlValueDict = {"AuthorID": authorid} newValueDict = { "Status": entrystatus, "LastBook": lastbookname, "LastLink": lastbooklink, "LastDate": lastbookdate, "LastBookImg": lastbookimg } myDB.upsert("authors", newValueDict, controlValueDict) # This is here because GoodReads sometimes has several entries with the same BookID! modified_count = added_count + updated_count logger.debug("Found %s result%s" % (total_count, plural(total_count))) logger.debug("Removed %s unwanted language result%s" % (ignored, plural(ignored))) logger.debug( "Removed %s incorrect/incomplete result%s" % (removedResults, plural(removedResults))) logger.debug("Removed %s duplicate result%s" % (duplicates, plural(duplicates))) logger.debug("Found %s book%s by author marked as Ignored" % (book_ignore_count, plural(book_ignore_count))) logger.debug("Imported/Updated %s book%s" % (modified_count, plural(modified_count))) myDB.action('insert into stats values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', (authorname.replace('"', '""'), api_hits, gr_lang_hits, lt_lang_hits, gb_lang_change, cache_hits, ignored, removedResults, not_cached, duplicates)) if refresh: logger.info("[%s] Book processing complete: Added %s book%s / Updated %s book%s" % (authorname, added_count, plural(added_count), updated_count, plural(updated_count))) else: logger.info("[%s] Book processing complete: Added %s book%s to the database" % (authorname, added_count, plural(added_count))) except Exception: logger.error('Unhandled exception in GR.get_author_books: %s' % traceback.format_exc())
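The ISBN shortcut above keys off the registration-group prefix tables isbn_978_dict / isbn_979_dict, which are not shown in this extract. A minimal sketch of the lookup being performed, with an illustrative two-entry table (the real tables map many more prefixes):

# illustrative only: the real prefix table lives in lazylibrarian's constants
isbn_978_dict = {'0': 'eng', '1': 'eng'}

def prefix_language(isbn):
    # for an ISBN-13 starting 978 the group code follows the prefix (isbn[3:6]),
    # for an ISBN-10 it is the leading digits (isbn[0:3]), as in the code above
    head = isbn[3:6] if isbn.startswith('978') else isbn[0:3]
    for prefix, lang in isbn_978_dict.items():
        if head.startswith(prefix):
            return lang
    return 'Unknown'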
def SABnzbd(title=None, nzburl=None, remove_data=False): if nzburl in ['delete', 'delhistory'] and title == 'unknown': logger.debug('%s function unavailable in this version of sabnzbd, no nzo_ids' % nzburl) return False hostname = lazylibrarian.CONFIG['SAB_HOST'] port = check_int(lazylibrarian.CONFIG['SAB_PORT'], 0) if not hostname or not port: logger.error('Invalid sabnzbd host or port, check your config') return False if hostname.endswith('/'): hostname = hostname[:-1] if not hostname.startswith("http://") and not hostname.startswith("https://"): hostname = 'http://' + hostname HOST = "%s:%s" % (hostname, port) if lazylibrarian.CONFIG['SAB_SUBDIR']: HOST = HOST + "/" + lazylibrarian.CONFIG['SAB_SUBDIR'] params = {} if nzburl == 'auth' or nzburl == 'get_cats': # connection test, check auth mode or get_cats params['mode'] = nzburl params['output'] = 'json' if lazylibrarian.CONFIG['SAB_API']: params['apikey'] = lazylibrarian.CONFIG['SAB_API'] title = 'LL.(%s)' % nzburl elif nzburl == 'queue': params['mode'] = 'queue' params['output'] = 'json' if lazylibrarian.CONFIG['SAB_USER']: params['ma_username'] = lazylibrarian.CONFIG['SAB_USER'] if lazylibrarian.CONFIG['SAB_PASS']: params['ma_password'] = lazylibrarian.CONFIG['SAB_PASS'] if lazylibrarian.CONFIG['SAB_API']: params['apikey'] = lazylibrarian.CONFIG['SAB_API'] title = 'LL.(Queue)' elif nzburl == 'history': params['mode'] = 'history' params['output'] = 'json' if lazylibrarian.CONFIG['SAB_USER']: params['ma_username'] = lazylibrarian.CONFIG['SAB_USER'] if lazylibrarian.CONFIG['SAB_PASS']: params['ma_password'] = lazylibrarian.CONFIG['SAB_PASS'] if lazylibrarian.CONFIG['SAB_API']: params['apikey'] = lazylibrarian.CONFIG['SAB_API'] title = 'LL.(History)' elif nzburl == 'delete': # only deletes tasks if still in the queue, ie NOT completed tasks params['mode'] = 'queue' params['output'] = 'json' params['name'] = nzburl params['value'] = title if lazylibrarian.CONFIG['SAB_USER']: params['ma_username'] = lazylibrarian.CONFIG['SAB_USER'] if lazylibrarian.CONFIG['SAB_PASS']: params['ma_password'] = lazylibrarian.CONFIG['SAB_PASS'] if lazylibrarian.CONFIG['SAB_API']: params['apikey'] = lazylibrarian.CONFIG['SAB_API'] if remove_data: params['del_files'] = 1 title = 'LL.(Delete) ' + title elif nzburl == 'delhistory': params['mode'] = 'history' params['output'] = 'json' params['name'] = 'delete' params['value'] = title if lazylibrarian.CONFIG['SAB_USER']: params['ma_username'] = lazylibrarian.CONFIG['SAB_USER'] if lazylibrarian.CONFIG['SAB_PASS']: params['ma_password'] = lazylibrarian.CONFIG['SAB_PASS'] if lazylibrarian.CONFIG['SAB_API']: params['apikey'] = lazylibrarian.CONFIG['SAB_API'] if remove_data: params['del_files'] = 1 title = 'LL.(DelHistory) ' + title else: params['mode'] = 'addurl' params['output'] = 'json' if nzburl: params['name'] = nzburl if title: params['nzbname'] = title if lazylibrarian.CONFIG['SAB_USER']: params['ma_username'] = lazylibrarian.CONFIG['SAB_USER'] if lazylibrarian.CONFIG['SAB_PASS']: params['ma_password'] = lazylibrarian.CONFIG['SAB_PASS'] if lazylibrarian.CONFIG['SAB_API']: params['apikey'] = lazylibrarian.CONFIG['SAB_API'] if lazylibrarian.CONFIG['SAB_CAT']: params['cat'] = lazylibrarian.CONFIG['SAB_CAT'] if lazylibrarian.CONFIG['USENET_RETENTION']: params["maxage"] = lazylibrarian.CONFIG['USENET_RETENTION'] # FUTURE-CODE # if lazylibrarian.SAB_PRIO: # params["priority"] = lazylibrarian.SAB_PRIO # if lazylibrarian.SAB_PP: # params["script"] = lazylibrarian.SAB_SCRIPT URL = HOST + "/api?" 
+ urlencode(params) # to debug because of api if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms: logger.debug('Request url for <a href="%s">SABnzbd</a>' % URL) proxies = proxyList() try: timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30) r = requests.get(URL, timeout=timeout, proxies=proxies) result = r.json() except requests.exceptions.Timeout: logger.error("Timeout connecting to SAB with URL: %s" % URL) return False except Exception as e: if hasattr(e, 'reason'): errmsg = e.reason elif hasattr(e, 'strerror'): errmsg = e.strerror else: errmsg = str(e) logger.error("Unable to connect to SAB with URL: %s, %s" % (URL, errmsg)) return False if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms: logger.debug("Result text from SAB: " + str(result)) if title: title = unaccented_str(title) if title.startswith('LL.('): return result if result['status'] is True: logger.info("%s sent to SAB successfully." % title) # sab versions earlier than 0.8.0 don't return nzo_ids if 'nzo_ids' in result: if result['nzo_ids']: # check its not empty return result['nzo_ids'][0] return 'unknown' elif result['status'] is False: logger.error("SAB returned Error: %s" % result['error']) return False else: logger.error("Unknown error: " + str(result)) return False
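SABnzbd() multiplexes several SABnzbd API modes through one entry point, selected by the nzburl argument rather than by a real URL for the control modes. A hedged usage sketch (host, indexer URL and titles are placeholders):

# connection test and category list; these return the raw json result
ok = SABnzbd(nzburl='auth')
cats = SABnzbd(nzburl='get_cats')

# queue a download; on success the nzo_id is returned so the task can be tracked
nzo_id = SABnzbd(title='Some Book Name', nzburl='http://indexer.example/getnzb?id=123')

# later: remove it from the queue and delete any files already fetched
if nzo_id and nzo_id != 'unknown':
    SABnzbd(title=nzo_id, nzburl='delete', remove_data=True)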
def fetchURL(URL, headers=None, retry=True, raw=None): """ Return the result of fetching a URL and True if success Otherwise return error message and False Return data as raw/bytes in python2 or if raw == True On python3 default to unicode, need to set raw=True for images/data Allow one retry on timeout by default""" if 'googleapis' in URL: lazylibrarian.GB_CALLS += 1 for entry in lazylibrarian.PROVIDER_BLOCKLIST: if entry["name"] == 'googleapis': if int(time.time()) < int(entry['resume']): return "Blocked", False else: lazylibrarian.PROVIDER_BLOCKLIST.remove(entry) lazylibrarian.GB_CALLS = 0 if raw is None: if PY2: raw = True else: raw = False if headers is None: # some sites insist on having a user-agent, default is to add one # if you don't want any headers, send headers=[] headers = {'User-Agent': getUserAgent()} proxies = proxyList() try: # jackett query all indexers needs a longer timeout # /torznab/all/api?q= or v2.0/indexers/all/results/torznab/api?q= if '/torznab/' in URL and ('/all/' in URL or '/aggregate/' in URL): timeout = check_int(lazylibrarian.CONFIG['HTTP_EXT_TIMEOUT'], 90) else: timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30) r = requests.get(URL, headers=headers, timeout=timeout, proxies=proxies) if str(r.status_code).startswith('2'): # (200 OK etc) if raw: return r.content, True try: result = r.content.decode('utf-8') except UnicodeDecodeError: result = r.content.decode('latin-1') return result, True elif r.status_code == 403 and 'googleapis' in URL: msg = makeUnicode(r.content) logger.debug(msg) # noinspection PyBroadException try: source = json.loads(msg) msg = source['error']['message'] except Exception: pass if 'Limit Exceeded' in msg: # how long until midnight Pacific Time when google reset the quotas delay = seconds_to_midnight() + 28800 # PT is 8hrs behind UTC if delay > 86400: delay -= 86400 # no roll-over to next day else: # might be forbidden for a different reason where midnight might not matter # eg "Cannot determine user location for geographically restricted operation" delay = 3600 for entry in lazylibrarian.PROVIDER_BLOCKLIST: if entry["name"] == 'googleapis': lazylibrarian.PROVIDER_BLOCKLIST.remove(entry) newentry = {"name": 'googleapis', "resume": int(time.time()) + delay, "reason": msg} lazylibrarian.PROVIDER_BLOCKLIST.append(newentry) # noinspection PyBroadException try: # noinspection PyProtectedMember msg = requests.status_codes._codes[r.status_code][0] except Exception: msg = str(r.content) return "Response status %s: %s" % (r.status_code, msg), False except requests.exceptions.Timeout as e: if not retry: logger.error("fetchURL: Timeout getting response from %s" % URL) return "Timeout %s" % str(e), False logger.debug("fetchURL: retrying - got timeout on %s" % URL) result, success = fetchURL(URL, headers=headers, retry=False, raw=False) return result, success except Exception as e: if hasattr(e, 'reason'): return "Exception %s: Reason: %s" % (type(e).__name__, str(e.reason)), False return "Exception %s: %s" % (type(e).__name__, str(e)), False
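fetchURL() always returns a (payload, success) pair, so callers check the boolean and treat the first element as an error message when it is False. A short hedged usage sketch:

# text resource: decoded to unicode on Python 3 (bytes by default on Python 2)
html, success = fetchURL('https://example.org/page')
if not success:
    logger.warn("Fetch failed: %s" % html)

# binary resource such as a cover image: request raw bytes explicitly
data, success = fetchURL('https://example.org/cover.jpg', raw=True)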
def getCommitDifferenceFromGit(): # See how many commits behind we are # Takes current latest version value and tries to diff it with the latest version in the current branch. commit_list = '' commits = -1 if lazylibrarian.CONFIG['LATEST_VERSION'] == 'Not_Available_From_GitHUB': commits = 0 # don't report a commit diff as we don't know anything if lazylibrarian.CONFIG['CURRENT_VERSION'] and commits != 0: logmsg( 'info', '[VersionCheck] - Comparing currently installed version with latest github version' ) url = 'https://api.github.com/repos/%s/LazyLibrarian/compare/%s...%s' % ( lazylibrarian.CONFIG['GIT_USER'], lazylibrarian.CONFIG['CURRENT_VERSION'], lazylibrarian.CONFIG['LATEST_VERSION']) logmsg( 'debug', '(getCommitDifferenceFromGit) - Check for differences between local & repo by [%s]' % url) try: headers = {'User-Agent': USER_AGENT} proxies = proxyList() timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30) r = requests.get(url, timeout=timeout, headers=headers, proxies=proxies) git = r.json() logmsg('debug', 'pull total_commits from json object') if 'total_commits' in str(git): commits = int(git['total_commits']) msg = '(getCommitDifferenceFromGit) - GitHub reports as follows ' msg += 'Status [%s] - Ahead [%s] - Behind [%s] - Total Commits [%s]' % ( git['status'], git['ahead_by'], git['behind_by'], git['total_commits']) logmsg('debug', msg) else: logmsg( 'warn', '(getCommitDifferenceFromGit) Could not get difference status from GitHub: %s' % str(git)) commits = -1 if int(git['total_commits']) > 0: messages = [] for item in git['commits']: messages.insert(0, item['commit']['message']) for line in messages: commit_list = "%s\n%s" % (commit_list, line) except Exception as e: logmsg( 'warn', '(getCommitDifferenceFromGit) %s - could not get difference status from GitHub' % type(e).__name__) if commits > 1: logmsg( 'info', '[VersionCheck] - New version is available. You are %s commits behind' % commits) elif commits == 1: logmsg( 'info', '[VersionCheck] - New version is available. You are one commit behind' ) elif commits == 0: logmsg('info', '[VersionCheck] - lazylibrarian is up to date ') elif commits < 0: msg = '[VersionCheck] - You are running an unknown version of lazylibrarian. ' msg += 'Run the updater to identify your version' logmsg('info', msg) elif lazylibrarian.CONFIG['LATEST_VERSION'] == 'Not_Available_From_GitHUB': commit_list = 'Unable to get latest version from GitHub' logmsg('info', commit_list) else: logmsg( 'info', 'You are running an unknown version of lazylibrarian. Run the updater to identify your version' ) logmsg( 'debug', '(getCommitDifferenceFromGit) - exiting with commit value of [%s]' % commits) # lazylibrarian.CONFIG['COMMITS_BEHIND'] = commits return commits, commit_list
def torrentAction(method, arguments): global session_id, host_url username = lazylibrarian.CONFIG['TRANSMISSION_USER'] password = lazylibrarian.CONFIG['TRANSMISSION_PASS'] if host_url: if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms: logger.debug("Using existing host %s" % host_url) else: host = lazylibrarian.CONFIG['TRANSMISSION_HOST'] port = check_int(lazylibrarian.CONFIG['TRANSMISSION_PORT'], 0) if not host or not port: res = 'Invalid transmission host or port, check your config' logger.error(res) return False, res if not host.startswith("http://") and not host.startswith("https://"): host = 'http://' + host if host.endswith('/'): host = host[:-1] # Fix the URL. We assume that the user does not point to the RPC endpoint, # so add it if it is missing. parts = list(urlparse(host)) if parts[0] not in ("http", "https"): parts[0] = "http" if ':' not in parts[1]: parts[1] += ":%s" % port if not parts[2].endswith("/rpc"): parts[2] += "/transmission/rpc" host_url = urlunparse(parts) auth = (username, password) if username and password else None proxies = proxyList() timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30) # Retrieve session id if session_id: if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms: logger.debug('Using existing session_id %s' % session_id) else: response = requests.get(host_url, auth=auth, proxies=proxies, timeout=timeout) if response is None: res = "Error getting Transmission session ID" logger.error(res) return False, res # Parse response if response.status_code == 401: if auth: res = "Username and/or password not accepted by Transmission" else: res = "Transmission authorization required" logger.error(res) return False, res elif response.status_code == 409: session_id = response.headers['x-transmission-session-id'] if not session_id: res = "Expected a Session ID from Transmission, got %s" % response.status_code logger.error(res) return False, res # Prepare next request headers = {'x-transmission-session-id': session_id} data = {'method': method, 'arguments': arguments} try: response = requests.post(host_url, json=data, headers=headers, proxies=proxies, auth=auth, timeout=timeout) if response.status_code == 409: session_id = response.headers['x-transmission-session-id'] logger.debug("Retrying with new session_id %s" % session_id) headers = {'x-transmission-session-id': session_id} response = requests.post(host_url, json=data, headers=headers, proxies=proxies, auth=auth, timeout=timeout) if not str(response.status_code).startswith('2'): res = "Expected a response from Transmission, got %s" % response.status_code logger.error(res) return False, res try: res = response.json() except ValueError: res = "Expected json, Transmission returned %s" % response.text logger.error(res) return False, res return res, '' except Exception as e: res = 'Transmission %s: %s' % (type(e).__name__, str(e)) logger.error(res) return False, res
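Unlike the earlier variant, this torrentAction() returns a (result, error) pair, so callers unpack both values and treat a False first element as failure. A minimal hedged sketch ('session-get' is a standard Transmission RPC method, used here purely as a connection test):

res, err = torrentAction('session-get', {})
if res is False:
    logger.error("Transmission not reachable: %s" % err)
else:
    logger.debug("Connected to Transmission %s" % res['arguments'].get('version'))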
def SABnzbd(title=None, nzburl=None, remove_data=False): if (nzburl == 'delete' or nzburl == 'delhistory') and title == 'unknown': logger.debug( 'Delete functions unavailable in this version of sabnzbd, no nzo_ids' ) return False hostname = lazylibrarian.CONFIG['SAB_HOST'] port = check_int(lazylibrarian.CONFIG['SAB_PORT'], 0) if not hostname or not port: logger.error('Invalid sabnzbd host or port, check your config') return False if hostname.endswith('/'): hostname = hostname[:-1] if not hostname.startswith("http"): hostname = 'http://' + hostname HOST = "%s:%s" % (hostname, port) if lazylibrarian.CONFIG['SAB_SUBDIR']: HOST = HOST + "/" + lazylibrarian.CONFIG['SAB_SUBDIR'] params = {} if nzburl == 'auth' or nzburl == 'get_cats': # connection test, check auth mode or get_cats params['mode'] = nzburl params['output'] = 'json' if lazylibrarian.CONFIG['SAB_API']: params['apikey'] = lazylibrarian.CONFIG['SAB_API'] title = 'LL.(%s)' % nzburl elif nzburl == 'delete': # only deletes tasks if still in the queue, ie NOT completed tasks params['mode'] = 'queue' params['output'] = 'json' params['name'] = nzburl params['value'] = title if lazylibrarian.CONFIG['SAB_USER']: params['ma_username'] = lazylibrarian.CONFIG['SAB_USER'] if lazylibrarian.CONFIG['SAB_PASS']: params['ma_password'] = lazylibrarian.CONFIG['SAB_PASS'] if lazylibrarian.CONFIG['SAB_API']: params['apikey'] = lazylibrarian.CONFIG['SAB_API'] if remove_data: params['del_files'] = 1 title = 'LL.(Delete) ' + title elif nzburl == 'delhistory': params['mode'] = 'history' params['output'] = 'json' params['name'] = 'delete' params['value'] = title if lazylibrarian.CONFIG['SAB_USER']: params['ma_username'] = lazylibrarian.CONFIG['SAB_USER'] if lazylibrarian.CONFIG['SAB_PASS']: params['ma_password'] = lazylibrarian.CONFIG['SAB_PASS'] if lazylibrarian.CONFIG['SAB_API']: params['apikey'] = lazylibrarian.CONFIG['SAB_API'] if remove_data: params['del_files'] = 1 title = 'LL.(DelHistory) ' + title else: params['mode'] = 'addurl' params['output'] = 'json' if nzburl: params['name'] = nzburl if title: params['nzbname'] = title if lazylibrarian.CONFIG['SAB_USER']: params['ma_username'] = lazylibrarian.CONFIG['SAB_USER'] if lazylibrarian.CONFIG['SAB_PASS']: params['ma_password'] = lazylibrarian.CONFIG['SAB_PASS'] if lazylibrarian.CONFIG['SAB_API']: params['apikey'] = lazylibrarian.CONFIG['SAB_API'] if lazylibrarian.CONFIG['SAB_CAT']: params['cat'] = lazylibrarian.CONFIG['SAB_CAT'] if lazylibrarian.CONFIG['USENET_RETENTION']: params["maxage"] = lazylibrarian.CONFIG['USENET_RETENTION'] # FUTURE-CODE # if lazylibrarian.SAB_PRIO: # params["priority"] = lazylibrarian.SAB_PRIO # if lazylibrarian.SAB_PP: # params["script"] = lazylibrarian.SAB_SCRIPT URL = HOST + "/api?" 
+ urllib.urlencode(params) # to debug because of api logger.debug('Request url for <a href="%s">SABnzbd</a>' % URL) proxies = proxyList() try: timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30) r = requests.get(URL, timeout=timeout, proxies=proxies) result = r.json() except requests.exceptions.Timeout: logger.error("Timeout connecting to SAB with URL: %s" % URL) return False except Exception as e: if hasattr(e, 'reason'): errmsg = e.reason elif hasattr(e, 'strerror'): errmsg = e.strerror else: errmsg = str(e) logger.error("Unable to connect to SAB with URL: %s, %s" % (URL, errmsg)) return False logger.debug("Result text from SAB: " + str(result)) if title: title = unaccented_str(title) if title.startswith('LL.('): return result if result['status'] is True: logger.info("%s sent to SAB successfully." % title) # sab versions earlier than 0.8.0 don't return nzo_ids if 'nzo_ids' in result: if result['nzo_ids']: # check its not empty return result['nzo_ids'][0] return 'unknown' elif result['status'] is False: logger.error("SAB returned Error: %s" % result['error']) return False else: logger.error("Unknown error: " + str(result)) return False
def DirectDownloadMethod(bookid=None, dl_title=None, dl_url=None, library='eBook'): myDB = database.DBConnection() downloadID = False Source = "DIRECT" logger.debug("Starting Direct Download for [%s]" % dl_title) proxies = proxyList() headers = {'Accept-encoding': 'gzip', 'User-Agent': USER_AGENT} try: r = requests.get(dl_url, headers=headers, timeout=90, proxies=proxies) except requests.exceptions.Timeout: logger.warn('Timeout fetching file from url: %s' % dl_url) return False except Exception as e: if hasattr(e, 'reason'): logger.warn('%s fetching file from url: %s, %s' % (type(e).__name__, dl_url, e.reason)) else: logger.warn('%s fetching file from url: %s, %s' % (type(e).__name__, dl_url, str(e))) return False if not str(r.status_code).startswith('2'): logger.debug("Got a %s response for %s" % (r.status_code, dl_url)) elif len(r.content) < 1000: logger.debug("Only got %s bytes for %s, rejecting" % (len(r.content), dl_title)) else: extn = '' basename = '' if ' ' in dl_title: basename, extn = dl_title.rsplit(' ', 1) # last word is often the extension - but not always... if extn and extn in getList(lazylibrarian.CONFIG['EBOOK_TYPE']): dl_title = '.'.join(dl_title.rsplit(' ', 1)) elif magic: mtype = magic.from_buffer(r.content) if 'EPUB' in mtype: extn = '.epub' elif 'Mobipocket' in mtype: # also true for azw and azw3, does it matter? extn = '.mobi' elif 'PDF' in mtype: extn = '.pdf' else: logger.debug("magic reports %s" % mtype) basename = dl_title else: logger.warn("Don't know the filetype for %s" % dl_title) basename = dl_title logger.debug("File download got %s bytes for %s" % (len(r.content), dl_title)) destdir = os.path.join(lazylibrarian.DIRECTORY('Download'), basename) # destdir = os.path.join(lazylibrarian.DIRECTORY('Download'), '%s LL.(%s)' % (basename, bookid)) if not os.path.isdir(destdir): _ = mymakedirs(destdir) try: hashid = dl_url.split("md5=")[1].split("&")[0] except IndexError: hashid = sha1(encode(dl_url)).hexdigest() destfile = os.path.join(destdir, basename + extn) try: with open(destfile, 'wb') as bookfile: bookfile.write(r.content) setperm(destfile) downloadID = hashid except Exception as e: logger.error("%s writing book to %s, %s" % (type(e).__name__, destfile, e)) if downloadID: logger.debug('File %s has been downloaded from %s' % (dl_title, dl_url)) if library == 'eBook': myDB.action('UPDATE books SET status="Snatched" WHERE BookID=?', (bookid,)) elif library == 'AudioBook': myDB.action('UPDATE books SET audiostatus="Snatched" WHERE BookID=?', (bookid,)) myDB.action('UPDATE wanted SET status="Snatched", Source=?, DownloadID=? WHERE NZBurl=?', (Source, downloadID, dl_url)) return True else: logger.error('Failed to download file @ <a href="%s">%s</a>' % (dl_url, dl_url)) myDB.action('UPDATE wanted SET status="Failed" WHERE NZBurl=?', (dl_url,)) return False
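When the title carries no recognisable extension, the code falls back to python-magic to sniff the payload. A small hedged sketch of that detection step in isolation, limited to the same three types recognised above:

import magic  # python-magic; optional dependency, hence the "elif magic:" guard above

def guess_extension(content):
    mtype = magic.from_buffer(content)  # e.g. "EPUB document" or "PDF document, version 1.4"
    if 'EPUB' in mtype:
        return '.epub'
    if 'Mobipocket' in mtype:           # also reported for azw/azw3 containers
        return '.mobi'
    if 'PDF' in mtype:
        return '.pdf'
    return ''                           # unknown: the caller keeps the bare title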
def TORDownloadMethod(bookid=None, tor_title=None, tor_url=None, library='eBook'): myDB = database.DBConnection() downloadID = False Source = '' full_url = tor_url # keep the url as stored in "wanted" table if tor_url and tor_url.startswith('magnet:?'): torrent = tor_url # allow magnet link to write to blackhole and hash to utorrent/rtorrent elif 'magnet:?' in tor_url: # discard any other parameters and just use the magnet link torrent = 'magnet:?' + tor_url.split('magnet:?')[1] else: # h = HTMLParser() # tor_url = h.unescape(tor_url) # HTMLParser is probably overkill, we only seem to get & # tor_url = tor_url.replace('&', '&') if '&file=' in tor_url: # torznab results need to be re-encoded # had a problem with torznab utf-8 encoded strings not matching # our utf-8 strings because of long/short form differences url, value = tor_url.split('&file=', 1) value = makeUnicode(value) # ensure unicode value = unicodedata.normalize('NFC', value) # normalize to short form value = value.encode('unicode-escape') # then escape the result value = makeUnicode(value) # ensure unicode value = value.replace(' ', '%20') # and encode any spaces tor_url = url + '&file=' + value # strip url back to the .torrent as some sites add extra parameters if not tor_url.endswith('.torrent'): if '.torrent' in tor_url: tor_url = tor_url.split('.torrent')[0] + '.torrent' headers = {'Accept-encoding': 'gzip', 'User-Agent': USER_AGENT} proxies = proxyList() try: r = requests.get(tor_url, headers=headers, timeout=90, proxies=proxies) torrent = r.content except requests.exceptions.Timeout: logger.warn('Timeout fetching file from url: %s' % tor_url) return False except Exception as e: # some jackett providers redirect internally using http 301 to a magnet link # which requests can't handle, so throws an exception if "magnet:?" in str(e): torrent = 'magnet:?' + str(e).split('magnet:?')[1]. 
strip("'") else: if hasattr(e, 'reason'): logger.warn('%s fetching file from url: %s, %s' % (type(e).__name__, tor_url, e.reason)) else: logger.warn('%s fetching file from url: %s, %s' % (type(e).__name__, tor_url, str(e))) return False if lazylibrarian.CONFIG['TOR_DOWNLOADER_BLACKHOLE']: Source = "BLACKHOLE" logger.debug("Sending %s to blackhole" % tor_title) tor_name = cleanName(tor_title).replace(' ', '_') if tor_url and tor_url.startswith('magnet'): if lazylibrarian.CONFIG['TOR_CONVERT_MAGNET']: hashid = CalcTorrentHash(tor_url) tor_name = 'meta-' + hashid + '.torrent' tor_path = os.path.join(lazylibrarian.CONFIG['TORRENT_DIR'], tor_name) result = magnet2torrent(tor_url, tor_path) if result is not False: logger.debug('Magnet file saved as: %s' % tor_path) downloadID = Source else: tor_name += '.magnet' tor_path = os.path.join(lazylibrarian.CONFIG['TORRENT_DIR'], tor_name) msg = '' try: msg = 'Opening ' with open(tor_path, 'wb') as torrent_file: msg += 'Writing ' if isinstance(torrent, text_type): torrent = torrent.encode('iso-8859-1') torrent_file.write(torrent) msg += 'SettingPerm ' setperm(tor_path) msg += 'Saved ' logger.debug('Magnet file saved: %s' % tor_path) downloadID = Source except Exception as e: logger.warn("Failed to write magnet to file: %s %s" % (type(e).__name__, str(e))) logger.debug("Progress: %s" % msg) logger.debug("Filename [%s]" % (repr(tor_path))) return False else: tor_name += '.torrent' tor_path = os.path.join(lazylibrarian.CONFIG['TORRENT_DIR'], tor_name) msg = '' try: msg = 'Opening ' with open(tor_path, 'wb') as torrent_file: msg += 'Writing ' if isinstance(torrent, text_type): torrent = torrent.encode('iso-8859-1') torrent_file.write(torrent) msg += 'SettingPerm ' setperm(tor_path) msg += 'Saved ' logger.debug('Torrent file saved: %s' % tor_name) downloadID = Source except Exception as e: logger.warn("Failed to write torrent to file: %s %s" % (type(e).__name__, str(e))) logger.debug("Progress: %s" % msg) logger.debug("Filename [%s]" % (repr(tor_path))) return False hashid = CalcTorrentHash(torrent) if lazylibrarian.CONFIG['TOR_DOWNLOADER_UTORRENT'] and lazylibrarian.CONFIG['UTORRENT_HOST']: logger.debug("Sending %s to Utorrent" % tor_title) Source = "UTORRENT" downloadID = utorrent.addTorrent(tor_url, hashid) # returns hash or False if downloadID: tor_title = utorrent.nameTorrent(downloadID) if lazylibrarian.CONFIG['TOR_DOWNLOADER_RTORRENT'] and lazylibrarian.CONFIG['RTORRENT_HOST']: logger.debug("Sending %s to rTorrent" % tor_title) Source = "RTORRENT" downloadID = rtorrent.addTorrent(tor_url, hashid) # returns hash or False if downloadID: tor_title = rtorrent.getName(downloadID) if lazylibrarian.CONFIG['TOR_DOWNLOADER_QBITTORRENT'] and lazylibrarian.CONFIG['QBITTORRENT_HOST']: logger.debug("Sending %s to qbittorrent" % tor_title) Source = "QBITTORRENT" if isinstance(torrent, binary_type) and torrent.startswith(b'magnet'): status = qbittorrent.addTorrent(torrent, hashid) elif isinstance(torrent, text_type) and torrent.startswith('magnet'): status = qbittorrent.addTorrent(torrent, hashid) else: status = qbittorrent.addTorrent(tor_url, hashid) # returns True or False if status: downloadID = hashid tor_title = qbittorrent.getName(hashid) if lazylibrarian.CONFIG['TOR_DOWNLOADER_TRANSMISSION'] and lazylibrarian.CONFIG['TRANSMISSION_HOST']: logger.debug("Sending %s to Transmission" % tor_title) if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms: logger.debug("TORRENT %s [%s] [%s]" % (len(torrent), torrent[:20], torrent[-20:])) Source = "TRANSMISSION" if 
isinstance(torrent, binary_type) and torrent.startswith(b'magnet'):
            downloadID = transmission.addTorrent(torrent)  # returns id or False
        elif isinstance(torrent, text_type) and torrent.startswith('magnet'):
            downloadID = transmission.addTorrent(torrent)
        elif torrent:
            downloadID = transmission.addTorrent(None, metainfo=b64encode(torrent))
        else:
            downloadID = transmission.addTorrent(tor_url)  # returns id or False
        if downloadID:
            # transmission returns its own int, but we store hashid instead
            downloadID = hashid
            tor_title = transmission.getTorrentFolder(downloadID)

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_SYNOLOGY'] and lazylibrarian.CONFIG['USE_SYNOLOGY'] and \
            lazylibrarian.CONFIG['SYNOLOGY_HOST']:
        logger.debug("Sending %s to Synology" % tor_title)
        Source = "SYNOLOGY_TOR"
        downloadID = synology.addTorrent(tor_url)  # returns id or False
        if downloadID:
            tor_title = synology.getName(downloadID)

    if lazylibrarian.CONFIG['TOR_DOWNLOADER_DELUGE'] and lazylibrarian.CONFIG['DELUGE_HOST']:
        logger.debug("Sending %s to Deluge" % tor_title)
        if not lazylibrarian.CONFIG['DELUGE_USER']:
            # no username, talk to the webui
            Source = "DELUGEWEBUI"
            if isinstance(torrent, binary_type) and torrent.startswith(b'magnet'):
                downloadID = deluge.addTorrent(torrent)
            elif isinstance(torrent, text_type) and torrent.startswith('magnet'):
                downloadID = deluge.addTorrent(torrent)
            elif torrent:
                downloadID = deluge.addTorrent(tor_title, data=b64encode(torrent))
            else:
                downloadID = deluge.addTorrent(tor_url)  # can be link or magnet, returns hash or False
            if downloadID:
                tor_title = deluge.getTorrentFolder(downloadID)
        else:
            # have username, talk to the daemon
            Source = "DELUGERPC"
            client = DelugeRPCClient(lazylibrarian.CONFIG['DELUGE_HOST'],
                                     int(lazylibrarian.CONFIG['DELUGE_PORT']),
                                     lazylibrarian.CONFIG['DELUGE_USER'],
                                     lazylibrarian.CONFIG['DELUGE_PASS'])
            try:
                client.connect()
                args = {"name": tor_title}
                if tor_url.startswith('magnet'):
                    downloadID = client.call('core.add_torrent_magnet', tor_url, args)
                elif isinstance(torrent, binary_type) and torrent.startswith(b'magnet'):
                    downloadID = client.call('core.add_torrent_magnet', torrent, args)
                elif isinstance(torrent, text_type) and torrent.startswith('magnet'):
                    downloadID = client.call('core.add_torrent_magnet', torrent, args)
                elif torrent:
                    downloadID = client.call('core.add_torrent_file', tor_title, b64encode(torrent), args)
                else:
                    downloadID = client.call('core.add_torrent_url', tor_url, args)
                if downloadID:
                    if lazylibrarian.CONFIG['DELUGE_LABEL']:
                        _ = client.call('label.set_torrent', downloadID,
                                        lazylibrarian.CONFIG['DELUGE_LABEL'].lower())
                    result = client.call('core.get_torrent_status', downloadID, {})
                    # for item in result:
                    #     logger.debug('Deluge RPC result %s: %s' % (item, result[item]))
                    if 'name' in result:
                        tor_title = result['name']
            except Exception as e:
                logger.error('DelugeRPC failed %s %s' % (type(e).__name__, str(e)))
                return False

    if not Source:
        logger.warn('No torrent download method is enabled, check config.')
        return False

    if downloadID:
        if tor_title:
            if downloadID.upper() in tor_title.upper():
                logger.warn('%s: name contains hash, probably unresolved magnet' % Source)
            else:
                tor_title = unaccented_str(tor_title)
                # need to check against reject words list again as the name may have changed
                # library is magazine, eBook or AudioBook to determine which reject list,
                # but we can't easily do the per-magazine rejects
                if library == 'magazine':
                    reject_list = getList(lazylibrarian.CONFIG['REJECT_MAGS'])
                elif library == 'eBook':
                    reject_list = getList(lazylibrarian.CONFIG['REJECT_WORDS'])
                elif library == 'AudioBook':
                    reject_list = getList(lazylibrarian.CONFIG['REJECT_AUDIO'])
                else:
                    logger.debug("Invalid library [%s] in TORDownloadMethod" % library)
                    reject_list = []

                rejected = False
                lower_title = tor_title.lower()
                for word in reject_list:
                    if word in lower_title:
                        rejected = True
                        logger.debug("Rejecting torrent name %s, contains %s" % (tor_title, word))
                        break
                if rejected:
                    myDB.action('UPDATE wanted SET status="Failed" WHERE NZBurl=?', (full_url,))
                    delete_task(Source, downloadID, True)
                    return False
                else:
                    logger.debug('%s setting torrent name to [%s]' % (Source, tor_title))
                    myDB.action('UPDATE wanted SET NZBtitle=? WHERE NZBurl=?', (tor_title, full_url))

        if library == 'eBook':
            myDB.action('UPDATE books SET status="Snatched" WHERE BookID=?', (bookid,))
        elif library == 'AudioBook':
            myDB.action('UPDATE books SET audiostatus="Snatched" WHERE BookID=?', (bookid,))
        myDB.action('UPDATE wanted SET status="Snatched", Source=?, DownloadID=? WHERE NZBurl=?',
                    (Source, downloadID, full_url))
        return True

    logger.error('Failed to download torrent from %s, %s' % (Source, tor_url))
    myDB.action('UPDATE wanted SET status="Failed" WHERE NZBurl=?', (full_url,))
    return False
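# Illustrative sketch, not part of LazyLibrarian: the reject check above reduces to a
# case-insensitive substring test of the (possibly renamed) torrent title against a
# comma-separated reject list such as REJECT_WORDS. The helper and sample data below
# are invented purely to show that logic in isolation.

def _is_rejected_example(title, reject_csv):
    """Return (rejected, word) if any reject word appears in the title, else (False, None)."""
    reject_list = [w.strip().lower() for w in reject_csv.split(',') if w.strip()]
    lower_title = title.lower()
    for word in reject_list:
        if word in lower_title:
            return True, word
    return False, None

# e.g. _is_rejected_example("Some Book [epub] sample", "cbr, sample, trailer") -> (True, 'sample')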
def _sendBoxcar(self, msg, title, token, subscribe=False):
    """
    Sends a boxcar notification to the address provided

    msg: The message to send (unicode)
    title: The title of the message
    token: The access token to notify (or the email address to subscribe with)
    subscribe: If True then instead of sending a message this function will send
               a subscription notification (optional, default is False)

    returns: True if the message succeeded, False otherwise
    """
    logger.debug('Boxcar notification: %s' % msg)
    logger.debug('Title: %s' % title)
    logger.debug('Token: %s' % token)
    logger.debug('Subscribe: %s' % subscribe)

    # build up the URL and parameters
    msg = msg.strip()
    if PY2:
        msg = msg.encode(lazylibrarian.SYS_ENCODING)
        title = title.encode(lazylibrarian.SYS_ENCODING)
    curUrl = API_URL

    # if this is a subscription notification then act accordingly
    if subscribe:
        data = {'email': token}
        curUrl += "/subscribe"
    # for normal requests we need all these parameters
    else:
        data = {
            'user_credentials': token,
            'notification[title]': title,
            'notification[long_message]': msg,
            'notification[sound]': "done"
        }
    proxies = proxyList()

    # send the request to boxcar
    try:
        timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
        r = requests.get(curUrl, params=data, timeout=timeout, proxies=proxies)
        status = str(r.status_code)
        if status.startswith('2'):
            logger.debug("BOXCAR: Notification successful.")
            return True

        # HTTP status 404 if the provided email address isn't a Boxcar user.
        if status == '404':
            logger.warn("BOXCAR: Username is wrong/not a boxcar email. Boxcar will send an email to it")
        # For HTTP status code 401's, it is because you are passing in either an
        # invalid token, or the user has not added your service.
        elif status == '401':
            # If the user has already added your service, we'll return an HTTP status code of 401.
            if subscribe:
                logger.error("BOXCAR: Already subscribed to service")
            # HTTP status 401 if the user doesn't have the service added
            else:
                subscribeNote = self._sendBoxcar(msg, title, token, True)
                if subscribeNote:
                    logger.debug("BOXCAR: Subscription sent.")
                    return True
                else:
                    logger.error("BOXCAR: Subscription could not be sent.")
        # If you receive an HTTP status code of 400, it is because you failed to send the proper parameters
        elif status == '400':
            logger.error("BOXCAR: Wrong data sent to Boxcar.")
        else:
            logger.error("BOXCAR: Got error code %s" % status)
        return False

    except Exception as e:
        # if we get an error back that doesn't have an error code then who knows what's really happening
        # URLError doesn't return a code, just a reason. HTTPError gives a code
        if not hasattr(e, 'code'):
            logger.error("BOXCAR: Boxcar notification failed: %s" % str(e))
        else:
            # noinspection PyUnresolvedReferences
            logger.error("BOXCAR: Boxcar notification failed. Error code: %s" % str(e.code))
        return False
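# Illustrative sketch, not LazyLibrarian code: the status handling above follows the
# Boxcar responses the comments describe - 2xx success, 404 unknown user, 401 not yet
# subscribed (so retry once as a subscribe request), 400 bad parameters. The helper
# below condenses that decision table into a plain function; the name is invented.

def _boxcar_action_example(status_code, subscribing=False):
    """Map a Boxcar HTTP status code to the action the notifier takes."""
    if 200 <= status_code < 300:
        return 'success'
    if status_code == 404:
        return 'warn-unknown-user'
    if status_code == 401:
        return 'already-subscribed' if subscribing else 'retry-as-subscribe'
    if status_code == 400:
        return 'bad-parameters'
    return 'error'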
def fetchURL(URL, headers=None, retry=True, raw=None):
    """ Return the result of fetching a URL and True if success
        Otherwise return error message and False
        Return data as raw/bytes in python2, or if raw == True
        On python3 default to unicode, need to set raw=True for images/data
        Allow one retry on timeout by default """

    if 'googleapis' in URL:
        lazylibrarian.GB_CALLS += 1
        for entry in lazylibrarian.PROVIDER_BLOCKLIST:
            if entry["name"] == 'googleapis':
                if int(time.time()) < int(entry['resume']):
                    return "Blocked", False
                else:
                    lazylibrarian.PROVIDER_BLOCKLIST.remove(entry)
                    lazylibrarian.GB_CALLS = 0

    if raw is None:
        if PY2:
            raw = True
        else:
            raw = False

    if headers is None:
        # some sites insist on having a user-agent, default is to add one
        # if you don't want any headers, send headers=[]
        headers = {'User-Agent': getUserAgent()}
    proxies = proxyList()

    try:
        # jackett query against all indexers needs a longer timeout
        # /torznab/all/api?q= or v2.0/indexers/all/results/torznab/api?q=
        if '/torznab/' in URL and ('/all/' in URL or '/aggregate/' in URL):
            timeout = check_int(lazylibrarian.CONFIG['HTTP_EXT_TIMEOUT'], 90)
        else:
            timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
        r = requests.get(URL, headers=headers, timeout=timeout, proxies=proxies)

        if str(r.status_code).startswith('2'):  # (200 OK etc)
            if raw:
                return r.content, True
            try:
                result = r.content.decode('utf-8')
            except UnicodeDecodeError:
                result = r.content.decode('latin-1')
            return result, True
        elif r.status_code == 403 and 'googleapis' in URL:
            msg = makeUnicode(r.content)
            logger.debug(msg)
            # noinspection PyBroadException
            try:
                source = json.loads(msg)
                msg = source['error']['message']
            except Exception:
                pass
            if 'Limit Exceeded' in msg:
                # how long until midnight Pacific Time, when google resets the quotas
                delay = seconds_to_midnight() + 28800  # PT is 8hrs behind UTC
                if delay > 86400:
                    delay -= 86400  # no roll-over to next day
            else:
                # might be forbidden for a different reason where midnight might not matter
                # eg "Cannot determine user location for geographically restricted operation"
                delay = 3600
            for entry in lazylibrarian.PROVIDER_BLOCKLIST:
                if entry["name"] == 'googleapis':
                    lazylibrarian.PROVIDER_BLOCKLIST.remove(entry)
            newentry = {"name": 'googleapis', "resume": int(time.time()) + delay, "reason": msg}
            lazylibrarian.PROVIDER_BLOCKLIST.append(newentry)

        # noinspection PyBroadException
        try:
            # noinspection PyProtectedMember
            msg = requests.status_codes._codes[r.status_code][0]
        except Exception:
            msg = str(r.content)
        return "Response status %s: %s" % (r.status_code, msg), False
    except requests.exceptions.Timeout as e:
        if not retry:
            logger.error("fetchURL: Timeout getting response from %s" % URL)
            return "Timeout %s" % str(e), False
        logger.debug("fetchURL: retrying - got timeout on %s" % URL)
        # keep the caller's raw setting on the retry so binary content isn't decoded
        result, success = fetchURL(URL, headers=headers, retry=False, raw=raw)
        return result, success
    except Exception as e:
        if hasattr(e, 'reason'):
            return "Exception %s: Reason: %s" % (type(e).__name__, str(e.reason)), False
        return "Exception %s: %s" % (type(e).__name__, str(e)), False
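# Illustrative usage sketch (hypothetical caller, not part of this module): fetchURL always
# returns a (result, success) pair, so callers test the boolean before trusting the payload,
# and pass raw=True for binary content such as cover images so it is not decoded to text.
# The function name and file path below are invented for the example.

def _save_cover_example(url, path):
    """Fetch an image with fetchURL and write the raw bytes to disk if the fetch succeeded."""
    data, success = fetchURL(url, raw=True)
    if not success:
        # on failure, data holds the error message returned by fetchURL
        logger.warn("Cover download failed: %s" % data)
        return False
    with open(path, 'wb') as f:
        f.write(data)
    return True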