def _Magazines(self, **kwargs):
    """Build the OPDS navigation feed listing all magazines that have issues.

    Optional kwargs:
        index -- pagination offset into the result set (default 0)
        query -- substring filter on magazine title

    Stores the finished feed dict in self.data; returns None.
    """
    index = 0
    if 'index' in kwargs:
        index = check_int(kwargs['index'], 0)
    myDB = database.DBConnection()
    feed = {'title': 'LazyLibrarian OPDS - Magazines', 'id': 'Magazines', 'updated': now()}
    links = []
    entries = []
    links.append(getLink(href=self.opdsroot,
                         ftype='application/atom+xml; profile=opds-catalog; kind=navigation',
                         rel='start', title='Home'))
    links.append(getLink(href='%s?cmd=Magazines' % self.opdsroot,
                         ftype='application/atom+xml; profile=opds-catalog; kind=navigation',
                         rel='self'))
    links.append(getLink(href='%s/opensearchmagazines.xml' % self.searchroot,
                         ftype='application/opensearchdescription+xml',
                         rel='search', title='Search Magazines'))
    cmd = 'select magazines.*,(select count(*) as counter from issues where magazines.title = issues.title)'
    cmd += ' as Iss_Cnt from magazines '
    args = []
    if 'query' in kwargs:
        # use a bound parameter rather than concatenating user input into the
        # SQL text (the old code was injectable and broke on quotes in the query)
        cmd += 'WHERE magazines.title LIKE ? '
        args.append('%' + kwargs['query'] + '%')
    cmd += 'order by magazines.title'
    if args:
        results = myDB.select(cmd, tuple(args))
    else:
        results = myDB.select(cmd)
    page = results[index:(index + self.PAGE_SIZE)]
    for mag in page:
        # only list magazines that actually have at least one issue
        if mag['Iss_Cnt'] > 0:
            title = makeUnicode(mag['Title'])
            entry = {
                'title': escape('%s (%s)' % (title, mag['Iss_Cnt'])),
                'id': escape('magazine:%s' % title),
                'updated': opdstime(mag['LastAcquired']),
                'content': escape('%s' % title),
                'href': '%s?cmd=Magazine&magid=%s' % (self.opdsroot, quote_plus(title)),
                'kind': 'navigation',
                'rel': 'subsection',
            }
            if lazylibrarian.CONFIG['OPDS_METAINFO']:
                # cache_img returns (relative_path, ...) for the cover thumbnail
                res = cache_img('magazine', md5_utf8(mag['LatestCover']), mag['LatestCover'], refresh=True)
                entry['image'] = self.searchroot + '/' + res[0]
            entries.append(entry)
    # pagination links, PAGE_SIZE entries per page
    if len(results) > (index + self.PAGE_SIZE):
        links.append(
            getLink(href='%s?cmd=Magazines&index=%s' % (self.opdsroot, index + self.PAGE_SIZE),
                    ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='next'))
    if index >= self.PAGE_SIZE:
        links.append(
            getLink(href='%s?cmd=Magazines&index=%s' % (self.opdsroot, index - self.PAGE_SIZE),
                    ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='previous'))
    feed['links'] = links
    feed['entries'] = entries
    logger.debug("Returning %s magazine%s" % (len(entries), plural(len(entries))))
    self.data = feed
    return
def db_v23(myDB, upgradelog):
    """Database schema upgrade to version 23: add the users table.

    Creates the users table if it is missing and seeds it with a single
    admin account taken from the configured HTTP credentials, falling back
    to admin/admin when none are configured.

    myDB       -- open database connection
    upgradelog -- open file handle for the upgrade log
    """
    if not has_column(myDB, "users", "Perms"):
        lazylibrarian.UPDATE_MSG = 'Adding Users table'
        upgradelog.write("%s v23: %s\n" % (time.ctime(), lazylibrarian.UPDATE_MSG))
        cmd = 'CREATE TABLE IF NOT EXISTS users '
        cmd += '(UserID TEXT UNIQUE, UserName TEXT UNIQUE, Password TEXT, Email TEXT, '
        cmd += 'Name TEXT, Perms INTEGER)'
        myDB.action(cmd)
        cmd = 'INSERT into users (UserID, UserName, Name, Password, Email, Perms) VALUES (?, ?, ?, ?, ?, ?)'
        user = lazylibrarian.CONFIG['HTTP_USER']
        pwd = lazylibrarian.CONFIG['HTTP_PASS']
        email = lazylibrarian.CONFIG['ADMIN_EMAIL']
        name = 'admin'
        if not user or not pwd:
            # no web credentials configured: fall back to the default
            # admin/admin account (was a mangled '******' placeholder,
            # matching password default and the admin/admin seed used by
            # the new-database path in dbupgrade)
            user = 'admin'
            pwd = 'admin'
        # password is stored as an md5 hash, UserID is a random token
        myDB.action(cmd, (pwd_generator(), user, name, md5_utf8(pwd), email, lazylibrarian.perm_admin))
        logger.debug('Added admin user %s' % user)
    upgradelog.write("%s v23: complete\n" % time.ctime())
def get_cached_request(url, useCache=True, cache="XML"):
    """Fetch url with an on-disk cache, returning (source, from_cache).

    hashfilename = hash of url.
    If hashfilename exists in the cache and isn't too old, return its contents;
    if not, read the url and store the result in the cache.
    Returns the parsed result (dict for JSON, ElementTree root for XML) and
    a boolean that is True when the result came from the cache.
    Returns (None, False) on any fetch/parse error.
    """
    cacheLocation = cache + "Cache"
    cacheLocation = os.path.join(lazylibrarian.CACHEDIR, cacheLocation)
    if not os.path.exists(cacheLocation):
        os.mkdir(cacheLocation)
    myhash = md5_utf8(url)
    valid_cache = False
    source = None
    hashfilename = cacheLocation + os.path.sep + myhash + "." + cache.lower()
    expiry = lazylibrarian.CONFIG['CACHE_AGE'] * 24 * 60 * 60  # expire cache after this many seconds
    if useCache and os.path.isfile(hashfilename):
        cache_modified_time = os.stat(hashfilename).st_mtime
        time_now = time.time()
        if cache_modified_time < time_now - expiry:
            # Cache entry is too old, delete it
            logger.debug("Expiring %s" % myhash)
            os.remove(hashfilename)
        else:
            valid_cache = True
    if valid_cache:
        lazylibrarian.CACHE_HIT = int(lazylibrarian.CACHE_HIT) + 1
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_cache:
            logger.debug("CacheHandler: Returning CACHED response %s for %s" % (hashfilename, url))
        if cache == "JSON":
            try:
                # use a context manager so the cache file is closed promptly
                with open(hashfilename) as jsonfile:
                    source = json.load(jsonfile)
            except ValueError:
                logger.debug("Error decoding json from %s" % hashfilename)
                return None, False
        elif cache == "XML":
            with open(hashfilename, "rb") as cachefile:
                result = cachefile.read()
            if result and result.startswith(b'<?xml'):
                try:
                    source = ElementTree.fromstring(result)
                except UnicodeEncodeError:
                    # seems sometimes the page contains utf-16 but the header says it's utf-8
                    try:
                        result = result.decode('utf-16').encode('utf-8')
                        source = ElementTree.fromstring(result)
                    except (ElementTree.ParseError, UnicodeEncodeError, UnicodeDecodeError):
                        logger.debug("Error parsing xml from %s" % hashfilename)
                        source = None
                except ElementTree.ParseError:
                    logger.debug("Error parsing xml from %s" % hashfilename)
                    source = None
            if source is None:
                # bad cache entry: drop it so the next call refetches
                logger.debug("Error reading xml from %s" % hashfilename)
                os.remove(hashfilename)
                return None, False
    else:
        lazylibrarian.CACHE_MISS = int(lazylibrarian.CACHE_MISS) + 1
        if cache == 'XML':
            # rate-limit goodreads api calls, and fetch raw bytes for the parser
            gr_api_sleep()
            result, success = fetchURL(url, raw=True)
        else:
            result, success = fetchURL(url)
        if success:
            logger.debug("CacheHandler: Storing %s %s for %s" % (cache, myhash, url))
            if cache == "JSON":
                try:
                    source = json.loads(result)
                    if not expiry:
                        # caching disabled (CACHE_AGE == 0): don't write a file
                        return source, False
                except Exception as e:
                    logger.error("%s decoding json from %s" % (type(e).__name__, url))
                    logger.debug("%s : %s" % (e, result))
                    return None, False
                with open(hashfilename, "w") as jsonfile:
                    json.dump(source, jsonfile)
            elif cache == "XML":
                if result and result.startswith(b'<?xml'):
                    try:
                        source = ElementTree.fromstring(result)
                        if not expiry:
                            return source, False
                    except UnicodeEncodeError:
                        # sometimes we get utf-16 data labelled as utf-8
                        try:
                            result = result.decode('utf-16').encode('utf-8')
                            source = ElementTree.fromstring(result)
                            if not expiry:
                                return source, False
                        except (ElementTree.ParseError, UnicodeEncodeError, UnicodeDecodeError):
                            logger.debug("Error parsing xml from %s" % url)
                            source = None
                    except ElementTree.ParseError:
                        logger.debug("Error parsing xml from %s" % url)
                        source = None
                if source is not None:
                    with open(hashfilename, "wb") as cachefile:
                        cachefile.write(result)
                else:
                    logger.debug("Error getting xml data from %s" % url)
                    return None, False
        else:
            logger.debug("Got error response for %s: %s" % (url, result))
            return None, False
    return source, valid_cache
def createMagCover(issuefile=None, refresh=False, pagenum=1):
    """Create a jpg cover for a magazine issue file (pdf/cbz/cbr/epub).

    issuefile -- path to the issue
    refresh   -- if True, regenerate even when a cover already exists
    pagenum   -- page of the pdf to use as the cover (1-based)

    Returns 'unwanted', 'failed', 'exists', 'ok', the cache hashname of the
    created jpg, or 'Failed' when we fell back to the nocover image.
    """
    if not lazylibrarian.CONFIG['IMP_MAGCOVER'] or not pagenum:
        return 'unwanted'
    if not issuefile or not os.path.isfile(issuefile):
        logger.debug('No issuefile %s' % issuefile)
        return 'failed'
    base, extn = os.path.splitext(issuefile)
    if not extn:
        logger.debug('Unable to create cover for %s, no extension?' % issuefile)
        return 'failed'
    coverfile = base + '.jpg'
    if os.path.isfile(coverfile):
        if refresh:
            os.remove(coverfile)
        else:
            logger.debug('Cover for %s exists' % issuefile)
            return 'exists'  # quit if cover already exists and we didn't want to refresh
    logger.debug('Creating cover for %s' % issuefile)
    data = ''  # result from unzip or unrar
    extn = extn.lower()
    if extn in ['.cbz', '.epub']:
        try:
            data = zipfile.ZipFile(issuefile)
        except Exception as why:
            logger.error("Failed to read zip file %s, %s %s" % (issuefile, type(why).__name__, str(why)))
            data = ''
    elif extn in ['.cbr']:
        try:
            # unrar will complain if the library isn't installed, needs to be compiled separately
            # see https://pypi.python.org/pypi/unrar/ for instructions
            # Download source from http://www.rarlab.com/rar_add.htm
            # note we need LIBRARY SOURCE not a binary package
            # make lib; sudo make install-lib; sudo ldconfig
            # lib.unrar should then be able to find libunrar.so
            from lib.unrar import rarfile
            data = rarfile.RarFile(issuefile)
        except Exception as why:
            logger.error("Failed to read rar file %s, %s %s" % (issuefile, type(why).__name__, str(why)))
            data = ''
    if data:
        img = None
        try:
            # look for a member that is named like a cover image
            for member in data.namelist():
                memlow = member.lower()
                if '-00.' in memlow or '000.' in memlow or 'cover.' in memlow:
                    if memlow.endswith('.jpg') or memlow.endswith('.jpeg'):
                        img = data.read(member)
                        break
            if img:
                with open(coverfile, 'wb') as f:
                    # BUGFIX: archive read() returns bytes; the old code called
                    # img.encode() on Python 3 which raised AttributeError.
                    # Write bytes through unchanged, encode only if we got str.
                    if not isinstance(img, bytes):
                        img = img.encode()
                    f.write(img)
                return 'ok'
            else:
                logger.debug("Failed to find image in %s" % issuefile)
        except Exception as why:
            logger.error("Failed to extract image from %s, %s %s" % (issuefile, type(why).__name__, str(why)))
    elif extn == '.pdf':
        generator = ""
        if len(lazylibrarian.CONFIG['IMP_CONVERT']):  # allow external convert to override libraries
            generator = "external program: %s" % lazylibrarian.CONFIG['IMP_CONVERT']
            if "gsconvert.py" in lazylibrarian.CONFIG['IMP_CONVERT']:
                msg = "Use of gsconvert.py is deprecated, equivalent functionality is now built in. "
                msg += "Support for gsconvert.py may be removed in a future release. See wiki for details."
                logger.warn(msg)
            converter = lazylibrarian.CONFIG['IMP_CONVERT']
            postfix = ''
            # if not os.path.isfile(converter):  # full path given, or just program_name?
            #     converter = os.path.join(os.getcwd(), lazylibrarian.CONFIG['IMP_CONVERT'])
            if 'convert' in converter and 'gs' not in converter:
                # tell imagemagick to only convert first page
                postfix = '[0]'
            try:
                params = [converter, '%s%s' % (issuefile, postfix), '%s' % coverfile]
                res = subprocess.check_output(params, stderr=subprocess.STDOUT)
                res = makeUnicode(res).strip()
                if res:
                    logger.debug('%s reports: %s' % (lazylibrarian.CONFIG['IMP_CONVERT'], res))
            except Exception as e:
                # logger.debug(params)
                logger.warn('External "convert" failed %s %s' % (type(e).__name__, str(e)))
        elif platform.system() == "Windows":
            # look for a local ghostscript exe, then fall back to "where"
            GS = os.path.join(os.getcwd(), "gswin64c.exe")
            generator = "local gswin64c"
            if not os.path.isfile(GS):
                GS = os.path.join(os.getcwd(), "gswin32c.exe")
                generator = "local gswin32c"
            if not os.path.isfile(GS):
                params = ["where", "gswin64c"]
                try:
                    GS = subprocess.check_output(params, stderr=subprocess.STDOUT)
                    GS = makeUnicode(GS).strip()
                    generator = "gswin64c"
                except Exception as e:
                    logger.debug("where gswin64c failed: %s %s" % (type(e).__name__, str(e)))
            if not os.path.isfile(GS):
                params = ["where", "gswin32c"]
                try:
                    GS = subprocess.check_output(params, stderr=subprocess.STDOUT)
                    GS = makeUnicode(GS).strip()
                    generator = "gswin32c"
                except Exception as e:
                    logger.debug("where gswin32c failed: %s %s" % (type(e).__name__, str(e)))
            if not os.path.isfile(GS):
                logger.debug("No gswin found")
                generator = "(no windows ghostscript found)"
            else:
                # noinspection PyBroadException
                try:
                    params = [GS, "--version"]
                    res = subprocess.check_output(params, stderr=subprocess.STDOUT)
                    res = makeUnicode(res).strip()
                    logger.debug("Found %s [%s] version %s" % (generator, GS, res))
                    generator = "%s version %s" % (generator, res)
                    issuefile = issuefile.split('[')[0]
                    params = [GS, "-sDEVICE=jpeg", "-dNOPAUSE", "-dBATCH", "-dSAFER",
                              "-dFirstPage=%d" % check_int(pagenum, 1),
                              "-dLastPage=%d" % check_int(pagenum, 1),
                              "-dUseCropBox", "-sOutputFile=%s" % coverfile, issuefile]
                    res = subprocess.check_output(params, stderr=subprocess.STDOUT)
                    res = makeUnicode(res).strip()
                    if not os.path.isfile(coverfile):
                        logger.debug("Failed to create jpg: %s" % res)
                except Exception:  # as why:
                    logger.warn("Failed to create jpg for %s" % issuefile)
                    logger.debug('Exception in gswin create_cover: %s' % traceback.format_exc())
        else:  # not windows
            try:
                # noinspection PyUnresolvedReferences
                from wand.image import Image
                interface = "wand"
            except ImportError:
                try:
                    # No PythonMagick in python3
                    # noinspection PyUnresolvedReferences
                    import PythonMagick
                    interface = "pythonmagick"
                except ImportError:
                    interface = ""
            try:
                if interface == 'wand':
                    generator = "wand interface"
                    with Image(filename=issuefile + '[' + str(check_int(pagenum, 1) - 1) + ']') as img:
                        img.save(filename=coverfile)
                elif interface == 'pythonmagick':
                    generator = "pythonmagick interface"
                    img = PythonMagick.Image()
                    # PythonMagick requires filenames to be bytestr, not unicode
                    if type(issuefile) is text_type:
                        issuefile = makeBytestr(issuefile)
                    if type(coverfile) is text_type:
                        coverfile = makeBytestr(coverfile)
                    img.read(issuefile + '[' + str(check_int(pagenum, 1) - 1) + ']')
                    img.write(coverfile)
                else:
                    # no imaging library available, fall back to ghostscript
                    GS = os.path.join(os.getcwd(), "gs")
                    generator = "local gs"
                    if not os.path.isfile(GS):
                        GS = ""
                        params = ["which", "gs"]
                        try:
                            GS = subprocess.check_output(params, stderr=subprocess.STDOUT)
                            GS = makeUnicode(GS).strip()
                            generator = GS
                        except Exception as e:
                            logger.debug("which gs failed: %s %s" % (type(e).__name__, str(e)))
                    if not os.path.isfile(GS):
                        logger.debug("Cannot find gs")
                        generator = "(no gs found)"
                    else:
                        params = [GS, "--version"]
                        res = subprocess.check_output(params, stderr=subprocess.STDOUT)
                        res = makeUnicode(res).strip()
                        logger.debug("Found gs [%s] version %s" % (GS, res))
                        generator = "%s version %s" % (generator, res)
                        issuefile = issuefile.split('[')[0]
                        params = [GS, "-sDEVICE=jpeg", "-dNOPAUSE", "-dBATCH", "-dSAFER",
                                  "-dFirstPage=%d" % check_int(pagenum, 1),
                                  "-dLastPage=%d" % check_int(pagenum, 1),
                                  "-dUseCropBox", "-sOutputFile=%s" % coverfile, issuefile]
                        res = subprocess.check_output(params, stderr=subprocess.STDOUT)
                        res = makeUnicode(res).strip()
                        if not os.path.isfile(coverfile):
                            logger.debug("Failed to create jpg: %s" % res)
            except Exception as e:
                logger.warn("Unable to create cover for %s using %s %s" % (issuefile, type(e).__name__, generator))
                logger.debug('Exception in create_cover: %s' % traceback.format_exc())
        if os.path.isfile(coverfile):
            setperm(coverfile)
            logger.debug("Created cover (page %d) for %s using %s" % (check_int(pagenum, 1), issuefile, generator))
            # copy the new cover into the image cache under its hash name
            myhash = md5_utf8(coverfile)
            hashname = os.path.join(lazylibrarian.CACHEDIR, 'magazine', '%s.jpg' % myhash)
            copyfile(coverfile, hashname)
            setperm(hashname)
            return hashname
    # if not recognised extension or cover creation failed
    try:
        coverfile = safe_copy(os.path.join(lazylibrarian.PROG_DIR, 'data/images/nocover.jpg'), coverfile)
        setperm(coverfile)
    except Exception as why:
        logger.error("Failed to copy nocover file, %s %s" % (type(why).__name__, str(why)))
    return 'Failed'
def dbupgrade(db_current_version):
    """Bring the database schema up to db_current_version.

    Reads the current schema version from PRAGMA user_version, runs an
    integrity check, creates a brand-new schema when the database is empty,
    otherwise applies each db_vNN() upgrade step in order, then tidies up,
    vacuums, and restarts the background jobs. Progress is appended to
    dbupgrade.log and mirrored in lazylibrarian.UPDATE_MSG.
    """
    logpath = os.path.join(lazylibrarian.CONFIG['LOGDIR'], 'dbupgrade.log')
    with open(logpath, 'a') as logf:
        # noinspection PyBroadException
        try:
            conn = database.DBConnection()

            # discover the schema version currently stored in the database
            installed = 0
            row = conn.match('PRAGMA user_version')
            if row and row[0]:
                text = str(row[0])
                if text.isdigit():
                    installed = int(text)

            # sqlite self-check; log loudly if the file is damaged
            check = conn.match('PRAGMA integrity_check')
            if check and check[0]:
                verdict = check[0]
                if verdict == 'ok':
                    logger.debug('Database integrity check: %s' % verdict)
                else:
                    logger.error('Database integrity check: %s' % verdict)
                    # should probably abort now

            if installed < db_current_version:
                conn = database.DBConnection()
                if installed:
                    lazylibrarian.UPDATE_MSG = 'Updating database to version %s, current version is %s' % (
                        db_current_version, installed)
                    logger.info(lazylibrarian.UPDATE_MSG)
                    logf.write("%s v0: %s\n" % (time.ctime(), lazylibrarian.UPDATE_MSG))
                else:
                    # brand-new database: build the full schema, nothing to upgrade
                    installed = db_current_version
                    lazylibrarian.UPDATE_MSG = 'Creating new database, version %s' % installed
                    logf.write("%s v0: %s\n" % (time.ctime(), lazylibrarian.UPDATE_MSG))
                    logger.info(lazylibrarian.UPDATE_MSG)
                    # sanity check for incomplete initialisations: drop any leftovers
                    for leftover in conn.select("select name from sqlite_master where type is 'table'"):
                        conn.action("DROP TABLE IF EXISTS %s" % leftover['name'])
                    # new set of database tables
                    ddl = (
                        'CREATE TABLE authors (AuthorID TEXT UNIQUE, AuthorName TEXT UNIQUE, '
                        'AuthorImg TEXT, AuthorLink TEXT, DateAdded TEXT, Status TEXT, LastBook TEXT, '
                        'LastBookImg TEXT, LastLink Text, LastDate TEXT, HaveBooks INTEGER, '
                        'TotalBooks INTEGER, AuthorBorn TEXT, AuthorDeath TEXT, UnignoredBooks INTEGER, '
                        'Manual TEXT, GRfollow TEXT)',
                        'CREATE TABLE books (AuthorID TEXT, BookName TEXT, BookSub TEXT, BookDesc TEXT, '
                        'BookGenre TEXT, BookIsbn TEXT, BookPub TEXT, BookRate INTEGER, BookImg TEXT, '
                        'BookPages INTEGER, BookLink TEXT, BookID TEXT UNIQUE, BookFile TEXT, '
                        'BookDate TEXT, BookLang TEXT, BookAdded TEXT, Status TEXT, WorkPage TEXT, '
                        'Manual TEXT, SeriesDisplay TEXT, BookLibrary TEXT, AudioFile TEXT, '
                        'AudioLibrary TEXT, AudioStatus TEXT, WorkID TEXT, ScanResult TEXT)',
                        'CREATE TABLE wanted (BookID TEXT, NZBurl TEXT, NZBtitle TEXT, NZBdate TEXT, '
                        'NZBprov TEXT, Status TEXT, NZBsize TEXT, AuxInfo TEXT, NZBmode TEXT, '
                        'Source TEXT, DownloadID TEXT, DLResult TEXT)',
                        'CREATE TABLE magazines (Title TEXT UNIQUE, Regex TEXT, Status TEXT, '
                        'MagazineAdded TEXT, LastAcquired TEXT, IssueDate TEXT, IssueStatus TEXT, '
                        'Reject TEXT, LatestCover TEXT, DateType TEXT)',
                        'CREATE TABLE languages (isbn TEXT, lang TEXT)',
                        'CREATE TABLE issues (Title TEXT, IssueID TEXT UNIQUE, IssueAcquired TEXT, '
                        'IssueDate TEXT, IssueFile TEXT)',
                        'CREATE TABLE stats (authorname text, GR_book_hits int, GR_lang_hits int, '
                        'LT_lang_hits int, GB_lang_change, cache_hits int, bad_lang int, bad_char int, '
                        'uncached int, duplicates int)',
                        'CREATE TABLE series (SeriesID INTEGER UNIQUE, SeriesName TEXT, Status TEXT, '
                        'Have TEXT, Total TEXT)',
                        'CREATE TABLE member (SeriesID INTEGER, BookID TEXT, WorkID TEXT, SeriesNum TEXT)',
                        'CREATE TABLE seriesauthors (SeriesID INTEGER, AuthorID TEXT, '
                        'UNIQUE (SeriesID,AuthorID))',
                        'CREATE TABLE downloads (Count INTEGER, Provider TEXT)',
                        'CREATE TABLE users (UserID TEXT UNIQUE, UserName TEXT UNIQUE, Password TEXT, '
                        'Email TEXT, Name TEXT, Perms INTEGER, HaveRead TEXT, ToRead TEXT, '
                        'CalibreRead TEXT, CalibreToRead TEXT, BookType TEXT)',
                        'CREATE TABLE sync (UserID TEXT, Label TEXT, Date TEXT, SyncList TEXT)',
                        'CREATE TABLE isbn (Words TEXT, ISBN TEXT)',
                    )
                    for statement in ddl:
                        conn.action(statement)
                    # pastissues table has same layout as wanted table, code below is to save typos if columns change
                    wanted_sql = conn.match("SELECT sql FROM sqlite_master WHERE type='table' AND name='wanted'")
                    conn.action(wanted_sql['sql'].replace('wanted', 'pastissues'))
                    # seed the default admin account
                    insert = 'INSERT into users (UserID, UserName, Name, Password, Perms) VALUES (?, ?, ?, ?, ?)'
                    conn.action(insert, (pwd_generator(), 'admin', 'admin', md5_utf8('admin'), 65535))
                    logger.debug('Added admin user')
                    for statement in (
                            'CREATE INDEX issues_Title_index ON issues (Title)',
                            'CREATE INDEX books_index_authorid ON books(AuthorID)',
                            'CREATE INDEX books_index_status ON books(Status)',
                            'CREATE INDEX authors_index_status ON authors(Status)',
                            'CREATE INDEX wanted_index_status ON wanted(Status)'):
                        conn.action(statement)

                # run each versioned upgrade step that exists in this module
                step = installed + 1
                while True:
                    fname = 'db_v%s' % step
                    if fname not in globals():
                        break
                    getattr(lazylibrarian.dbupgrade, fname)(conn, logf)
                    step += 1

                # Now do any non-version-specific tidying
                try:
                    nameless = conn.select('SELECT AuthorID FROM authors WHERE AuthorName IS NULL')
                    if nameless:
                        msg = 'Removing %s un-named author%s from database' % (len(nameless), plural(len(nameless)))
                        logger.debug(msg)
                        logf.write("%s: %s\n" % (time.ctime(), msg))
                        for entry in nameless:
                            aid = entry["AuthorID"]
                            conn.action('DELETE from authors WHERE AuthorID=?', (aid,))
                            conn.action('DELETE from books WHERE AuthorID=?', (aid,))
                except Exception as e:
                    msg = 'Delete unnamed author error: %s %s' % (type(e).__name__, str(e))
                    logger.error(msg)
                    logf.write("%s: %s\n" % (time.ctime(), msg))

                conn.action('PRAGMA user_version=%s' % db_current_version)
                lazylibrarian.UPDATE_MSG = 'Cleaning Database'
                logf.write("%s: %s\n" % (time.ctime(), lazylibrarian.UPDATE_MSG))
                conn.action('vacuum')
                lazylibrarian.UPDATE_MSG = 'Database updated to version %s' % db_current_version
                logger.info(lazylibrarian.UPDATE_MSG)
                logf.write("%s: %s\n" % (time.ctime(), lazylibrarian.UPDATE_MSG))
                restartJobs(start='Start')
            lazylibrarian.UPDATE_MSG = ''
        except Exception:
            msg = 'Unhandled exception in database upgrade: %s' % traceback.format_exc()
            logf.write("%s: %s\n" % (time.ctime(), msg))
            logger.error(msg)
            lazylibrarian.UPDATE_MSG = ''
def get_cached_request(url, useCache=True, cache="XML"):
    """Fetch url with an on-disk cache, returning (source, from_cache).

    hashfilename = hash of url.
    If hashfilename exists in the cache and isn't too old, return its contents;
    if not, read the url and store the result in the cache.
    Returns the parsed result (dict for JSON, ElementTree root for XML) and
    a boolean that is True when the result came from the cache.
    Returns (None, False) on any fetch/parse error.
    """
    cacheLocation = cache + "Cache"
    cacheLocation = os.path.join(lazylibrarian.CACHEDIR, cacheLocation)
    if not os.path.exists(cacheLocation):
        os.mkdir(cacheLocation)
    myhash = md5_utf8(url)
    valid_cache = False
    source = None
    hashfilename = cacheLocation + os.path.sep + myhash + "." + cache.lower()
    expiry = lazylibrarian.CONFIG['CACHE_AGE'] * 24 * 60 * 60  # expire cache after this many seconds
    if useCache and os.path.isfile(hashfilename):
        cache_modified_time = os.stat(hashfilename).st_mtime
        time_now = time.time()
        if cache_modified_time < time_now - expiry:
            # Cache entry is too old, delete it
            if lazylibrarian.LOGLEVEL & lazylibrarian.log_cache:
                logger.debug("Expiring %s" % myhash)
            os.remove(hashfilename)
        else:
            valid_cache = True
    if valid_cache:
        lazylibrarian.CACHE_HIT = int(lazylibrarian.CACHE_HIT) + 1
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_cache:
            logger.debug("CacheHandler: Returning CACHED response %s for %s" % (hashfilename, url))
        if cache == "JSON":
            try:
                # use a context manager so the cache file is closed promptly
                with open(hashfilename) as jsonfile:
                    source = json.load(jsonfile)
            except ValueError:
                logger.error("Error decoding json from %s" % hashfilename)
                return None, False
        elif cache == "XML":
            with open(hashfilename, "rb") as cachefile:
                result = cachefile.read()
            if result and result.startswith(b'<?xml'):
                try:
                    source = ElementTree.fromstring(result)
                except UnicodeEncodeError:
                    # seems sometimes the page contains utf-16 but the header says it's utf-8
                    try:
                        result = result.decode('utf-16').encode('utf-8')
                        source = ElementTree.fromstring(result)
                    except (ElementTree.ParseError, UnicodeEncodeError, UnicodeDecodeError):
                        logger.error("Error parsing xml from %s" % hashfilename)
                        source = None
                except ElementTree.ParseError:
                    logger.error("Error parsing xml from %s" % hashfilename)
                    source = None
            if source is None:
                # bad cache entry: drop it so the next call refetches
                logger.error("Error reading xml from %s" % hashfilename)
                os.remove(hashfilename)
                return None, False
    else:
        lazylibrarian.CACHE_MISS = int(lazylibrarian.CACHE_MISS) + 1
        if cache == 'XML':
            # rate-limit goodreads api calls, and fetch raw bytes for the parser
            gr_api_sleep()
            result, success = fetchURL(url, raw=True)
        else:
            result, success = fetchURL(url)
        if success:
            if lazylibrarian.LOGLEVEL & lazylibrarian.log_cache:
                logger.debug("CacheHandler: Storing %s %s for %s" % (cache, myhash, url))
            if cache == "JSON":
                try:
                    source = json.loads(result)
                    if not expiry:
                        # caching disabled (CACHE_AGE == 0): don't write a file
                        return source, False
                except Exception as e:
                    logger.error("%s decoding json from %s" % (type(e).__name__, url))
                    logger.debug("%s : %s" % (e, result))
                    return None, False
                with open(hashfilename, "w") as jsonfile:
                    json.dump(source, jsonfile)
            elif cache == "XML":
                result = makeBytestr(result)
                if result and result.startswith(b'<?xml'):
                    try:
                        source = ElementTree.fromstring(result)
                        if not expiry:
                            return source, False
                    except UnicodeEncodeError:
                        # sometimes we get utf-16 data labelled as utf-8
                        try:
                            result = result.decode('utf-16').encode('utf-8')
                            source = ElementTree.fromstring(result)
                            if not expiry:
                                return source, False
                        except (ElementTree.ParseError, UnicodeEncodeError, UnicodeDecodeError):
                            logger.error("Error parsing xml from %s" % url)
                            source = None
                    except ElementTree.ParseError:
                        logger.error("Error parsing xml from %s" % url)
                        source = None
                if source is not None:
                    with open(hashfilename, "wb") as cachefile:
                        cachefile.write(result)
                else:
                    logger.error("Error getting xml data from %s" % url)
                    return None, False
        else:
            logger.debug("Got error response for %s: %s" % (url, result.split('<')[0]))
            if 'goodreads' in url and '503' in result:
                # back off briefly when goodreads is throttling us
                time.sleep(1)
            return None, False
    return source, valid_cache
def createMagCover(issuefile=None, refresh=False, pagenum=1):
    """Create a jpg cover for a magazine issue file (pdf/cbz/cbr/epub).

    issuefile -- path to the issue
    refresh   -- if True, regenerate even when a cover already exists
    pagenum   -- page of the pdf to use as the cover (1-based)

    Returns 'unwanted', 'failed', 'exists', 'ok', the cache hashname of the
    created jpg, or 'Failed' when we fell back to the nocover image.
    """
    if not lazylibrarian.CONFIG['IMP_MAGCOVER'] or not pagenum:
        return 'unwanted'
    if not issuefile or not os.path.isfile(issuefile):
        logger.debug('No issuefile %s' % issuefile)
        return 'failed'
    base, extn = os.path.splitext(issuefile)
    if not extn:
        logger.debug('Unable to create cover for %s, no extension?' % issuefile)
        return 'failed'
    coverfile = base + '.jpg'
    if os.path.isfile(coverfile):
        if refresh:
            os.remove(coverfile)
        else:
            logger.debug('Cover for %s exists' % issuefile)
            return 'exists'  # quit if cover already exists and we didn't want to refresh
    logger.debug('Creating cover for %s' % issuefile)
    data = ''  # result from unzip or unrar
    extn = extn.lower()
    if extn in ['.cbz', '.epub']:
        try:
            data = zipfile.ZipFile(issuefile)
        except Exception as why:
            logger.error("Failed to read zip file %s, %s %s" % (issuefile, type(why).__name__, str(why)))
            data = ''
    elif extn in ['.cbr']:
        try:
            # unrar will complain if the library isn't installed, needs to be compiled separately
            # see https://pypi.python.org/pypi/unrar/ for instructions
            # Download source from http://www.rarlab.com/rar_add.htm
            # note we need LIBRARY SOURCE not a binary package
            # make lib; sudo make install-lib; sudo ldconfig
            # lib.unrar should then be able to find libunrar.so
            from lib.unrar import rarfile
            data = rarfile.RarFile(issuefile)
        except Exception as why:
            logger.error("Failed to read rar file %s, %s %s" % (issuefile, type(why).__name__, str(why)))
            data = ''
    if data:
        img = None
        try:
            # look for a member that is named like a cover image
            for member in data.namelist():
                memlow = member.lower()
                if '-00.' in memlow or '000.' in memlow or 'cover.' in memlow:
                    if memlow.endswith('.jpg') or memlow.endswith('.jpeg'):
                        img = data.read(member)
                        break
            if img:
                with open(coverfile, 'wb') as f:
                    # BUGFIX: archive read() returns bytes; the old code called
                    # img.encode() on Python 3 which raised AttributeError.
                    # Write bytes through unchanged, encode only if we got str.
                    if not isinstance(img, bytes):
                        img = img.encode()
                    f.write(img)
                return 'ok'
            else:
                logger.debug("Failed to find image in %s" % issuefile)
        except Exception as why:
            logger.error("Failed to extract image from %s, %s %s" % (issuefile, type(why).__name__, str(why)))
    elif extn == '.pdf':
        generator = ""
        if len(lazylibrarian.CONFIG['IMP_CONVERT']):  # allow external convert to override libraries
            generator = "external program: %s" % lazylibrarian.CONFIG['IMP_CONVERT']
            if "gsconvert.py" in lazylibrarian.CONFIG['IMP_CONVERT']:
                msg = "Use of gsconvert.py is deprecated, equivalent functionality is now built in. "
                msg += "Support for gsconvert.py may be removed in a future release. See wiki for details."
                logger.warn(msg)
            converter = lazylibrarian.CONFIG['IMP_CONVERT']
            postfix = ''
            # if not os.path.isfile(converter):  # full path given, or just program_name?
            #     converter = os.path.join(os.getcwd(), lazylibrarian.CONFIG['IMP_CONVERT'])
            if 'convert' in converter and 'gs' not in converter:
                # tell imagemagick to only convert first page
                postfix = '[0]'
            try:
                params = [converter, '%s%s' % (issuefile, postfix), '%s' % coverfile]
                res = subprocess.check_output(params, stderr=subprocess.STDOUT)
                res = makeUnicode(res).strip()
                if res:
                    logger.debug('%s reports: %s' % (lazylibrarian.CONFIG['IMP_CONVERT'], res))
            except Exception as e:
                # logger.debug(params)
                logger.warn('External "convert" failed %s %s' % (type(e).__name__, str(e)))
        elif platform.system() == "Windows":
            # look for a local ghostscript exe, then fall back to "where"
            GS = os.path.join(os.getcwd(), "gswin64c.exe")
            generator = "local gswin64c"
            if not os.path.isfile(GS):
                GS = os.path.join(os.getcwd(), "gswin32c.exe")
                generator = "local gswin32c"
            if not os.path.isfile(GS):
                params = ["where", "gswin64c"]
                try:
                    GS = subprocess.check_output(params, stderr=subprocess.STDOUT)
                    GS = makeUnicode(GS).strip()
                    generator = "gswin64c"
                except Exception as e:
                    logger.debug("where gswin64c failed: %s %s" % (type(e).__name__, str(e)))
            if not os.path.isfile(GS):
                params = ["where", "gswin32c"]
                try:
                    GS = subprocess.check_output(params, stderr=subprocess.STDOUT)
                    GS = makeUnicode(GS).strip()
                    generator = "gswin32c"
                except Exception as e:
                    logger.debug("where gswin32c failed: %s %s" % (type(e).__name__, str(e)))
            if not os.path.isfile(GS):
                logger.debug("No gswin found")
                generator = "(no windows ghostscript found)"
            else:
                # noinspection PyBroadException
                try:
                    params = [GS, "--version"]
                    res = subprocess.check_output(params, stderr=subprocess.STDOUT)
                    res = makeUnicode(res).strip()
                    logger.debug("Found %s [%s] version %s" % (generator, GS, res))
                    generator = "%s version %s" % (generator, res)
                    issuefile = issuefile.split('[')[0]
                    params = [GS, "-sDEVICE=jpeg", "-dNOPAUSE", "-dBATCH", "-dSAFER",
                              "-dFirstPage=%d" % check_int(pagenum, 1),
                              "-dLastPage=%d" % check_int(pagenum, 1),
                              "-dUseCropBox", "-sOutputFile=%s" % coverfile, issuefile]
                    res = subprocess.check_output(params, stderr=subprocess.STDOUT)
                    res = makeUnicode(res).strip()
                    if not os.path.isfile(coverfile):
                        logger.debug("Failed to create jpg: %s" % res)
                except Exception:  # as why:
                    logger.warn("Failed to create jpg for %s" % issuefile)
                    logger.debug('Exception in gswin create_cover: %s' % traceback.format_exc())
        else:  # not windows
            try:
                # noinspection PyUnresolvedReferences
                from wand.image import Image
                interface = "wand"
            except ImportError:
                try:
                    # No PythonMagick in python3
                    # noinspection PyUnresolvedReferences
                    import PythonMagick
                    interface = "pythonmagick"
                except ImportError:
                    interface = ""
            try:
                if interface == 'wand':
                    generator = "wand interface"
                    with Image(filename=issuefile + '[' + str(check_int(pagenum, 1) - 1) + ']') as img:
                        img.save(filename=coverfile)
                elif interface == 'pythonmagick':
                    generator = "pythonmagick interface"
                    img = PythonMagick.Image()
                    # PythonMagick requires filenames to be bytestr, not unicode
                    if type(issuefile) is text_type:
                        issuefile = makeBytestr(issuefile)
                    if type(coverfile) is text_type:
                        coverfile = makeBytestr(coverfile)
                    img.read(issuefile + '[' + str(check_int(pagenum, 1) - 1) + ']')
                    img.write(coverfile)
                else:
                    # no imaging library available, fall back to ghostscript
                    GS = os.path.join(os.getcwd(), "gs")
                    generator = "local gs"
                    if not os.path.isfile(GS):
                        GS = ""
                        params = ["which", "gs"]
                        try:
                            GS = subprocess.check_output(params, stderr=subprocess.STDOUT)
                            GS = makeUnicode(GS).strip()
                            generator = GS
                        except Exception as e:
                            logger.debug("which gs failed: %s %s" % (type(e).__name__, str(e)))
                    if not os.path.isfile(GS):
                        logger.debug("Cannot find gs")
                        generator = "(no gs found)"
                    else:
                        params = [GS, "--version"]
                        res = subprocess.check_output(params, stderr=subprocess.STDOUT)
                        res = makeUnicode(res).strip()
                        logger.debug("Found gs [%s] version %s" % (GS, res))
                        generator = "%s version %s" % (generator, res)
                        issuefile = issuefile.split('[')[0]
                        params = [GS, "-sDEVICE=jpeg", "-dNOPAUSE", "-dBATCH", "-dSAFER",
                                  "-dFirstPage=%d" % check_int(pagenum, 1),
                                  "-dLastPage=%d" % check_int(pagenum, 1),
                                  "-dUseCropBox", "-sOutputFile=%s" % coverfile, issuefile]
                        res = subprocess.check_output(params, stderr=subprocess.STDOUT)
                        res = makeUnicode(res).strip()
                        if not os.path.isfile(coverfile):
                            logger.debug("Failed to create jpg: %s" % res)
            except Exception as e:
                logger.warn("Unable to create cover for %s using %s %s" % (issuefile, type(e).__name__, generator))
                logger.debug('Exception in create_cover: %s' % traceback.format_exc())
        if os.path.isfile(coverfile):
            setperm(coverfile)
            logger.debug("Created cover (page %d) for %s using %s" % (check_int(pagenum, 1), issuefile, generator))
            # copy the new cover into the image cache under its hash name
            myhash = md5_utf8(coverfile)
            hashname = os.path.join(lazylibrarian.CACHEDIR, 'magazine', '%s.jpg' % myhash)
            copyfile(coverfile, hashname)
            setperm(hashname)
            return hashname
    # if not recognised extension or cover creation failed
    try:
        coverfile = safe_copy(os.path.join(lazylibrarian.PROG_DIR, 'data/images/nocover.jpg'), coverfile)
        setperm(coverfile)
    except Exception as why:
        logger.error("Failed to copy nocover file, %s %s" % (type(why).__name__, str(why)))
    return 'Failed'