def books(self, oncard=None, end_session=True):
    print('#######' + inspect.currentframe().f_code.co_name)
    from calibre.devices.usbms.books import BookList
    booklist = BookList(oncard, None, None)
    if oncard:
        return booklist
    books = get_books()
    for name, id, date, size, authors, collections in books:
        b = Book(title=name, rm_id=id, size=size, authors=authors,
                 datetime=date, collections=collections)
        # If we have a correspondence in the cache, retrieve data from it
        # (it helps calibre match the book against its library entry)
        if id in config['match_cache']:
            b.uuid = config['match_cache'][id][0]
            if not b.authors:
                b.authors = config['match_cache'][id][1]
        booklist.add_book(b, replace_metadata=True)
    return booklist
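# Hypothetical sketch (not part of the original driver): books() above unpacks
# six fields per entry, so get_books() is assumed to yield tuples of the form
#   (name, id, date, size, authors, collections)
# and config['match_cache'] is assumed to map a device document id to a
# (calibre_uuid, authors) pair, e.g.
#   config['match_cache']['doc-id-123'] == ('4f7c…-uuid', ['Jane Doe'])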
def books(self, oncard=None, end_session=True):
    print('#######' + inspect.currentframe().f_code.co_name)
    from calibre.devices.usbms.books import BookList
    booklist = BookList(oncard, None, None)
    # Since this is a cloud connection, there is no such thing as a memory card
    if oncard:
        return booklist
    self.rm_client.renew_token()
    meta_items = self.rm_client.get_meta_items()
    folders = filter(lambda x: x.Type == 'CollectionType', meta_items)
    documents = filter(lambda x: x.Type == 'DocumentType', meta_items)
    doc_hierarchy = {
        folder.ID: (folder.VissibleName, folder.Parent)
        for folder in folders
    }

    def get_full_hierarchy(parent_id):
        full_path = doc_hierarchy[parent_id][0] if parent_id in doc_hierarchy else ""
        parent_id = doc_hierarchy[parent_id][1] if parent_id in doc_hierarchy else None
        while parent_id in doc_hierarchy:
            full_path = doc_hierarchy[parent_id][0] + '/' + full_path
            parent_id = doc_hierarchy[parent_id][1]
        return full_path

    for doc in documents:
        if doc.ID in self.ignore_books:
            continue
        # Ugly hack: the timestamp has a variable format (the milliseconds are
        # sometimes missing) and carries a timezone suffix, so strip both
        datetime = time.strptime(
            doc.ModifiedClient.split('.')[0].replace('Z', ''),
            '%Y-%m-%dT%H:%M:%S')
        b = Book(title=doc.VissibleName, rm_id=doc.ID, datetime=datetime)
        if doc.ID in config['match_cache']:
            b.uuid = config['match_cache'][doc.ID][0]
            b.authors = config['match_cache'][doc.ID][1]
        b.device_collections = [get_full_hierarchy(doc.Parent)]
        booklist.add_book(b, replace_metadata=True)
    print('booklist:', booklist)
    return booklist
def books(self, oncard=None, end_session=True):
    '''
    Return a list of ebooks on the device.
    @param oncard: If 'carda' or 'cardb' return a list of ebooks
                   on the specific storage card, otherwise return list of
                   ebooks in main memory of device. If a card is specified
                   and no books are on the card return empty list.
    @return: A BookList.
    '''
    from calibre.ebooks.metadata import authors_to_string

    # Entry point
    booklist = BookList(oncard, None, None)
    if not oncard:
        self._log_location()
        start_time = time.time()
        cached_books = {}

        # Get a local copy of metadata db. If it doesn't exist on device, create it
        db_profile = self._localize_database_path(self.remote_metadata)
        self.local_metadata = db_profile['path']
        con = sqlite3.connect(self.local_metadata)

        with con:
            con.row_factory = sqlite3.Row
            cur = con.cursor()

            # Get the last saved set of installed filenames from the db
            cur.execute('''SELECT filename FROM metadata''')
            rows = cur.fetchall()
            cached_books = [row[b'filename'] for row in rows]
            #cached_books = [self._quote_sqlite_identifier(row[b'filename']) for row in rows]
            if self.prefs.get('development_mode', False):
                self._log("~~~ cached_books: ~~~")
                for b in sorted(cached_books):
                    self._log("%s %s" % (b, repr(b)))

            # Get the currently installed filenames from the documents folder
            installed_books = self._get_nested_folder_contents(self.documents_folder)
            if self.prefs.get('development_mode', False):
                self._log("~~~ installed_books: ~~~")
                for b in sorted(installed_books):
                    self._log("%s %s" % (b, repr(b)))

            moved_books = []
            for i, book in enumerate(installed_books):
                book_moved = False
                if book in cached_books:
                    # Retrieve the cached metadata
                    this_book = self._get_cached_metadata(cur, book)
                    booklist.add_book(this_book, False)
                else:
                    # Check to see if a known book has been moved
                    for cb in cached_books:
                        if cb.rpartition('/')[2] == book.rpartition('/')[2]:
                            # Retrieve the cached metadata with the new location
                            self._log("%s moved to %s" % (repr(cb), repr(book)))
                            this_book = self._get_cached_metadata(cur, cb)
                            this_book.path = book
                            booklist.add_book(this_book, False)

                            # Update metadata with new location
                            cur.execute('''UPDATE metadata
                                           SET filename = {0}
                                           WHERE filename = {1}
                                        '''.format(
                                self._quote_sqlite_identifier(book),
                                self._quote_sqlite_identifier(cb)))
                            con.commit()
                            book_moved = True
                            moved_books.append(cb)
                            break
                    if book_moved:
                        continue

                    # Make a local copy of the book, get the stats
                    remote_path = '/'.join([self.documents_folder, book])
                    stats = self.ios.stat(remote_path)
                    local_path = self._localize_mobi('/'.join([self.documents_folder, book]))
                    book_stats = {'path': local_path, 'stats': stats}
                    try:
                        this_book = self._get_metadata(book, book_stats)
                        os.remove(local_path)
                    except:
                        import traceback
                        traceback.print_exc()
                        self._log("ERROR reading metadata from %s" % book)
                        os.remove(local_path)
                        continue

                    booklist.add_book(this_book, False)
                    cached_books.append(book)

                    # Add to calibre_metadata db
                    cur.execute('''
                        INSERT OR REPLACE INTO metadata
                        (authors, author_sort, dateadded, filename, size,
                         thumb_data, title, title_sort, uuid)
                        VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)''',
                        (unicode('; '.join(this_book.authors)),
                         unicode(this_book.author_sort),
                         this_book.dateadded,
                         this_book.path,
                         this_book.size,
                         this_book.thumb_data,
                         unicode(this_book.title),
                         unicode(this_book.title_sort),
                         this_book.uuid))

                if self.report_progress is not None:
                    self.report_progress(
                        float((i + 1) * 100 / len(installed_books)) / 100,
                        '%(num)d of %(tot)d' % dict(num=i + 1, tot=len(installed_books)))

            # Remove orphans (books no longer in Kindle) from db
            ib = set(installed_books)
            mb = set(moved_books)
            orphans = [x for x in cached_books if x not in ib and x not in mb]

            if orphans:
                for book in orphans:
                    # Remove from db, update device copy
                    self._log("Removing orphan %s from metadata" %
                              self._quote_sqlite_identifier(book))
                    cur.execute('''DELETE FROM metadata WHERE filename = {0}'''.format(
                        self._quote_sqlite_identifier(book)))
                con.execute('''VACUUM''')

            # Copy the updated db to the iDevice
            self._log("updating remote_metadata")
            self.ios.copy_to_idevice(str(self.local_metadata), str(self.remote_metadata))

        if self.report_progress is not None:
            self.report_progress(1.0, 'finished')

        self.cached_books = cached_books
        self.load_time = time.time() - start_time
        metrics = {'book_count': len(booklist), 'load_time': self.load_time}
        #self._log_metrics(metrics=metrics)

    return booklist
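# Illustrative note (not part of the plugin): the moved-book check above
# compares basenames only, e.g.
#   'Fiction/Author/book.mobi'.rpartition('/')[2]  ->  'book.mobi'
# so a file moved between folders is matched by its filename and its db
# record is updated in place rather than re-imported as a new book.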
def books(self, oncard=None, end_session=True):
    import sqlite3 as sqlite

    dummy_bl = BookList(None, None, None)

    if ((oncard == 'carda' and not self._card_a_prefix) or
            (oncard and oncard != 'carda')):
        self.report_progress(1.0, _('Getting list of books on device...'))
        return dummy_bl

    prefix = self._card_a_prefix if oncard == 'carda' else self._main_prefix

    # Let the parent driver get the books
    self.booklist_class.rebuild_collections = self.rebuild_collections
    bl = USBMS.books(self, oncard=oncard, end_session=end_session)

    dbpath = self.normalize_path(prefix + DBPATH)
    debug_print("SQLite DB Path: " + dbpath)

    with closing(sqlite.connect(dbpath)) as connection:
        # Replace undecodable characters in the db instead of erroring out
        connection.text_factory = lambda x: unicode(x, "utf-8", "replace")

        cursor = connection.cursor()

        # Query collections
        query = '''
            SELECT books._id, collection.title
            FROM collections
            LEFT OUTER JOIN books
            LEFT OUTER JOIN collection
            WHERE collections.content_id = books._id AND
                  collections.collection_id = collection._id
            '''
        cursor.execute(query)

        bl_collections = {}
        for i, row in enumerate(cursor):
            bl_collections.setdefault(row[0], [])
            bl_collections[row[0]].append(row[1])

        # Collect information on offsets, but assume any
        # offset we already calculated is correct
        if self.device_offset is None:
            query = 'SELECT file_path, modified_date FROM books'
            cursor.execute(query)

            time_offsets = {}
            for i, row in enumerate(cursor):
                try:
                    comp_date = int(os.path.getmtime(self.normalize_path(prefix + row[0])) * 1000)
                except (OSError, IOError, TypeError):
                    # In case the db has incorrect path info
                    continue
                device_date = int(row[1])
                offset = device_date - comp_date
                time_offsets.setdefault(offset, 0)
                time_offsets[offset] = time_offsets[offset] + 1

            try:
                device_offset = max(time_offsets, key=lambda a: time_offsets.get(a))
                debug_print("Device Offset: %d ms" % device_offset)
                self.device_offset = device_offset
            except ValueError:
                debug_print("No Books To Detect Device Offset.")

        for idx, book in enumerate(bl):
            query = 'SELECT _id, thumbnail FROM books WHERE file_path = ?'
            t = (book.lpath,)
            cursor.execute(query, t)

            for i, row in enumerate(cursor):
                book.device_collections = bl_collections.get(row[0], None)
                thumbnail = row[1]
                if thumbnail is not None:
                    thumbnail = self.normalize_path(prefix + thumbnail)
                    book.thumbnail = ImageWrapper(thumbnail)

        cursor.close()

    return bl
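# Worked example (illustrative numbers only) of the clock-offset vote above:
# each book contributes device_date - comp_date in milliseconds, and the most
# frequent value wins, e.g. with tallies {3600000: 2, 3601042: 1} the driver
# sets self.device_offset = 3600000, i.e. one hour of skew between device and host.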
def books(self, oncard=None, end_session=True):
    """
    Return a list of ebooks on the device.
    @param oncard: If 'carda' or 'cardb' return a list of ebooks
                   on the specific storage card, otherwise return list of
                   ebooks in main memory of device. If a card is specified
                   and no books are on the card return empty list.
    @return: A BookList.
    """
    from calibre.ebooks.metadata import authors_to_string

    # Entry point
    booklist = BookList(oncard, None, None)
    if not oncard:
        self._log_location()
        cached_books = {}

        # Get a local copy of metadata db. If it doesn't exist on device, create it
        db_profile = self._localize_database_path(self.remote_metadata)
        self.local_metadata = db_profile["path"]
        con = sqlite3.connect(self.local_metadata)

        with con:
            con.row_factory = sqlite3.Row
            cur = con.cursor()

            # Get the last saved set of installed filenames from the db
            cur.execute("""SELECT filename FROM metadata""")
            rows = cur.fetchall()
            cached_books = [row[b"filename"] for row in rows]
            # cached_books = [json.dumps(row[b'filename']) for row in rows]
            if self.prefs.get("development_mode", False):
                self._log("~~~ cached_books: ~~~")
                for b in sorted(cached_books):
                    self._log("%s %s" % (b, repr(b)))

            # Get the currently installed filenames from the documents folder
            installed_books = self._get_nested_folder_contents(self.documents_folder)
            if self.prefs.get("development_mode", False):
                self._log("~~~ installed_books: ~~~")
                for b in sorted(installed_books):
                    self._log("%s %s" % (b, repr(b)))

            moved_books = []
            for i, book in enumerate(installed_books):
                book_moved = False
                if book in cached_books:
                    # Retrieve the cached metadata
                    this_book = self._get_cached_metadata(cur, book)
                    booklist.add_book(this_book, False)
                else:
                    # Check to see if a known book has been moved
                    for cb in cached_books:
                        if cb.rpartition("/")[2] == book.rpartition("/")[2]:
                            # Retrieve the cached metadata with the new location
                            self._log("%s moved to %s" % (repr(cb), repr(book)))
                            this_book = self._get_cached_metadata(cur, cb)
                            this_book.path = book
                            booklist.add_book(this_book, False)

                            # Update metadata with new location
                            cur.execute(
                                """UPDATE metadata
                                   SET filename = {0}
                                   WHERE filename = {1}
                                """.format(json.dumps(book), json.dumps(cb))
                            )
                            con.commit()
                            book_moved = True
                            moved_books.append(cb)
                            break
                    if book_moved:
                        continue

                    # Make a local copy of the book, get the stats
                    remote_path = "/".join([self.documents_folder, book])
                    stats = self.ios.stat(remote_path)
                    local_path = self._localize_pdf("/".join([self.documents_folder, book]))
                    pdf_stats = {"path": local_path, "stats": stats}
                    try:
                        this_book = self._get_metadata(book, pdf_stats)
                        os.remove(local_path)
                    except:
                        import traceback
                        traceback.print_exc()
                        self._log("ERROR reading metadata from %s" % book)
                        os.remove(local_path)
                        continue

                    booklist.add_book(this_book, False)
                    cached_books.append(book)

                    # Add to calibre_metadata db
                    cur.execute(
                        """
                        INSERT OR REPLACE INTO metadata
                        (authors, author_sort, dateadded, filename, size,
                         thumb_data, title, title_sort, uuid)
                        VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                        (
                            unicode(" & ".join(this_book.authors)),
                            unicode(this_book.author_sort),
                            this_book.dateadded,
                            this_book.path,
                            this_book.size,
                            this_book.thumb_data,
                            unicode(this_book.title),
                            unicode(this_book.title_sort),
                            this_book.uuid,
                        ),
                    )

                if self.report_progress is not None:
                    self.report_progress(
                        float((i + 1) * 100 / len(installed_books)) / 100,
                        "%(num)d of %(tot)d" % dict(num=i + 1, tot=len(installed_books)),
                    )

            # Remove orphans (books no longer in GoodReader) from db
            ib = set(installed_books)
            mb = set(moved_books)
            orphans = [x for x in cached_books if x not in ib and x not in mb]

            if orphans:
                for book in orphans:
                    # Remove from db, update device copy
                    self._log("Removing orphan %s from metadata" % json.dumps(book))
                    cur.execute(
                        """DELETE FROM metadata WHERE filename = {0}""".format(json.dumps(book))
                    )

            cur.close()
            con.commit()

            # Copy the updated db to the iDevice
            self._log("updating remote_metadata")
            self.ios.copy_to_idevice(str(self.local_metadata), str(self.remote_metadata))

        if self.report_progress is not None:
            self.report_progress(1.0, "finished")

        self.cached_books = cached_books

    return booklist
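# Illustrative note (not the plugin's actual code): json.dumps() is used above
# to turn the filename into a double-quoted SQL literal before interpolating it
# into the UPDATE/DELETE statements. A parameterized statement expresses the
# same deletion without any string interpolation, e.g.
#   cur.execute('DELETE FROM metadata WHERE filename = ?', (book,))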