def remove(self, book: Book):
    """
    Remove all cached files of the given book from the offline cache.

    Stops the copy worker, deletes the cached files from disk, purges the
    matching OfflineCacheModel rows, marks the book as not downloaded,
    clears the queue and restarts processing.

    :param book: the book whose cached chapters should be removed
    """
    self._stop_processing()

    ids = {t.file_id for t in book.chapters}
    # Query once and reuse the rows for both the on-disk deletion and the
    # database cleanup (the original issued the identical query twice).
    offline_elements = list(
        OfflineCacheModel.select().join(File).where(
            OfflineCacheModel.original_file.id << ids))

    for element in offline_elements:
        file_path = os.path.join(self.cache_dir, element.cached_file)
        # An empty cached_file name would resolve to the cache directory
        # itself — never try to delete that.
        if file_path == self.cache_dir:
            continue

        file = Gio.File.new_for_path(file_path)
        if file.query_exists():
            file.delete()

    # Abort a running copy if the file currently being copied is queued.
    for item in self.queue:
        if self.current and item.id == self.current.id:
            self.filecopy_cancel.cancel()

    ids_to_delete = [element.id for element in offline_elements]
    OfflineCacheModel.delete().where(
        OfflineCacheModel.id << ids_to_delete).execute()

    book.downloaded = False
    self.emit_event("book-offline-removed", book)

    self.queue = []
    self._start_processing()
def remove(self, book):
    """
    Remove all tracks of the given book from the offline cache.

    Deletes the cached files from disk, cancels a running copy of a queued
    file, purges the database rows and resumes processing if items remain.

    :param book: the book whose cached tracks should be removed
    """
    tracks = get_tracks(book)
    ids = [t.id for t in tracks]
    offline_elements = OfflineCacheModel.select().where(
        OfflineCacheModel.track << ids)

    for element in offline_elements:
        file_path = os.path.join(self.cache_dir, element.file)
        # Guard against an empty file name resolving to the cache dir.
        if file_path == self.cache_dir:
            continue

        file = Gio.File.new_for_path(file_path)
        if file.query_exists():
            file.delete()

    for item in self.queue:
        if self.current and item.id == self.current.id:
            self.filecopy_cancel.cancel()

    # FIX: the original used "OfflineCacheModel.track in ids" — the Python
    # "in" operator collapses to a plain bool instead of building a SQL IN
    # clause; "<<" produces the intended expression.
    OfflineCacheModel.delete().where(
        OfflineCacheModel.track << ids).execute()

    if len(self.queue) > 0:
        self._start_processing()
def _process_queue(self):
    """
    Worker loop: copy every queued track file into the offline cache.

    Refills the queue from the database, then copies each pending file,
    updating the UI (via Gdk idle callbacks) and each book's download
    status as its tracks complete. Stops early when the worker thread is
    asked to stop or when the user cancels the copy.
    """
    log.info("Startet processing queue")
    self.filecopy_cancel = Gio.Cancellable()

    self._fill_queue_from_db()
    self.total_batch_count = len(self.queue)
    self.current_batch_count = 0
    if len(self.queue) > 0:
        self.current_book_processing = self.queue[0].track.book.id

    while len(self.queue) > 0:
        log.info("Processing item")
        self.current_batch_count += 1
        item = self.queue[0]
        # Cooperative shutdown requested by the owning thread.
        if self.thread.stopped():
            break

        # Re-read the row in case it changed since it was queued.
        new_item = OfflineCacheModel.get(OfflineCacheModel.id == item.id)

        # Crossing a book boundary: finalize the previous book's status.
        if self.current_book_processing != new_item.track.book.id:
            self.update_book_download_status(
                Book.get(Book.id == self.current_book_processing))
            self.current_book_processing = new_item.track.book.id

        if not new_item.copied and os.path.exists(new_item.track.file):
            log.info("Copying item")
            # Show a "Copying <book>" status in the UI on the main thread.
            Gdk.threads_add_idle(
                GLib.PRIORITY_DEFAULT_IDLE, self.ui.switch_to_working,
                _("Copying") + " " +
                tools.shorten_string(new_item.track.book.name, 30),
                False, False)
            self.current = new_item

            destination = Gio.File.new_for_path(
                os.path.join(self.cache_dir, new_item.file))
            source = Gio.File.new_for_path(new_item.track.file)
            flags = Gio.FileCopyFlags.OVERWRITE
            try:
                copied = source.copy(destination, flags,
                                     self.filecopy_cancel,
                                     self.__update_copy_status, None)
            except Exception as e:
                # NOTE(review): "e.code" only exists on GLib errors —
                # another exception type here would raise AttributeError.
                if e.code == Gio.IOErrorEnum.CANCELLED:
                    log.info("Download of book was cancelled.")
                    self.thread.stop()
                    break

                reporter.exception("offline_cache", e)
                log.error("Could not copy file to offline cache: " +
                          new_item.track.file)
                log.error(e)
                # Drop the failed item and move on to the next one.
                self.queue.remove(item)
                continue

            if copied:
                OfflineCacheModel.update(copied=True).where(
                    OfflineCacheModel.id == new_item.id).execute()

        self.queue.remove(item)

    # Finalize the status of the last processed book.
    if self.current_book_processing:
        self.update_book_download_status(
            Book.get(Book.id == self.current_book_processing))

    self.current = None
    Gdk.threads_add_idle(GLib.PRIORITY_DEFAULT_IDLE, self.ui.switch_to_playing)
def update_cache(self, paths):
    """
    Invalidate the cached copies of the given source files so they are
    copied again on the next processing run.

    :param paths: iterable of source file paths whose cache entries
                  should be refreshed
    """
    if OfflineCacheModel.select().count() > 0:
        # FIX: the original condition "OfflineCacheModel.track.file in paths"
        # combined an invalid foreign-key traversal with the Python "in"
        # operator, neither of which produces a SQL expression. Resolve the
        # affected tracks with a subquery instead.
        affected_tracks = Track.select(Track.id).where(Track.file << paths)
        OfflineCacheModel.update(copied=False).where(
            OfflineCacheModel.track << affected_tracks).execute()
        self._fill_queue_from_db()
def _fill_queue_from_db(self):
    """Append every not-yet-copied cache entry to the work queue,
    skipping entries that are already queued."""
    with get_db():
        queued_ids = {queued.id for queued in self.queue}
        pending = OfflineCacheModel.select().where(
            OfflineCacheModel.copied == False)
        for entry in pending:
            if entry.id in queued_ids:
                continue
            self.queue.append(entry)
            queued_ids.add(entry.id)
            self.total_batch_count += 1
def remove_all_for_storage(self, storage_path):
    """
    Remove all cached files whose source tracks live under the given
    storage location, and mark their books as no longer offline.

    :param storage_path: path prefix of the storage location to purge
    """
    # FIX: "storage_path in Track.file" used the Python "in" operator,
    # which does not build a SQL expression; use Track.file.contains().
    affected = OfflineCacheModel.select().join(Track).where(
        Track.file.contains(storage_path))

    for element in affected:
        file_path = os.path.join(self.cache_dir, element.file)
        # Never delete the cache directory itself.
        if file_path == self.cache_dir:
            continue

        cached_file = Gio.File.new_for_path(file_path)
        if cached_file.query_exists():
            cached_file.delete()

        # FIX: calling update() through a model instance invokes the
        # classmethod and would update every row; scope it to this book.
        book = element.track.book
        if book.offline == True:
            Book.update(offline=False, downloaded=False).where(
                Book.id == book.id).execute()

    affected_tracks = Track.select(Track.id).where(
        Track.file.contains(storage_path))
    OfflineCacheModel.delete().where(
        OfflineCacheModel.track << affected_tracks).execute()
def get_cached_path(self, chapter: Chapter):
    """Return the absolute path of the finished cached copy of *chapter*,
    or None when no copied cache entry exists."""
    cached = OfflineCacheModel.select().where(
        OfflineCacheModel.original_file == chapter.file_id,
        OfflineCacheModel.copied == True)

    if cached.count() < 1:
        return None

    return os.path.join(self.cache_dir, cached.get().cached_file)
def get_cached_path(self, track):
    """Return the path of the cached file for *track*, or None when the
    track has not been fully copied into the cache."""
    entry = OfflineCacheModel.select().where(
        OfflineCacheModel.track == track.id,
        OfflineCacheModel.copied == True)

    if entry.count() < 1:
        return None

    return os.path.join(self.cache_dir, entry.get().file)
def _is_book_downloaded(self, book: Book):
    """Return True when every chapter of *book* has an offline cache
    entry (regardless of copy progress)."""
    wanted_ids = [chapter.file_id for chapter in book.chapters]
    offline_files = OfflineCacheModel.select().where(
        OfflineCacheModel.original_file << wanted_ids)
    cached_ids = {file.original_file.id for file in offline_files}

    return all(chapter.file_id in cached_ids for chapter in book.chapters)
def add(self, book):
    """
    Queue every track of *book* for offline caching and start the copy
    worker.
    """
    rows = [(track, str(uuid.uuid4())) for track in get_tracks(book)]

    # Insert in batches of 500 rows, as in the rest of this class.
    for start in range(0, len(rows), 500):
        chunk = rows[start:start + 500]
        insert = OfflineCacheModel.insert_many(
            chunk, fields=[OfflineCacheModel.track, OfflineCacheModel.file])
        self.total_batch_count += len(chunk)
        insert.execute()

    self._start_processing()
def update_book_download_status(self, book):
    """
    Recalculate whether *book* is fully downloaded, persist the result
    and emit the matching offline event.

    :param book: the book whose download status should be refreshed
    """
    tracks = get_tracks(book)
    # FIX: "OfflineCacheModel.track in tracks" used the Python "in"
    # operator, which collapses to a bool instead of a SQL IN clause.
    offline_tracks = OfflineCacheModel.select().where(
        OfflineCacheModel.track << [t.id for t in tracks])

    if offline_tracks.count() < 1:
        downloaded = False
    else:
        downloaded = all(track.copied for track in offline_tracks)

    Book.update(downloaded=downloaded).where(Book.id == book.id).execute()

    if downloaded:
        self.emit_event("book-offline", book)
    else:
        self.emit_event("book-offline-removed", book)
def add(self, book: Book):
    """
    Register every media file of *book* in the offline cache table and
    start the background copy.
    """
    unique_file_ids = {chapter.file_id for chapter in book.chapters}
    rows = [(file_id, str(uuid.uuid4())) for file_id in unique_file_ids]

    # Insert in batches of 500 rows (presumably to respect the SQL
    # variable limit — matches the batching used elsewhere in this class).
    for start in range(0, len(rows), 500):
        chunk = rows[start:start + 500]
        insert_query = OfflineCacheModel.insert_many(
            chunk,
            fields=[
                OfflineCacheModel.original_file,
                OfflineCacheModel.cached_file
            ])
        self.total_batch_count += len(chunk)
        insert_query.execute()

    self._start_processing()
def _process_queue(self):
    """
    Worker loop: copy every queued file into the offline cache.

    Emits "start"/"message"/"finished" events on the main thread and
    updates each book's download status as its files complete. Stops
    early when the worker thread is asked to stop or the copy is
    cancelled by the user.
    """
    log.info("Started processing queue")
    self.filecopy_cancel = Gio.Cancellable()

    self._fill_queue_from_db()
    self.total_batch_count = len(self.queue)
    self.current_batch_count = 0
    if len(self.queue) > 0:
        self.current_book_processing = self._get_book_to_file(
            self.queue[0].original_file).id
        self.emit_event_main_thread("start")

    while len(self.queue) > 0:
        log.info("Processing item")
        self.current_batch_count += 1
        item = self.queue[0]
        # Cooperative shutdown requested by the owning thread.
        if self.thread.stopped():
            break

        query = OfflineCacheModel.select().where(
            OfflineCacheModel.id == item.id)
        if not query.exists():
            # FIX: the original "continue"d without dequeuing the stale
            # head item, spinning forever on an entry that had been
            # deleted from the database.
            self.queue.remove(item)
            continue

        new_item = OfflineCacheModel.get(OfflineCacheModel.id == item.id)
        book = self._get_book_to_file(new_item.original_file)

        # Crossing a book boundary: finalize the previous book's status.
        if self.current_book_processing != book.id:
            self._update_book_download_status(self.current_book_processing)
            self.current_book_processing = book.id

        if not new_item.copied and os.path.exists(
                new_item.original_file.path):
            log.info("Copying item")
            self.emit_event_main_thread(
                "message",
                _("Copying") + " " + tools.shorten_string(book.name, 30))
            self.current = new_item

            destination = Gio.File.new_for_path(
                os.path.join(self.cache_dir, new_item.cached_file))
            source = Gio.File.new_for_path(new_item.original_file.path)
            flags = Gio.FileCopyFlags.OVERWRITE
            try:
                copied = source.copy(destination, flags,
                                     self.filecopy_cancel,
                                     self.__update_copy_status, None)
            except Exception as e:
                # FIX: only GLib errors carry a "code" attribute; guard
                # the access so other exception types don't raise
                # AttributeError inside the handler.
                if getattr(e, "code", None) == Gio.IOErrorEnum.CANCELLED:
                    log.info("Download of book was cancelled.")
                    self.thread.stop()
                    break

                reporter.exception("offline_cache", e)
                log.error("Could not copy file to offline cache: " +
                          new_item.original_file.path)
                log.error(e)
                # Drop the failed item and move on to the next one.
                self.queue.remove(item)
                continue

            if copied:
                OfflineCacheModel.update(copied=True).where(
                    OfflineCacheModel.id == new_item.id).execute()

        self.queue.remove(item)

    # Finalize the status of the last processed book.
    if self.current_book_processing:
        self._update_book_download_status(self.current_book_processing)

    self.current = None
    self.emit_event_main_thread("finished")