def on_method_call(self, connection, sender, object_path, interface_name, method_name, parameters, invocation):
    """GDBus method-call dispatcher for the MPRIS interface.

    Unpacks the incoming GVariant arguments, resolves any unix file
    descriptor indexes ('h' signature entries), calls the Python method
    of the same name on ``self`` and returns its result to the caller.

    :param connection: the GDBus connection (unused here)
    :param sender: unique bus name of the caller (unused here)
    :param object_path: exported object path (unused here)
    :param interface_name: D-Bus interface name (unused here)
    :param method_name: name of the method to invoke on ``self``
    :param parameters: GVariant tuple of call arguments
    :param invocation: invocation object used to send the reply
    """
    args = list(parameters.unpack())
    for i, sig in enumerate(self.method_inargs[method_name]):
        if sig == "h":
            # 'h' marks a unix fd index: translate it into the real fd
            # carried alongside the message.
            msg = invocation.get_message()
            fd_list = msg.get_unix_fd_list()
            args[i] = fd_list.get(args[i])
    try:
        result = getattr(self, method_name)(*args)

        # out_args is at least (signature1). We therefore always wrap
        # the result as a tuple.
        # Refer to https://bugzilla.gnome.org/show_bug.cgi?id=765603
        result = (result,)

        out_args = self.method_outargs[method_name]
        if out_args and out_args != "()" and result:
            variant = GLib.Variant(out_args, result)
            invocation.return_value(variant)
        else:
            invocation.return_value(None)
    except Exception as e:
        log.error(e)
        reporter.exception("mpris", e)
        # FIX: component tag was misspelled "mrpis".
        reporter.error("mpris", "MPRIS method call failed with method name: {}".format(method_name))
        invocation.return_value(None)
def copy_to_audiobook_folder(path):
    """
    Copy the given path (folder or file) into the default audio book folder.

    Directory trees are copied recursively; when *path* turns out to be a
    single file (ENOTDIR), it falls back to a plain file copy.  Failures
    are reported and logged, never raised to the caller.

    :param path: filesystem path of the folder or file to import
    """
    try:
        name = os.path.basename(os.path.normpath(path))
        # NOTE: "== True" is a peewee query expression, not a redundant
        # comparison — do not "simplify" it.
        destination = Storage.select().where(Storage.default == True).get().path
        shutil.copytree(path, destination + "/" + name)
    except OSError as exc:
        reporter.exception("importer", exc)
        if exc.errno == errno.ENOTDIR:
            # The source is a single file, not a directory tree.
            try:
                shutil.copy(
                    path,
                    Storage.select().where(Storage.default == True).get().path)
            except OSError as e:
                # FIX: was the magic number 95 (Linux ENOTSUP); the named
                # constant matches the elif branch below.
                if e.errno == errno.ENOTSUP:
                    log.error("Could not import file " + path)
                    log.error(exc)
                else:
                    log.error(e)
        elif exc.errno == errno.ENOTSUP:
            log.error("Could not import file " + path)
            log.error(exc)
        else:
            log.error("Could not import file " + path)
            log.error(exc)
def _connect_db(db): try: db.connect(reuse_if_open=True) except Exception as e: reporter.exception("db", e) log.error("Could not connect to database. ") log.error(e)
def _notify(self, prop: str): try: for callback in self._observers[prop]: callback() except Exception as e: log.error(e) reporter.exception("observable", e)
def _process_queue(self):
    """Copy every queued track into the offline cache directory.

    Worker-thread loop: drains ``self.queue`` from the front, copying
    each not-yet-copied track file into ``self.cache_dir`` and marking
    it as copied in the database.  UI updates go through GTK idle
    callbacks; the loop stops early when the thread is asked to stop or
    the running copy is cancelled.
    """
    log.info("Startet processing queue")
    # Fresh cancellable so an earlier cancellation cannot abort this run.
    self.filecopy_cancel = Gio.Cancellable()
    self._fill_queue_from_db()
    # Progress bookkeeping consumed by the UI.
    self.total_batch_count = len(self.queue)
    self.current_batch_count = 0
    if len(self.queue) > 0:
        self.current_book_processing = self.queue[0].track.book.id
    while len(self.queue) > 0:
        log.info("Processing item")
        self.current_batch_count += 1
        item = self.queue[0]
        if self.thread.stopped():
            break
        # Re-fetch the row: it may have changed since the queue was filled.
        new_item = OfflineCacheModel.get(OfflineCacheModel.id == item.id)
        if self.current_book_processing != new_item.track.book.id:
            # Crossed a book boundary: finalize the previous book's status.
            self.update_book_download_status(
                Book.get(Book.id == self.current_book_processing))
            self.current_book_processing = new_item.track.book.id
        if not new_item.copied and os.path.exists(new_item.track.file):
            log.info("Copying item")
            Gdk.threads_add_idle(
                GLib.PRIORITY_DEFAULT_IDLE,
                self.ui.switch_to_working,
                _("Copying") + " " + tools.shorten_string(new_item.track.book.name, 30),
                False, False)
            self.current = new_item
            destination = Gio.File.new_for_path(
                os.path.join(self.cache_dir, new_item.file))
            source = Gio.File.new_for_path(new_item.track.file)
            flags = Gio.FileCopyFlags.OVERWRITE
            try:
                copied = source.copy(destination, flags, self.filecopy_cancel,
                                     self.__update_copy_status, None)
            except Exception as e:
                if e.code == Gio.IOErrorEnum.CANCELLED:
                    log.info("Download of book was cancelled.")
                    self.thread.stop()
                    break
                reporter.exception("offline_cache", e)
                log.error("Could not copy file to offline cache: " + new_item.track.file)
                log.error(e)
                # Skip this item but keep processing the rest of the queue.
                self.queue.remove(item)
                continue
            if copied:
                OfflineCacheModel.update(copied=True).where(
                    OfflineCacheModel.id == new_item.id).execute()
        self.queue.remove(item)
    if self.current_book_processing:
        # Finalize the status of the last processed book.
        self.update_book_download_status(
            Book.get(Book.id == self.current_book_processing))
    self.current = None
    Gdk.threads_add_idle(GLib.PRIORITY_DEFAULT_IDLE, self.ui.switch_to_playing)
def handle_exception(self, exc_type, exc_value, exc_traceback):
    """Global excepthook: report the uncaught exception, then delegate
    to the previously installed hook.

    Reporting is strictly best effort — a failure inside the reporter
    must never mask the original exception.

    :param exc_type: exception class
    :param exc_value: exception instance
    :param exc_traceback: traceback object
    """
    # FIX: removed leftover debug print("handle exception").
    try:
        reporter.exception(
            "uncaught", exc_value,
            "\n".join(format_exception(exc_type, exc_value, exc_traceback)))
    # FIX: bare "except:" narrowed — still swallows reporter failures,
    # but no longer traps SystemExit/KeyboardInterrupt.
    except Exception:
        pass
    # Always fall through to the original hook so the crash is printed.
    self.old_except_hook(exc_type, exc_value, exc_traceback)
def refresh_book_object(self):
    """
    Refresh the internal book object from the database.

    If the database lookup fails, the current ``self.book`` is kept and
    the failure is reported.
    """
    try:
        refreshed = Book.get(Book.id == self.book.id)
    except Exception as e:
        reporter.exception("book_element", e)
    else:
        self.book = refreshed
def _count_files_to_scan(self) -> int: files_to_scan = self._get_files_to_scan() try: return max(1, len(list(files_to_scan))) except StopIteration as e: reporter.exception("importer", e, "_count_files_to_scan raised a stop iteration.") return 1
def _copy_file(self, source_path: str, dest_path: str):
    """Copy a single file, overwriting the destination.

    The copy runs with a fresh cancellable so the user can abort it;
    progress is forwarded through ``self._update_copy_status``.

    :param source_path: path of the file to copy
    :param dest_path: destination path (overwritten if present)
    """
    log.info("Copy file {} to {}".format(source_path, dest_path))
    source = Gio.File.new_for_path(source_path)
    destination = Gio.File.new_for_path(dest_path)
    flags = Gio.FileCopyFlags.OVERWRITE
    self.filecopy_cancel = Gio.Cancellable()
    try:
        source.copy(destination, flags, self.filecopy_cancel, self._update_copy_status, None)
    except Exception as e:
        if e.code == Gio.IOErrorEnum.CANCELLED:
            # User aborted the copy — not an error, so don't report it.
            # FIX: the original fell through the bare "pass" and reported
            # cancellations as failures.
            pass
        else:
            reporter.exception("files", e)
            log.error("Failed to copy file: {}".format(e))
    self._file_progess += 1
def import_file(self, path: str):
    """Detect and return media metadata for *path*.

    Returns ``None`` for non-files, non-audio files and unexpected
    detector failures; returns the decoded file path when the audio
    file could not be discovered.
    """
    if not os.path.isfile(path):
        return None

    try:
        media_data = MediaDetector(path).get_media_data()
    except NotAnAudioFile:
        return None
    except AudioFileCouldNotBeDiscovered as e:
        # The exception message carries the offending URI.
        return unquote(urlparse(str(e)).path)
    except Exception as e:
        reporter.exception("media_detector", e)
        return None

    return media_data
def _filter_unchanged_files(self, files: List[str]) -> List[str]: """Filter all files that are already imported and that have not changed from a list of paths.""" imported_files = self._library.files for file in files: if file in imported_files: try: chapter = next(chapter for chapter in self._library.chapters if chapter.file == file) except StopIteration as e: reporter.exception("importer", e, "_filter_unchanged_files raised a stop iteration.") yield file continue if int(os.path.getmtime(file)) > chapter.modified: yield file continue yield file
def _copy_file(self, source_path: str, dest_path: str):
    """Copy a single file into storage, mapping common Gio errors to
    user-visible info-bar messages.

    Cancellation by the user is silently ignored; unrecognized errors
    are reported to the crash reporter.
    """
    log.info("Copy file {} to {}".format(source_path, dest_path))
    src = Gio.File.new_for_path(source_path)
    dst = Gio.File.new_for_path(dest_path)
    self.filecopy_cancel = Gio.Cancellable()
    try:
        src.copy(dst, Gio.FileCopyFlags.OVERWRITE, self.filecopy_cancel,
                 self._update_copy_status, None)
    except Exception as e:
        code = e.code
        if code == Gio.IOErrorEnum.CANCELLED:
            # User aborted — nothing to show.
            pass
        elif code == Gio.IOErrorEnum.READ_ONLY:
            self._info_bar.show(_("Cannot copy: Audiobook directory is read only"))
        elif code == Gio.IOErrorEnum.NO_SPACE:
            self._info_bar.show(_("Cannot copy: Disk is full"))
        elif code == Gio.IOErrorEnum.PERMISSION_DENIED:
            self._info_bar.show(_("Cannot copy: Permission denied"))
        else:
            reporter.exception("files", e)
        log.error("Failed to copy file: {}".format(e))
    self._file_progess += 1
def update_database(ui, force=False):
    """
    Scans the audio book directories for changes and new files.

    Also removes entries from the db that are no longer existent,
    regenerates the artwork cache and pushes progress/finish updates to
    the UI via GTK idle callbacks.

    :param ui: main UI object used for progress and result display
    :param force: when True, re-import every file regardless of state
    """
    paths = []
    for location in Storage.select():
        if os.path.exists(location.path):
            paths.append(location.path)

    # clean artwork cache
    artwork_cache.delete_artwork_cache()

    # are UI buttons currently blocked?
    # NOTE(review): these values are read but never used below.
    player_blocked, importer_blocked = ui.get_ui_buttons_blocked()

    i = 0
    percent_counter = 0
    file_count = 0
    # Pre-count every file so progress can be reported as a fraction.
    for path in paths:
        file_count += sum([len(files) for r, d, files in os.walk(path)])
    # Update the progress bar at most ~1000 times over the whole scan.
    percent_threshold = file_count / 1000
    failed = ""
    tracks_to_import = []
    # Tracks which changed and need to be updated if they are cached
    tracks_cache_update = []
    start = time.time()
    for path in paths:
        for directory, subdirectories, files in os.walk(path):
            for file in files:
                if file.lower().endswith(('.mp3', '.ogg', '.flac', '.m4a', '.m4b', '.wav', '.opus')):
                    # NOTE(review): rebinds the outer loop variable
                    # "path" with the file's full path.
                    path = os.path.join(directory, file)
                    imported = True
                    try:
                        if force:
                            imported, ignore = import_file(
                                file, directory, path, True)
                            tracks_cache_update.append(path)
                        # Is the track already in the database?
                        elif Track.select().where(
                                Track.file == path).count() < 1:
                            imported, track_data = import_file(
                                file, directory, path)
                            if track_data:
                                tracks_to_import.append(track_data)
                        # Has the modified date changed?
                        elif (Track.select().where(
                                Track.file == path).first().modified < os.path.getmtime(path)):
                            imported, ignore = import_file(file, directory, path, update=True)
                            tracks_cache_update.append(path)
                        if not imported:
                            failed += path + "\n"
                    except UnicodeEncodeError as e:
                        log.warning(
                            "Could not import file because of invalid path or filename: " + path)
                        reporter.exception("importer", e)
                        failed += path + "\n"
                    except Exception as e:
                        log.warning("Could not import file: " + path)
                        log.warning(traceback.format_exc())
                        reporter.exception("importer", e)
                        failed += path + "\n"
                    i = i + 1
                    # Flush pending rows in batches to bound memory use.
                    if len(tracks_to_import) > 100:
                        write_tracks_to_db(tracks_to_import)
                        tracks_to_import = []
                    # don't flood gui updates
                    if percent_counter < percent_threshold:
                        percent_counter = percent_counter + 1
                    else:
                        percent_counter = 1
                        Gdk.threads_add_idle(
                            GLib.PRIORITY_DEFAULT_IDLE,
                            ui.titlebar.progress_bar.set_fraction,
                            i / file_count)
                        Gdk.threads_add_idle(
                            GLib.PRIORITY_DEFAULT_IDLE,
                            ui.titlebar.update_progress_bar.set_fraction,
                            i / file_count)
    # Write whatever is left in the final (partial) batch.
    write_tracks_to_db(tracks_to_import)
    end = time.time()
    log.info("Total import time: " + str(end - start))

    # remove entries from the db that are no longer existent
    remove_invalid_entries()
    artwork_cache.generate_artwork_cache()

    Gdk.threads_add_idle(GLib.PRIORITY_DEFAULT_IDLE, importer.emit_event, "import-finished")
    Gdk.threads_add_idle(GLib.PRIORITY_DEFAULT_IDLE, ui.switch_to_playing)
    Gdk.threads_add_idle(GLib.PRIORITY_DEFAULT_IDLE, ui.check_for_tracks)
    if len(failed) > 0:
        Gdk.threads_add_idle(GLib.PRIORITY_DEFAULT_IDLE, ui.display_failed_imports, failed)

    # Refresh the offline cache for any changed tracks.
    OfflineCache().update_cache(tracks_cache_update)
    OfflineCache()._process_queue()
def _process_queue(self):
    """Copy all queued files into the offline cache (worker thread).

    Drains ``self.queue`` from the front: each item that has not been
    copied yet is copied into ``self.cache_dir`` and flagged as copied
    in the database.  Emits "start"/"message"/"finished" events for the
    UI and stops early when the thread is stopped or the user cancels
    the running copy.
    """
    log.info("Started processing queue")
    # Fresh cancellable so an earlier cancellation cannot abort this run.
    self.filecopy_cancel = Gio.Cancellable()
    self._fill_queue_from_db()
    self.total_batch_count = len(self.queue)
    self.current_batch_count = 0
    if len(self.queue) > 0:
        self.current_book_processing = self._get_book_to_file(
            self.queue[0].original_file).id
        self.emit_event_main_thread("start")
    while len(self.queue) > 0:
        log.info("Processing item")
        self.current_batch_count += 1
        item = self.queue[0]
        if self.thread.stopped():
            break
        query = OfflineCacheModel.select().where(
            OfflineCacheModel.id == item.id)
        if not query.exists():
            # FIX: the cache row vanished from the database. The item must
            # be dropped from the queue before continuing — a bare
            # "continue" here looped forever on the same head item.
            self.queue.remove(item)
            continue
        new_item = OfflineCacheModel.get(OfflineCacheModel.id == item.id)
        book = self._get_book_to_file(new_item.original_file)
        if self.current_book_processing != book.id:
            # Crossed a book boundary: finalize the previous book's status.
            self._update_book_download_status(self.current_book_processing)
            self.current_book_processing = book.id
        if not new_item.copied and os.path.exists(new_item.original_file.path):
            log.info("Copying item")
            self.emit_event_main_thread(
                "message",
                _("Copying") + " " + tools.shorten_string(book.name, 30))
            self.current = new_item
            destination = Gio.File.new_for_path(
                os.path.join(self.cache_dir, new_item.cached_file))
            source = Gio.File.new_for_path(new_item.original_file.path)
            flags = Gio.FileCopyFlags.OVERWRITE
            try:
                copied = source.copy(destination, flags, self.filecopy_cancel,
                                     self.__update_copy_status, None)
            except Exception as e:
                if e.code == Gio.IOErrorEnum.CANCELLED:
                    log.info("Download of book was cancelled.")
                    self.thread.stop()
                    break
                reporter.exception("offline_cache", e)
                log.error("Could not copy file to offline cache: " + new_item.original_file.path)
                log.error(e)
                # Skip this item but keep processing the rest of the queue.
                self.queue.remove(item)
                continue
            if copied:
                OfflineCacheModel.update(copied=True).where(
                    OfflineCacheModel.id == new_item.id).execute()
        self.queue.remove(item)
    if self.current_book_processing:
        # Finalize the status of the last processed book.
        self._update_book_download_status(self.current_book_processing)
    self.current = None
    self.emit_event_main_thread("finished")
def Raise(self):
    """MPRIS ``Raise``: bring the application window to the foreground.

    Any failure (e.g. no window yet) is reported, never raised over the
    D-Bus boundary.
    """
    try:
        main_window = self.__app.ui.window
        main_window.present_with_time(Gtk.get_current_event_time())
    except Exception as e:
        reporter.exception("mpris", e)
def update_db():
    """Migrate the database schema to the current version.

    Detects a pre-version-1 database first, then applies each numbered
    migration in order.  Migrations 9 and 10 run against a backup that
    is restored — and the app terminated — if they fail.
    """
    db = get_sqlite_database()
    # First test for version 1: the settings table gains a "version"
    # column with schema v1.
    try:
        next(c for c in db.get_columns("settings") if c.name == "version")
    except Exception:
        if len(db.get_tables()) == 0:
            # Brand-new install with a stale, empty sqlite file: wipe it
            # (including WAL side files) before creating the schema.
            data_dir = get_data_dir()
            if os.path.exists(os.path.join(data_dir, "cozy.db")):
                os.remove(os.path.join(data_dir, "cozy.db"))
                os.remove(os.path.join(data_dir, "cozy.db-shm"))
                os.remove(os.path.join(data_dir, "cozy.db-wal"))
        __update_db_1(db)

    version = Settings.get().version
    # then for version 2 and so on
    if version < 2:
        __update_db_2(db)
    if version < 3:
        __update_db_3(db)
    if version < 4:
        __update_db_4(db)
    if version < 5:
        __update_db_5(db)
    if version < 6:
        __update_db_6(db)
    if version < 7:
        __update_db_7(db)
    if version < 8:
        __update_db_8(db)
    # Later migrations alter data destructively, so they are guarded by
    # a backup that is restored on failure (deduplicated helper below).
    if version < 9:
        _migrate_with_backup(db, _update_db_9)
    if version < 10:
        _migrate_with_backup(db, _update_db_10)


def _migrate_with_backup(db, migration):
    """Run *migration* on *db*; on failure restore the backup, show the
    migration-failed dialog and terminate the process with exit code 1."""
    backup_dir_name = _backup_db(db)
    try:
        migration(db)
    except Exception as e:
        log.error(e)
        reporter.exception("db_updator", e)
        db.stop()
        _restore_db(backup_dir_name)
        from cozy.ui.db_migration_failed_view import DBMigrationFailedView
        dialog = DBMigrationFailedView()
        dialog.show()
        exit(1)