def __init__(self, files):
    """Build the main window for a set of image files.

    :param files: mapping of file URL -> metadata dict; each value must have
        a ``'path'`` key (see the ``f['path']`` accesses below).  Assumed to
        come from ``file_metadata()`` elsewhere in this module — TODO confirm.
    """
    QMainWindow.__init__(self)
    # Route uncaught exceptions to this window's handler so they can be
    # shown to the user instead of killing the process.
    sys.excepthook = self.excepthook
    self.profile = create_profile(files)
    self.view = View(self.profile, self)
    # Re-parent the profile under the view so Qt ownership keeps it alive
    # exactly as long as the view.
    self.profile.setParent(self.view)
    self.view.set_title.connect(self.set_title)
    self.view.refresh_all.connect(self.refresh_all)
    self.setCentralWidget(self.view)
    self.files = files
    # Watch both the individual files and their containing directories, so
    # we see in-place edits (fileChanged) and additions/deletions
    # (directoryChanged).
    self.directories = {os.path.dirname(f['path']) for f in files.values()}
    self.file_watcher = QFileSystemWatcher(
        [f['path'] for f in files.values()] + list(self.directories), self)
    # QueuedConnection: deliver notifications via the event loop rather than
    # synchronously from inside the watcher's emit.
    self.file_watcher.fileChanged.connect(self.file_changed, type=Qt.QueuedConnection)
    self.file_watcher.directoryChanged.connect(self.directory_changed, type=Qt.QueuedConnection)
    # Pending changes accumulated between debounce timer expirations.
    self.changed_files = set()
    self.changed_dirs = set()
    # Single-shot 1s timers coalesce bursts of filesystem events into one
    # do_file_changed()/do_dir_changed() call.
    self.debounce_files, self.debounce_dirs = QTimer(), QTimer()
    for t in self.debounce_files, self.debounce_dirs:
        t.setInterval(1000), t.setSingleShot(True)
    self.debounce_files.timeout.connect(self.do_file_changed)
    self.debounce_dirs.timeout.connect(self.do_dir_changed)
    # None -> set_title falls back to its default title text.
    self.set_title(None)
def __init__(self, path, parent):
    """Set up automatic adding of ebooks dropped into *path*.

    When *path* is a readable+writable directory, a background ``Worker``
    and a ``QFileSystemWatcher`` are created and wired up; otherwise a
    message is printed (for a non-empty path) and the object stays inert —
    the other methods guard on ``hasattr(self, 'worker')``.
    """
    QObject.__init__(self, parent)
    usable = bool(path) and os.path.isdir(path) and os.access(path, os.R_OK|os.W_OK)
    if not usable:
        # A falsy path means auto-adding is simply disabled; only complain
        # when a path was actually configured.
        if path:
            prints(path, 'is not a valid directory to watch for new ebooks, ignoring')
        return
    self.watcher = QFileSystemWatcher(self)
    # Worker reports parsed metadata through the metadata_read signal.
    self.worker = Worker(path, self.metadata_read.emit)
    # Queued connections: handle events from the event loop, not
    # synchronously inside the emitting code.
    self.metadata_read.connect(self.add_to_db, type=Qt.QueuedConnection)
    self.auto_convert.connect(self.do_auto_convert, type=Qt.QueuedConnection)
    self.watcher.directoryChanged.connect(self.dir_changed, type=Qt.QueuedConnection)
    # Delay the initial directory scan until startup has settled.
    QTimer.singleShot(2000, self.initialize)
def __init__(self, path, parent):
    """Watch *path* for new ebook files and auto-add them.

    :param path: directory to watch; ignored (with a printed message) if it
        is not a readable and writable directory.  A falsy path disables
        auto-adding silently.
    :param parent: Qt parent object.
    """
    QObject.__init__(self, parent)
    if path and os.path.isdir(path) and os.access(path, os.R_OK | os.W_OK):
        self.watcher = QFileSystemWatcher(self)
        # The worker thread reads metadata and reports it via the
        # metadata_read signal.
        self.worker = Worker(path, self.metadata_read.emit)
        # Queued connections so the slots run from the event loop, not
        # synchronously inside the signal emission.
        self.watcher.directoryChanged.connect(self.dir_changed, type=Qt.QueuedConnection)
        self.metadata_read.connect(self.add_to_db, type=Qt.QueuedConnection)
        # Delay the first scan of the directory until startup has settled.
        QTimer.singleShot(2000, self.initialize)
        self.auto_convert.connect(self.do_auto_convert, type=Qt.QueuedConnection)
    elif path:
        prints(path, "is not a valid directory to watch for new ebooks, ignoring")
def __init__(self, parent=None):
    """Initialise session state and start watching the sessions directory.

    Sets up the two rolling backup-session paths, creates a filesystem
    watcher on the sessions directory (notifying both the internal refresh
    slot and the public change signal), then loads persisted settings.
    """
    super().__init__(parent)
    # Cached metadata for known sessions (equivalent of QList<SessionMetaData>).
    self._sessionsMetaDataList = []
    self._lastActiveSessionPath = ''
    # Two-level rolling backup of the session file.
    self._firstBackupSession = DataPaths.currentProfilePath() + '/session.dat.old'
    self._secondBackupSession = DataPaths.currentProfilePath() + '/session.dat.old1'
    watcher = QFileSystemWatcher([DataPaths.path(DataPaths.Sessions)], self)
    for slot in (self._sessionsDirectoryChanged, self.sessionsMetaDataChanged):
        watcher.directoryChanged.connect(slot)
    self.loadSettings()
class AutoAdder(QObject):
    """Watch a directory and automatically add ebooks dropped into it.

    A background ``Worker`` stages new files and reads their metadata; the
    results are delivered back to the GUI thread via ``metadata_read`` and
    inserted into the library in :meth:`add_to_db`.  Written against the
    Python 2 dict API (``iteritems``/``itervalues``).
    """

    # Emitted by the worker (via Worker's callback) with a mapping of
    # staged filename -> temp dir containing the extracted metadata.
    metadata_read = pyqtSignal(object)
    # Emitted with the set of newly added book ids when auto-conversion
    # is enabled.
    auto_convert = pyqtSignal(object)

    def __init__(self, path, parent):
        """Start watching *path* if it is a readable+writable directory.

        If the path is invalid, a message is printed (for a non-empty path)
        and no watcher/worker is created; all other methods guard on
        ``hasattr(self, 'worker')``.
        """
        QObject.__init__(self, parent)
        if path and os.path.isdir(path) and os.access(path, os.R_OK|os.W_OK):
            self.watcher = QFileSystemWatcher(self)
            self.worker = Worker(path, self.metadata_read.emit)
            # Queued connections: run slots from the event loop, not inside
            # the emitting call.
            self.watcher.directoryChanged.connect(self.dir_changed, type=Qt.QueuedConnection)
            self.metadata_read.connect(self.add_to_db, type=Qt.QueuedConnection)
            # Delay the first scan until startup has settled.
            QTimer.singleShot(2000, self.initialize)
            self.auto_convert.connect(self.do_auto_convert, type=Qt.QueuedConnection)
        elif path:
            prints(path, 'is not a valid directory to watch for new ebooks, ignoring')

    def read_rules(self):
        # Re-read the worker's rules; no-op when auto-adding is disabled.
        if hasattr(self, 'worker'):
            self.worker.read_rules()

    def initialize(self):
        """Do the initial scan and then attach the directory watcher."""
        try:
            # If files are already present at startup, process them.
            if os.listdir(self.worker.path):
                self.dir_changed()
        except:
            # Best effort: a vanished/unreadable directory at startup is
            # not fatal; the watcher below will pick up later changes.
            pass
        self.watcher.addPath(self.worker.path)

    def dir_changed(self, *args):
        """Wake the worker when the watched directory changes."""
        if os.path.isdir(self.worker.path) and os.access(self.worker.path, os.R_OK|os.W_OK):
            if not self.worker.is_alive():
                self.worker.start()
            self.worker.wake_up.set()

    def stop(self):
        # Ask the worker thread to exit; wake it so it notices promptly.
        if hasattr(self, 'worker'):
            self.worker.keep_running = False
            self.worker.wake_up.set()

    def wait(self):
        # Block until the worker thread has finished (call stop() first).
        if hasattr(self, 'worker'):
            self.worker.join()

    def add_to_db(self, data):
        """Add the staged books described by *data* to the library.

        :param data: mapping of staged filename -> temp dir holding the
            worker's ``size.txt`` and ``metadata.opf`` for that file
            (Python 2 dict or compatible: ``iteritems``/``itervalues``).
        """
        from calibre.ebooks.metadata.opf2 import OPF
        gui = self.parent()
        if gui is None:
            return
        m = gui.library_view.model()
        count = 0
        needs_rescan = False
        duplicates = []
        added_ids = set()
        for fname, tdir in data.iteritems():
            paths = [os.path.join(self.worker.path, fname)]
            sz = os.path.join(tdir, 'size.txt')
            try:
                with open(sz, 'rb') as f:
                    sz = int(f.read())
                # If the size changed since metadata was read, the file was
                # still being written; defer it to a rescan.
                if sz != os.stat(paths[0]).st_size:
                    raise Exception('Looks like the file was written to after'
                            ' we tried to read metadata')
            except:
                needs_rescan = True
                # Un-stage so the rescan will pick the file up again.
                try:
                    self.worker.staging.remove(fname)
                except KeyError:
                    pass
                continue
            mi = os.path.join(tdir, 'metadata.opf')
            if not os.access(mi, os.R_OK):
                continue
            mi = OPF(open(mi, 'rb'), tdir, populate_spine=False).to_book_metadata()
            # Apply the user's tag mapping rules, if any are configured.
            if gprefs.get('tag_map_on_add_rules'):
                from calibre.ebooks.metadata.tag_mapper import map_tags
                mi.tags = map_tags(mi.tags, gprefs['tag_map_on_add_rules'])
            mi = [mi]
            dups, ids = m.add_books(paths,
                    [os.path.splitext(fname)[1][1:].upper()], mi,
                    add_duplicates=not gprefs['auto_add_check_for_duplicates'],
                    return_ids=True)
            added_ids |= set(ids)
            num = len(ids)
            if dups:
                # Copy the duplicate file out of the watched folder (it is
                # deleted below) so the duplicates dialog can still add it.
                path = dups[0][0]
                with open(os.path.join(tdir, 'dup_cache.'+dups[1][0].lower()), 'wb') as dest, open(path, 'rb') as src:
                    shutil.copyfileobj(src, dest)
                dups[0][0] = dest.name
                duplicates.append(dups)
            try:
                # The book is in the library now; remove the source file and
                # un-stage it.
                os.remove(paths[0])
                self.worker.staging.remove(fname)
            except:
                import traceback
                traceback.print_exc()
            count += num

        if duplicates:
            # Flatten the per-book (paths, formats, metadata) triples and ask
            # the user which duplicates to add anyway.
            paths, formats, metadata = [], [], []
            for p, f, mis in duplicates:
                paths.extend(p)
                formats.extend(f)
                metadata.extend(mis)
            dups = [(mic, mic.cover, [p]) for mic, p in zip(metadata, paths)]
            d = DuplicatesQuestion(m.db, dups, parent=gui)
            dups = tuple(d.duplicates)
            if dups:
                paths, formats, metadata = [], [], []
                for mi, cover, book_paths in dups:
                    paths.extend(book_paths)
                    formats.extend([p.rpartition('.')[-1] for p in book_paths])
                    metadata.extend([mi for i in book_paths])
                ids = m.add_books(paths, formats, metadata, add_duplicates=True, return_ids=True)[1]
                added_ids |= set(ids)
                num = len(ids)
                count += num

        # Clean up all the worker's temp dirs, best effort.
        for tdir in data.itervalues():
            try:
                shutil.rmtree(tdir)
            except:
                pass

        if added_ids and gprefs['auto_add_auto_convert']:
            self.auto_convert.emit(added_ids)

        if count > 0:
            m.books_added(count)
            gui.status_bar.show_message(_(
                'Added %(num)d book(s) automatically from %(src)s') %
                dict(num=count, src=self.worker.path), 2000)
            gui.refresh_cover_browser()

        if needs_rescan:
            # Some files were incomplete; try again shortly.
            QTimer.singleShot(2000, self.dir_changed)

    def do_auto_convert(self, added_ids):
        # Hand the newly added books to the Convert Books action.
        gui = self.parent()
        gui.iactions['Convert Books'].auto_convert_auto_add(added_ids)
class AutoAdder(QObject):
    """Watch a directory and automatically add ebooks dropped into it.

    Earlier variant of the class above: no tag-mapping rules and no
    ``read_rules`` method; refreshes ``gui.db_images`` instead of the cover
    browser.  Uses the Python 2 dict API (``iteritems``/``itervalues``).
    """

    # Emitted by the worker with a mapping of staged filename -> temp dir
    # containing the extracted metadata for that file.
    metadata_read = pyqtSignal(object)
    # Emitted with the set of newly added book ids when auto-conversion
    # is enabled.
    auto_convert = pyqtSignal(object)

    def __init__(self, path, parent):
        """Start watching *path* if it is a readable+writable directory.

        If the path is invalid a message is printed (for a non-empty path)
        and no watcher/worker is created; other methods guard on
        ``hasattr(self, 'worker')``.
        """
        QObject.__init__(self, parent)
        if path and os.path.isdir(path) and os.access(path, os.R_OK|os.W_OK):
            self.watcher = QFileSystemWatcher(self)
            self.worker = Worker(path, self.metadata_read.emit)
            # Queued connections: run slots from the event loop, not inside
            # the emitting call.
            self.watcher.directoryChanged.connect(self.dir_changed, type=Qt.QueuedConnection)
            self.metadata_read.connect(self.add_to_db, type=Qt.QueuedConnection)
            # Delay the first scan until startup has settled.
            QTimer.singleShot(2000, self.initialize)
            self.auto_convert.connect(self.do_auto_convert, type=Qt.QueuedConnection)
        elif path:
            prints(path, 'is not a valid directory to watch for new ebooks, ignoring')

    def initialize(self):
        """Do the initial scan and then attach the directory watcher."""
        try:
            if os.listdir(self.worker.path):
                self.dir_changed()
        except:
            # Best effort: an unreadable directory at startup is not fatal.
            pass
        self.watcher.addPath(self.worker.path)

    def dir_changed(self, *args):
        """Wake the worker when the watched directory changes."""
        if os.path.isdir(self.worker.path) and os.access(self.worker.path, os.R_OK|os.W_OK):
            if not self.worker.is_alive():
                self.worker.start()
            self.worker.wake_up.set()

    def stop(self):
        # Ask the worker thread to exit; wake it so it notices promptly.
        if hasattr(self, 'worker'):
            self.worker.keep_running = False
            self.worker.wake_up.set()

    def wait(self):
        # Block until the worker thread has finished (call stop() first).
        if hasattr(self, 'worker'):
            self.worker.join()

    def add_to_db(self, data):
        """Add the staged books described by *data* to the library.

        :param data: mapping of staged filename -> temp dir holding the
            worker's ``size.txt`` and ``metadata.opf`` for that file.
        """
        from calibre.ebooks.metadata.opf2 import OPF
        gui = self.parent()
        if gui is None:
            return
        m = gui.library_view.model()
        count = 0
        needs_rescan = False
        duplicates = []
        added_ids = set()
        for fname, tdir in data.iteritems():
            paths = [os.path.join(self.worker.path, fname)]
            sz = os.path.join(tdir, 'size.txt')
            try:
                with open(sz, 'rb') as f:
                    sz = int(f.read())
                # If the size changed since metadata was read, the file was
                # still being written; defer it to a rescan.
                if sz != os.stat(paths[0]).st_size:
                    raise Exception('Looks like the file was written to after'
                            ' we tried to read metadata')
            except:
                needs_rescan = True
                # Un-stage so the rescan will pick the file up again.
                try:
                    self.worker.staging.remove(fname)
                except KeyError:
                    pass
                continue
            mi = os.path.join(tdir, 'metadata.opf')
            if not os.access(mi, os.R_OK):
                continue
            mi = [OPF(open(mi, 'rb'), tdir, populate_spine=False).to_book_metadata()]
            dups, ids = m.add_books(paths,
                    [os.path.splitext(fname)[1][1:].upper()], mi,
                    add_duplicates=not gprefs['auto_add_check_for_duplicates'],
                    return_ids=True)
            added_ids |= set(ids)
            num = len(ids)
            if dups:
                # Copy the duplicate file out of the watched folder (it is
                # deleted below) so the duplicates dialog can still add it.
                path = dups[0][0]
                with open(os.path.join(tdir, 'dup_cache.'+dups[1][0].lower()), 'wb') as dest, open(path, 'rb') as src:
                    shutil.copyfileobj(src, dest)
                dups[0][0] = dest.name
                duplicates.append(dups)
            try:
                # The book is in the library now; remove the source file and
                # un-stage it.
                os.remove(paths[0])
                self.worker.staging.remove(fname)
            except:
                import traceback
                traceback.print_exc()
            count += num

        if duplicates:
            # Flatten the per-book (paths, formats, metadata) triples and ask
            # the user which duplicates to add anyway.
            paths, formats, metadata = [], [], []
            for p, f, mis in duplicates:
                paths.extend(p)
                formats.extend(f)
                metadata.extend(mis)
            dups = [(mic, mic.cover, [p]) for mic, p in zip(metadata, paths)]
            d = DuplicatesQuestion(m.db, dups, parent=gui)
            dups = tuple(d.duplicates)
            if dups:
                paths, formats, metadata = [], [], []
                for mi, cover, book_paths in dups:
                    paths.extend(book_paths)
                    formats.extend([p.rpartition('.')[-1] for p in book_paths])
                    metadata.extend([mi for i in book_paths])
                ids = m.add_books(paths, formats, metadata, add_duplicates=True, return_ids=True)[1]
                added_ids |= set(ids)
                num = len(ids)
                count += num

        # Clean up all the worker's temp dirs, best effort.
        for tdir in data.itervalues():
            try:
                shutil.rmtree(tdir)
            except:
                pass

        if added_ids and gprefs['auto_add_auto_convert']:
            self.auto_convert.emit(added_ids)

        if count > 0:
            m.books_added(count)
            gui.status_bar.show_message(_(
                'Added %(num)d book(s) automatically from %(src)s') %
                dict(num=count, src=self.worker.path), 2000)
            # Reset the cover-flow model, if the GUI has one.
            if hasattr(gui, 'db_images'):
                gui.db_images.beginResetModel(), gui.db_images.endResetModel()

        if needs_rescan:
            # Some files were incomplete; try again shortly.
            QTimer.singleShot(2000, self.dir_changed)

    def do_auto_convert(self, added_ids):
        # Hand the newly added books to the Convert Books action.
        gui = self.parent()
        gui.iactions['Convert Books'].auto_convert_auto_add(added_ids)
class MainWindow(QMainWindow):
    """Main window of an image viewer that live-reloads its files.

    Watches every displayed file and every containing directory via
    QFileSystemWatcher, debouncing change notifications with two 1-second
    single-shot timers before refreshing the view.
    """

    def __init__(self, files):
        """:param files: mapping of file URL -> metadata dict (each value has
        a ``'path'`` key)."""
        QMainWindow.__init__(self)
        # Show uncaught exceptions to the user instead of dying silently.
        sys.excepthook = self.excepthook
        self.profile = create_profile(files)
        self.view = View(self.profile, self)
        # Qt ownership: the profile lives exactly as long as the view.
        self.profile.setParent(self.view)
        self.view.set_title.connect(self.set_title)
        self.view.refresh_all.connect(self.refresh_all)
        self.setCentralWidget(self.view)
        self.files = files
        # Watch files for in-place edits and directories for add/remove.
        self.directories = {os.path.dirname(f['path']) for f in files.values()}
        self.file_watcher = QFileSystemWatcher(
            [f['path'] for f in files.values()] + list(self.directories), self)
        self.file_watcher.fileChanged.connect(self.file_changed, type=Qt.QueuedConnection)
        self.file_watcher.directoryChanged.connect(self.directory_changed, type=Qt.QueuedConnection)
        # Changes accumulated between debounce timer expirations.
        self.changed_files = set()
        self.changed_dirs = set()
        # 1s single-shot timers coalesce bursts of filesystem events.
        self.debounce_files, self.debounce_dirs = QTimer(), QTimer()
        for t in self.debounce_files, self.debounce_dirs:
            t.setInterval(1000), t.setSingleShot(True)
        self.debounce_files.timeout.connect(self.do_file_changed)
        self.debounce_dirs.timeout.connect(self.do_dir_changed)
        self.set_title(None)

    def excepthook(self, exctype, value, traceback):
        """Installed as sys.excepthook: report uncaught errors in a dialog.

        KeyboardInterrupt is ignored; everything else goes to the default
        hook (so it still reaches stderr) and is then shown to the user.
        NOTE(review): statement grouping reconstructed from a collapsed
        source line — confirm the KeyboardInterrupt branch returns *before*
        the default-hook call.
        """
        if exctype == KeyboardInterrupt:
            return
        sys.__excepthook__(exctype, value, traceback)
        try:
            msg = str(value)
        except Exception:
            msg = repr(value)
        QMessageBox.critical(self, _('Unhandled error'), msg)

    def set_title(self, val):
        # Fall back to a "<n> images" title when the view supplies none.
        title = val or (_('{} images').format(len(self.files)))
        title += ' :: ' + appname
        self.setWindowTitle(title)

    def file_changed(self, path):
        """Queued slot for QFileSystemWatcher.fileChanged; debounced."""
        # A file that is no longer readable was deleted/replaced: drop it.
        if not os.access(path, os.R_OK):
            self.files.pop(path_to_url(path), None)
        self.changed_files.add(path)
        self.debounce_files.start()

    def directory_changed(self, path):
        """Queued slot for QFileSystemWatcher.directoryChanged; debounced."""
        self.changed_dirs.add(path)
        self.debounce_dirs.start()

    def do_file_changed(self):
        """Debounce timeout: push refreshed metadata for changed files."""
        # Swap out the pending set atomically so new events start a fresh batch.
        files, self.changed_files = self.changed_files, set()
        for path in files:
            url = path_to_url(path)
            if url in self.files:
                try:
                    self.view.image_changed(url, file_metadata(path))
                except EnvironmentError:
                    # File vanished between the event and the re-stat.
                    del self.files[url]

    def do_dir_changed(self):
        """Debounce timeout: pick up files newly added to watched dirs."""
        dirs, self.changed_dirs = self.changed_dirs, set()
        all_files = {f['path'] for f in self.files.values()}
        added_files = set()
        for path in dirs:
            for f in files_from_dir(path):
                if f not in all_files:
                    added_files.add(f)
        for f in added_files:
            try:
                self.files[path_to_url(f)] = file_metadata(f)
            except EnvironmentError:
                # Vanished before we could stat it; skip.
                continue
            else:
                # Start watching the new file and (idempotently) its dir.
                d = os.path.dirname(f)
                self.directories.add(d)
                self.file_watcher.addPaths([d, f])
        self.view.refresh_files(self.files)

    def refresh_all(self):
        """Rebuild the whole file list from disk and refresh the view."""
        # Reduce the directory set to its roots: drop any dir that is a
        # prefix of an already-kept (sorted-earlier) dir.
        roots = set()
        for d in sorted(self.directories):
            for r in roots:
                if d.startswith(r):
                    break
            else:
                roots.add(d)
        files = {}
        # original_files is presumably a module-level global holding the
        # files given on the command line — not visible in this chunk,
        # TODO confirm it is defined at module scope.
        for cf in original_files:
            try:
                files[path_to_url(cf)] = file_metadata(cf)
            except EnvironmentError:
                continue
        for f in roots:
            for cf in files_from_dir(f):
                try:
                    files[path_to_url(cf)] = file_metadata(cf)
                except EnvironmentError:
                    continue
        self.files = files
        self.directories = {os.path.dirname(f['path']) for f in files.values()}
        self.view.refresh_files(self.files)