def load_style(self, path=None):
    """Load the user CSS stylesheet at <path> and apply it to the screen.

    With no <path>, simply resets to the default style.  The previous
    user style is always reset first; on failure the error is logged and
    shown on the OSD, and the default style remains in effect.
    """
    if not path:
        return self.reset_style()
    # load userstyle from path
    provider = None
    try:
        csspath = tools.relpath2root(path)
        if not csspath:
            # Refuse stylesheets that resolve outside the mount point.
            raise Exception('userstyle out of mount point is not allowed.')
        provider = Gtk.CssProvider.new()
        provider.load_from_path(csspath)
    except Exception as e:
        provider = None
        text = _('Failed to load userstyle: "{}", {}').format(path, e)
        log.warning(text)
        self.osd.show(text)
    finally:
        # always reset before setting userstyle
        self.reset_style()
    if not provider:
        # failed to load userstyle, stop
        return
    self.styleprovider = provider
    Gtk.StyleContext.add_provider_for_screen(
        self.get_screen(), self.styleprovider,
        Gtk.STYLE_PROVIDER_PRIORITY_USER)
def _library_version(self):
    """ Examines the library database structure to determine
    which version of MComix created it.

    @return C{version} from the table C{Info} if available,
    C{0} otherwise. C{-1} if the database has not been created yet."""
    # Check if Comix' tables exist
    tables = ('book', 'collection', 'contain')
    for table in tables:
        if not self._table_exists(table):
            return -1
    if self._table_exists('info'):
        cursor = self._con.cursor()
        version = cursor.execute('''select value from info
            where key = 'version' ''').fetchone()
        cursor.close()
        if not version:
            log.warning(_('Could not determine library database version!'))
            return -1
        else:
            # BUG FIX: fetchone() returns a 1-tuple (row), so int() must
            # be applied to the first column, not the tuple itself.
            return int(version[0])
    else:
        # Comix database format
        return 0
def list_files(self, mode=FileProvider.IMAGES):
    """ Lists all files in the current directory. Returns a list of
    absolute paths, already sorted. """
    # Select the predicate used to filter directory entries.
    if mode == FileProvider.IMAGES:
        should_accept = image_tools.is_image_file
    elif mode == FileProvider.ARCHIVES:
        should_accept = archive_tools.is_archive_file
    else:
        # Unknown mode: accept every entry.
        should_accept = lambda file: True
    try:
        files = [ os.path.join(self.base_dir, filename) for filename in
                  # Explicitly convert all files to Unicode, even when
                  # os.listdir returns a mixture of byte/unicode strings.
                  # (MComix bug #3424405)
                  [ i18n.to_unicode(fn) for fn in os.listdir(self.base_dir) ]
                  if should_accept(os.path.join(self.base_dir, filename)) ]
        FileProvider.sort_files(files)
        return files
    except OSError:
        # Directory unreadable: report and return an empty listing.
        log.warning(u'! ' + _('Could not open %s: Permission denied.'), self.base_dir)
        return []
def _iter_contents(self, archive, root=None):
    # Generator: recursively yield the entry names of <archive>,
    # extracting and descending into any supported sub-archives.
    # <root> is the path prefix applied to names from sub-archives.
    self._archive_list.append(archive)
    self._archive_root[archive] = root
    supported_archive_regexp = archive_tools.get_supported_archive_regex()
    for f in archive.iter_contents():
        if supported_archive_regexp.search(f):
            # Extract sub-archive.
            destination_dir = os.path.join(self._destination_dir, 'sub-archives')
            if root is not None:
                destination_dir = os.path.join(destination_dir, root)
            archive.extract(f, destination_dir)
            # And open it and list its contents.
            sub_archive_path = os.path.join(destination_dir, f)
            sub_archive = archive_tools.get_archive_handler(
                sub_archive_path)
            if sub_archive is None:
                # Recognized by name but no handler available; skip it.
                log.warning('Non-supported archive format: %s' %
                            os.path.basename(sub_archive_path))
                continue
            sub_root = f
            if root is not None:
                sub_root = os.path.join(root, sub_root)
            for name in self._iter_contents(sub_archive, sub_root):
                yield name
        else:
            name = f
            if root is not None:
                name = os.path.join(root, name)
            # Remember which archive each (possibly prefixed) entry came from.
            self._entry_mapping[name] = (archive, f)
            yield name
def list_files(self, mode=FileProvider.IMAGES):
    ''' Lists all files in the current directory. Returns a list
    of absolute paths, already sorted. '''
    # Select the predicate used to filter directory entries.
    if mode == FileProvider.IMAGES:
        should_accept = functools.partial(image_tools.is_image_file,
                                          check_mimetype=True)
    elif mode == FileProvider.ARCHIVES:
        should_accept = archive_tools.is_archive_file
    else:
        # Unknown mode: accept every entry.
        should_accept = lambda file: True
    files = []
    fname_map = {}
    try:
        # listdir() return list of bytes only if path is bytes
        for fn in self.listdir(self.base_dir):
            filename = i18n.to_unicode(fn)
            fpath = os.path.join(self.base_dir, filename)
            if should_accept(fpath):
                files.append(fpath)
                # Map the unicode path back to the original (possibly
                # bytes) name so callers receive usable paths.
                fname_map[fpath] = os.path.join(self.base_dir, fn)
    except OSError:
        log.warning('! ' + _('Could not open %s: Permission denied.'), self.base_dir)
        return []
    # Sort on the unicode paths, then return the original forms.
    FileProvider.sort_files(files)
    return [fname_map[fpath] for fpath in files]
def register(self, name, bindings, callback, args=[], kwargs={}):
    """ Registers an action for a predefined keybinding name.
    @param name: Action name, defined in L{BINDING_INFO}.
    @param bindings: List of keybinding strings, as understood
                     by L{gtk.accelerator_parse}. Only used if no
                     bindings were loaded for this action.
    @param callback: Function callback
    @param args: List of arguments to pass to the callback
    @param kwargs: List of keyword arguments to pass to the callback.
    """
    global BINDING_INFO
    assert name in BINDING_INFO, "'%s' isn't a valid keyboard action." % name
    # Load stored keybindings, or fall back to passed arguments
    keycodes = self._action_to_bindings[name]
    if keycodes == []:
        keycodes = [gtk.accelerator_parse(binding) for binding in bindings ]
    for keycode in keycodes:
        if keycode in self._binding_to_action.keys():
            # Conflicting binding: only warn, the existing mapping wins.
            if self._binding_to_action[keycode] != name:
                log.warning(_('Keybinding for "%(action)s" overrides hotkey for another action.'),
                            {"action": name})
                log.warning('Binding %s overrides %r' % (keycode, self._binding_to_action[keycode]))
        else:
            self._binding_to_action[keycode] = name
            self._action_to_bindings[name].append(keycode)
    # Add gtk accelerator for labels in menu
    if len(self._action_to_bindings[name]) > 0:
        key, mod = self._action_to_bindings[name][0]
        gtk.accel_map_change_entry('<Actions>/mcomix-main/%s' % name, key, mod, True)
    self._action_to_callback[name] = (callback, args, kwargs)
def _iter_contents(self, archive, root=None):
    # Generator: recursively yield the entry names of <archive>,
    # extracting and descending into any supported sub-archives.
    # <root> is the path prefix applied to names from sub-archives.
    self._archive_list.append(archive)
    self._archive_root[archive] = root
    supported_archive_regexp = archive_tools.get_supported_archive_regex()
    for f in archive.iter_contents():
        if supported_archive_regexp.search(f):
            # Extract sub-archive.
            destination_dir = os.path.join(self._destination_dir, 'sub-archives')
            if root is not None:
                destination_dir = os.path.join(destination_dir, root)
            archive.extract(f, destination_dir)
            # And open it and list its contents.
            sub_archive_path = os.path.join(destination_dir, f)
            sub_archive = archive_tools.get_archive_handler(sub_archive_path)
            if sub_archive is None:
                # Recognized by name but no handler available; skip it.
                log.warning('Non-supported archive format: %s' %
                            os.path.basename(sub_archive_path))
                continue
            sub_root = f
            if root is not None:
                sub_root = os.path.join(root, sub_root)
            for name in self._iter_contents(sub_archive, sub_root):
                yield name
        else:
            name = f
            if root is not None:
                name = os.path.join(root, name)
            # Remember which archive each (possibly prefixed) entry came from.
            self._entry_mapping[name] = (archive, f)
            yield name
def setup(self, src, type=None):
    '''Setup the extractor with archive <src>.
    Return a threading.Condition related to the is_ready() method, or
    None if the format of <src> isn't supported.

    Raises ArchiveException when no handler supports the archive.
    '''
    self._src = src
    self._files = []
    self._extracted = set()
    self._archive = archive_tools.get_recursive_archive_handler(
        src, type=type, prefix='mcomix.extractor.')
    if self._archive is None:
        msg = _('Non-supported archive format: %s') % os.path.basename(src)
        log.warning(msg)
        raise ArchiveException(msg)
    # Extraction goes to the handler's own destination directory.
    self._dst = self._archive.destdir
    self._contents_listed = False
    self._extract_started = False
    self._condition = threading.Condition()
    # List the contents asynchronously; _list_contents_cb fires when done.
    self._threadpool.apply_async(
        self._list_contents, callback=self._list_contents_cb,
        error_callback=self._list_contents_errcb)
    self._setupped = True
    return self._condition
def register(self, name, callback, args=[], kwargs={}, bindings=[]):
    ''' Registers an action for a predefined keybinding name.
    @param name: Action name, defined in L{keybindings_map.BINDING_INFO}.
    @param bindings: List of keybinding strings, as understood
                     by L{Gtk.accelerator_parse}. Only used if no
                     bindings were loaded for this action.
    @param callback: Function callback
    @param args: List of arguments to pass to the callback
    @param kwargs: List of keyword arguments to pass to the callback.
    '''
    assert name in keybindings_map.BINDING_INFO, '"%s" isn\'t a valid keyboard action.' % name
    # Load stored keybindings, or fall back to passed arguments
    keycodes = self._action_to_bindings[name]
    if keycodes == []:
        keycodes = [Gtk.accelerator_parse(binding) for binding in bindings]
    for keycode in keycodes:
        if keycode in self._binding_to_action.keys():
            # Conflicting binding: only warn, the existing mapping wins.
            if self._binding_to_action[keycode] != name:
                log.warning(_('Keybinding for "%(action)s" overrides hotkey for another action.'),
                            {'action': name})
                log.warning('Binding %s overrides %r', keycode, self._binding_to_action[keycode])
        else:
            self._binding_to_action[keycode] = name
            self._action_to_bindings[name].append(keycode)
    # Add gtk accelerator for labels in menu
    if len(self._action_to_bindings[name]) > 0:
        key, mod = self._action_to_bindings[name][0]
        Gtk.AccelMap.change_entry('<Actions>/mcomix-main/%s' % name, key, mod, True)
    self._action_to_callback[name] = (callback, args, kwargs)
def _save_thumbnail(self, pixbuf, thumbpath, tEXt_data):
    '''Write <pixbuf> to <thumbpath> as a PNG, embedding the key/value
    metadata pairs from <tEXt_data>.  An existing file at <thumbpath>
    is replaced.  Errors are logged, never raised.'''
    try:
        parent_dir = os.path.dirname(thumbpath)
        if not os.path.isdir(parent_dir):
            # Thumbnail directories are private to the user.
            os.makedirs(parent_dir, 0o700)
        if os.path.isfile(thumbpath):
            os.remove(thumbpath)
        # savev() takes the metadata as two parallel lists.
        pixbuf.savev(thumbpath, 'png',
                     list(tEXt_data.keys()), list(tEXt_data.values()))
        os.chmod(thumbpath, 0o600)
    except Exception as ex:
        log.warning(
            _('! Could not save thumbnail "%(thumbpath)s": %(error)s'),
            {'thumbpath': thumbpath, 'error': ex})
def setup(self, src, dst, type=None):
    """Setup the extractor with archive <src> and destination dir <dst>.
    Return a threading.Condition related to the is_ready() method, or
    None if the format of <src> isn't supported.

    Raises ArchiveException when no handler supports the archive.
    """
    self._src = src
    self._dst = dst
    self._files = []
    self._extracted = set()
    self._archive = archive_tools.get_recursive_archive_handler(src, dst, type=type)
    if self._archive is None:
        msg = _('Non-supported archive format: %s') % os.path.basename(src)
        log.warning(msg)
        raise ArchiveException(msg)
    self._contents_listed = False
    self._extract_started = False
    self._condition = threading.Condition()
    # Content listing runs on a worker thread; callers wait on the
    # returned condition / is_ready() for completion.
    self._list_thread = WorkerThread(self._list_contents, name='list')
    self._list_thread.append_order(self._archive)
    self._setupped = True
    return self._condition
def _is_not_unrar_free(path): if 'win32' == sys.platform: return True if os.path.realpath(path).endswith(f'{os.path.sep}unrar-free'): log.warning(f'RAR executable {path} is unrar-free, ignoring') return False return True
def is_not_unrar_free(exe):
    '''Return True unless <exe> (after following symlinks) is the
    crippled unrar-free binary, which is rejected with a warning.'''
    target = exe
    # Follow chained symlinks manually to find the actual binary.
    while os.path.islink(target):
        target = os.readlink(target)
    if not target.endswith(os.path.sep + 'unrar-free'):
        return True
    log.warning(
        'RAR executable %s is unrar-free, ignoring', exe)
    return False
def archive_mime_type(path):
    '''Return the archive type of <path> or None for non-archives.'''
    try:
        if os.path.isfile(path):
            if not os.access(path, os.R_OK):
                return None
            if zipfile.is_zipfile(path):
                # Prefer the built-in zip module when it supports the file.
                if zip_py.is_py_supported_zipfile(path):
                    return constants.ZIP
                else:
                    return constants.ZIP_EXTERNAL
            with open(path, 'rb') as fd:
                magic = fd.read(10)
            try:
                istarfile = tarfile.is_tarfile(path)
            except IOError:
                # Tarfile raises an error when accessing certain network shares
                istarfile = False
            if istarfile and os.path.getsize(path) > 0:
                # Compressed tar flavours are told apart by the outer magic.
                if magic.startswith(b'\x1f\x8b\x08'):
                    return constants.GZIP
                elif magic.startswith(b'BZh') and magic[4:10] == b'1AY&SY':
                    return constants.BZIP2
                elif magic.startswith((b'\x5d\x00\x00\x80', b'\xfd7zXZ')):
                    return constants.XZ
                else:
                    return constants.TAR
            if magic.startswith(b'Rar!\x1a\x07'):
                # Choice of handler depends on the external 7z's RAR support.
                if sevenzip_external.is_7z_support_rar():
                    return constants.RAR
                else:
                    return constants.RAR5
            if magic[0:6] == b'7z\xbc\xaf\x27\x1c':
                return constants.SEVENZIP
            if magic[2:].startswith((b'-lh', b'-lz')):
                return constants.LHA
            if magic[0:4] == b'%PDF':
                return constants.PDF
            if magic.startswith((b'sqsh', b'hsqs')):
                return constants.SQUASHFS
    except Exception:
        log.warning(_('! Could not read %s'), path)
    # Not a recognized archive.
    return None
def fileinfo_conv(fileinfo_pickle, fileinfo_json):
    # Upgrade the legacy pickled file-info database at <fileinfo_pickle>
    # to JSON at <fileinfo_json>.  On success the pickle is kept as a
    # '.bak' backup; on failure the upgrade is logged and skipped.
    try:
        with open(fileinfo_pickle, mode='rb') as f:
            loader = legacy_pickle_loader(f)
            fileinfo = loader.load()
    except Exception as e:
        log.warning('! Failed to upgrade {}, {}'.format(
            fileinfo_pickle, str(e)))
    else:
        with open(fileinfo_json, mode='wt', encoding='utf8') as f:
            json.dump(fileinfo, f, ensure_ascii=False, indent=2)
        os.rename(fileinfo_pickle, fileinfo_pickle + '.bak')
def archive_mime_type(path):
    """Return the archive type of <path> or None for non-archives."""
    try:
        if os.path.isfile(path):
            if not os.access(path, os.R_OK):
                return None
            if zipfile.is_zipfile(path):
                # Prefer the built-in zip module when it supports the file.
                if zip.is_py_supported_zipfile(path):
                    return constants.ZIP
                else:
                    return constants.ZIP_EXTERNAL
            fd = open(path, 'rb')
            magic = fd.read(5)
            fd.close()
            try:
                istarfile = tarfile.is_tarfile(path)
            except IOError:
                # Tarfile raises an error when accessing certain network shares
                istarfile = False
            if istarfile and os.path.getsize(path) > 0:
                # Compressed tar flavours are told apart by the outer magic.
                if magic.startswith('BZh'):
                    return constants.BZIP2
                elif magic.startswith('\037\213'):
                    return constants.GZIP
                else:
                    return constants.TAR
            if magic[0:4] == 'Rar!':
                return constants.RAR
            if magic[0:4] == '7z\xBC\xAF':
                return constants.SEVENZIP
            # Headers for TAR-XZ and TAR-LZMA that aren't supported by tarfile
            if magic[0:5] == '\xFD7zXZ' or magic[0:5] == ']\x00\x00\x80\x00':
                return constants.XZ
            if magic[2:4] == '-l':
                return constants.LHA
            if magic[0:4] == '%PDF':
                return constants.PDF
    except Exception:
        log.warning(_('! Could not read %s'), path)
    # Not a recognized archive.
    return None
def archive_mime_type(path):
    """Return the archive type of <path> or None for non-archives."""
    try:
        if os.path.isfile(path):
            if not os.access(path, os.R_OK):
                return None
            if zipfile.is_zipfile(path):
                # Prefer the built-in zip module when it supports the file.
                if zip.is_py_supported_zipfile(path):
                    return constants.ZIP
                else:
                    return constants.ZIP_EXTERNAL
            fd = open(path, 'rb')
            magic = fd.read(5)
            fd.close()
            try:
                istarfile = tarfile.is_tarfile(path)
            except IOError:
                # Tarfile raises an error when accessing certain network shares
                istarfile = False
            if istarfile and os.path.getsize(path) > 0:
                if magic.startswith('BZh'):
                    return constants.BZIP2
                elif magic.startswith('\037\213'):
                    return constants.GZIP
                else:
                    return constants.TAR
            if magic[0:4] == 'Rar!':
                return constants.RAR
            if magic[0:4] == '7z\xBC\xAF':
                return constants.SEVENZIP
            # Headers for TAR-XZ and TAR-LZMA that aren't supported by tarfile
            # BUG FIX: these magics identify XZ/LZMA data (as the comment
            # above states), not 7-zip archives; return XZ so the proper
            # handler is chosen, matching the sibling implementation.
            if magic[0:5] == '\xFD7zXZ' or magic[0:5] == ']\x00\x00\x80\x00':
                return constants.XZ
            if magic[2:4] == '-l':
                return constants.LHA
            if magic[0:4] == '%PDF':
                return constants.PDF
    except Exception:
        log.warning(_('! Could not read %s'), path)
    # Not a recognized archive.
    return None
def _copy_windows(self, pixbuf, path):
    """ Copies pixbuf and path to the clipboard.
    Uses native Win32 API, as GTK+ doesn't seem to work. """
    windll = ctypes.windll
    OpenClipboard = windll.user32.OpenClipboard
    EmptyClipboard = windll.user32.EmptyClipboard
    SetClipboardData = windll.user32.SetClipboardData
    CloseClipboard = windll.user32.CloseClipboard
    GlobalAlloc = windll.kernel32.GlobalAlloc
    GlobalLock = windll.kernel32.GlobalLock
    # GlobalLock returns a raw pointer, not the ctypes default c_int.
    GlobalLock.restype = ctypes.c_void_p
    GlobalUnlock = windll.kernel32.GlobalUnlock

    def buffer_to_handle(buffer, buffer_size):
        """ Creates a memory handle for the passed data.
        This handle doesn't need to be freed by the application. """
        global_mem = GlobalAlloc(
            0x0042,  # GMEM_MOVEABLE | GMEM_ZEROINIT
            buffer_size)
        lock = GlobalLock(global_mem)
        ctypes.memmove(lock, ctypes.addressof(buffer), buffer_size)
        GlobalUnlock(global_mem)
        return global_mem

    # Paste the text as Unicode string
    text_buffer = ctypes.create_unicode_buffer(path)
    text_handle = buffer_to_handle(text_buffer, ctypes.sizeof(text_buffer))
    # Paste the image as Win32 DIB structure
    pil = image_tools.pixbuf_to_pil(pixbuf)
    output = cStringIO.StringIO()
    pil.convert("RGB").save(output, "BMP")
    # Strip the 14-byte BMP file header to obtain the raw DIB data.
    dibdata = output.getvalue()[14:]
    output.close()
    image_buffer = ctypes.create_string_buffer(dibdata)
    image_handle = buffer_to_handle(image_buffer, ctypes.sizeof(image_buffer))
    # Actually copy data to clipboard
    if OpenClipboard(self._window.window.handle):
        EmptyClipboard()
        SetClipboardData(
            13,  # CF_UNICODETEXT
            text_handle)
        SetClipboardData(
            8,  # CF_DIB
            image_handle)
        CloseClipboard()
    else:
        log.warning('Could not open clipboard.')
def get_book_thumbnail(self, path):
    """ Returns a pixbuf with a thumbnail of the cover of the book at <path>,
    or None, if no thumbnail could be generated. """
    thumbnailer = thumbnail_tools.Thumbnailer(dst_dir=constants.LIBRARY_COVERS_PATH)
    thumbnailer.set_store_on_disk(True)
    # This is the maximum image size allowed by the library, so that thumbnails might be downscaled,
    # but never need to be upscaled (and look ugly)
    thumbnailer.set_size(constants.MAX_LIBRARY_COVER_SIZE, constants.MAX_LIBRARY_COVER_SIZE)
    thumb = thumbnailer.thumbnail(path)
    if thumb is None:
        # Log and fall through: callers handle the None return.
        log.warning(
            _('! Could not get cover for book "%s"'),
            path
        )
    return thumb
def extract(self, filename, destination_dir):
    '''Extract <filename> from the tar archive into <destination_dir>
    and return the path of the extracted file.

    A corrupted/special member is logged and written out as an empty
    file instead of crashing.'''
    destination_path = os.path.join(destination_dir, filename)
    member = self._contents_info[filename]
    # BUG FIX: <data> must be defined even when extraction fails below,
    # otherwise the write() would raise NameError.
    data = b''
    with self._lock:
        try:
            with self._tar.extractfile(member) as fp:
                data = fp.read()
        except AttributeError:
            # extractfile() returns None for members it cannot extract
            # (corrupted entries, special files); entering the 'with'
            # then raises AttributeError.
            log.warning(_('Corrupted file: %(filename)s'),
                        {'filename': filename})
    with self._create_file(destination_path) as new:
        new.write(data)
    return destination_path
def extract(self, filename, destination_dir):
    """ Extract <filename> from the zip archive into <destination_dir>.
    Logs a warning when the extracted size does not match the size
    recorded in the archive (corrupt or unsupported archive). """
    # Resolve the in-archive name once and read the data before opening
    # the destination file, so a read failure cannot leak the handle.
    original = self._original_filename(filename)
    content = self.zip.read(original)
    zipinfo = self.zip.getinfo(original)
    new = self._create_file(os.path.join(destination_dir, filename))
    try:
        new.write(content)
    finally:
        # Always close the destination file, even if the write fails.
        new.close()
    if len(content) != zipinfo.file_size:
        log.warning(_('%(filename)s\'s extracted size is %(actual_size)d bytes,'
            ' but should be %(expected_size)d bytes.'
            ' The archive might be corrupt or in an unsupported format.'),
            { 'filename' : filename, 'actual_size' : len(content),
              'expected_size' : zipinfo.file_size })
def load_icons(): _icons = (('gimp-flip-horizontal.png', 'mcomix-flip-horizontal'), ('gimp-flip-vertical.png', 'mcomix-flip-vertical'), ('gimp-rotate-180.png', 'mcomix-rotate-180'), ('gimp-rotate-270.png', 'mcomix-rotate-270'), ('gimp-rotate-90.png', 'mcomix-rotate-90'), ('gimp-thumbnails.png', 'mcomix-thumbnails'), ('gimp-transform.png', 'mcomix-transform'), ('tango-enhance-image.png', 'mcomix-enhance-image'), ('tango-add-bookmark.png', 'mcomix-add-bookmark'), ('tango-archive.png', 'mcomix-archive'), ('tango-image.png', 'mcomix-image'), ('library.png', 'mcomix-library'), ('comments.png', 'mcomix-comments'), ('zoom.png', 'mcomix-zoom'), ('lens.png', 'mcomix-lens'), ('double-page.png', 'mcomix-double-page'), ('manga.png', 'mcomix-manga'), ('fitbest.png', 'mcomix-fitbest'), ('fitwidth.png', 'mcomix-fitwidth'), ('fitheight.png', 'mcomix-fitheight'), ('fitmanual.png', 'mcomix-fitmanual'), ('goto-first-page.png', 'mcomix-goto-first-page'), ('goto-last-page.png', 'mcomix-goto-last-page'), ('next-page.png', 'mcomix-next-page'), ('previous-page.png', 'mcomix-previous-page'), ('next-archive.png', 'mcomix-next-archive'), ('previous-archive.png', 'mcomix-previous-archive'), ('next-directory.png', 'mcomix-next-directory'), ('previous-directory.png', 'mcomix-previous-directory')) # Load window title icons. pixbufs = mcomix_icons() gtk.window_set_default_icon_list(*pixbufs) # Load application icons. factory = gtk.IconFactory() for filename, stockid in _icons: try: icon_data = resource_string('mcomix.images', filename) pixbuf = image_tools.load_pixbuf_data(icon_data) iconset = gtk.IconSet(pixbuf) factory.add(stockid, iconset) except Exception: log.warning(_('! Could not load icon "%s"'), filename) factory.add_default()
def _copy_windows(self, pixbuf, path):
    """ Copies pixbuf and path to the clipboard.
    Uses native Win32 API, as GTK+ doesn't seem to work. """
    windll = ctypes.windll
    OpenClipboard = windll.user32.OpenClipboard
    EmptyClipboard = windll.user32.EmptyClipboard
    SetClipboardData = windll.user32.SetClipboardData
    CloseClipboard = windll.user32.CloseClipboard
    GlobalAlloc = windll.kernel32.GlobalAlloc
    GlobalLock = windll.kernel32.GlobalLock
    # GlobalLock returns a raw pointer, not the ctypes default c_int.
    GlobalLock.restype = ctypes.c_void_p
    GlobalUnlock = windll.kernel32.GlobalUnlock

    def buffer_to_handle(buffer, buffer_size):
        """ Creates a memory handle for the passed data.
        This handle doesn't need to be freed by the application. """
        global_mem = GlobalAlloc(
            0x0042,  # GMEM_MOVEABLE | GMEM_ZEROINIT
            buffer_size)
        lock = GlobalLock(global_mem)
        ctypes.memmove(lock, ctypes.addressof(buffer), buffer_size)
        GlobalUnlock(global_mem)
        return global_mem

    # Paste the text as Unicode string
    text_buffer = ctypes.create_unicode_buffer(path)
    text_handle = buffer_to_handle(text_buffer, ctypes.sizeof(text_buffer))
    # Paste the image as Win32 DIB structure
    pil = image_tools.pixbuf_to_pil(pixbuf)
    output = cStringIO.StringIO()
    pil.convert("RGB").save(output, "BMP")
    # Strip the 14-byte BMP file header to obtain the raw DIB data.
    dibdata = output.getvalue()[14:]
    output.close()
    image_buffer = ctypes.create_string_buffer(dibdata)
    image_handle = buffer_to_handle(image_buffer, ctypes.sizeof(image_buffer))
    # Actually copy data to clipboard
    if OpenClipboard(self._window.window.handle):
        EmptyClipboard()
        SetClipboardData(13,  # CF_UNICODETEXT
                         text_handle)
        SetClipboardData(8,  # CF_DIB
                         image_handle)
        CloseClipboard()
    else:
        log.warning('Could not open clipboard.')
def bookmarks_conv(bookmarks_pickle, bookmarks_json):
    # Upgrade the legacy pickled bookmarks file to JSON.  The datetime in
    # each bookmark tuple is converted to a POSIX timestamp so the data
    # is JSON-serializable.  On success the pickle is kept as '.bak'.
    try:
        with open(bookmarks_pickle, mode='rb') as f:
            loader = legacy_pickle_loader(f)
            # The pickle stream holds the format version, then the list
            # of bookmark tuples.
            version = loader.load()
            bookmarks = [(name, path, page, numpages, packtype, date.timestamp())
                         for name, path, page, numpages, packtype, date
                         in loader.load()]
    except Exception as e:
        log.warning('! Failed to upgrade {}, {}'.format(
            bookmarks_pickle, str(e)))
    else:
        with open(bookmarks_json, mode='wt', encoding='utf8') as f:
            json.dump((version, bookmarks), f, ensure_ascii=False, indent=2)
        os.rename(bookmarks_pickle, bookmarks_pickle + '.bak')
def extract(self, filename, destination_dir):
    # Extract <filename> from the zip archive into <destination_dir> and
    # return the path of the extracted file.
    destination_path = os.path.join(destination_dir, filename)
    info = self._contents_info[filename]
    with self._lock:
        # Serialize reads: the underlying ZipFile is shared across threads.
        data = self._zip.read(info)
    with self._create_file(destination_path) as new:
        filelen = new.write(data)
    if filelen != info.file_size:
        # A size mismatch usually indicates a corrupt archive.
        log.warning(
            _('%(filename)s\'s extracted size is %(actual_size)d bytes,'
              ' but should be %(expected_size)d bytes.'
              ' The archive might be corrupt or in an unsupported format.'),
            {'filename': filename, 'actual_size': filelen,
             'expected_size': info.file_size})
    return destination_path
def get_book_thumbnail(self, path):
    '''Return a pixbuf holding a cover thumbnail for the book at <path>,
    or None when no thumbnail could be generated.'''
    cover_maker = thumbnail_tools.Thumbnailer(
        dst_dir=constants.LIBRARY_COVERS_PATH)
    cover_maker.set_store_on_disk(True)
    # Create covers at the largest size the library displays: they may be
    # downscaled later, but never upscaled (which would look ugly).
    max_size = constants.MAX_LIBRARY_COVER_SIZE
    cover_maker.set_size(max_size, max_size)
    cover = cover_maker.thumbnail(path)
    if cover is None:
        log.warning(_('! Could not get cover for book "%s"'), path)
    return cover
def _save_thumbnail(self, pixbuf, thumbpath, tEXt_data):
    """ Saves <pixbuf> as <thumbpath>, with additional metadata
    from <tEXt_data>. If <thumbpath> already exists, it is
    overwritten. """
    try:
        directory = os.path.dirname(thumbpath)
        if not os.path.isdir(directory):
            # Thumbnail directories are private to the user.
            os.makedirs(directory, 0700)
        if os.path.isfile(thumbpath):
            os.remove(thumbpath)
        pixbuf.save(thumbpath, 'png', tEXt_data)
        os.chmod(thumbpath, 0600)
    # Failures are logged, never raised: a missing thumbnail is not fatal.
    except Exception, ex:
        log.warning(
            _('! Could not save thumbnail "%(thumbpath)s": %(error)s'),
            { 'thumbpath' : thumbpath, 'error' : ex } )
def get_book_thumbnail(self, path): ''' Returns a pixbuf with a thumbnail of the cover of the book at <path>, or None, if no thumbnail could be generated. ''' # Use the maximum image size allowed by the library, so that thumbnails # might be downscaled, but never need to be upscaled (and look ugly). thumbnailer = thumbnail_tools.Thumbnailer( dst_dir=constants.LIBRARY_COVERS_PATH, store_on_disk=True, archive_support=True, size=(constants.MAX_LIBRARY_COVER_SIZE, constants.MAX_LIBRARY_COVER_SIZE)) thumb = thumbnailer.thumbnail(path) if thumb is None: log.warning(_('! Could not get cover for book "%s"'), path) return thumb
def archive_mime_type(path):
    """Return the archive type of <path> or None for non-archives."""
    try:
        if os.path.isfile(path):
            if not os.access(path, os.R_OK):
                return None
            if zipfile.is_zipfile(path):
                return constants.ZIP
            fd = open(path, 'rb')
            magic = fd.read(4)
            fd.close()
            try:
                istarfile = tarfile.is_tarfile(path)
            except IOError:
                # Tarfile raises an error when accessing certain network shares
                istarfile = False
            if istarfile and os.path.getsize(path) > 0:
                # Compressed tar flavours are told apart by the outer magic.
                if magic.startswith('BZh'):
                    return constants.BZIP2
                elif magic.startswith('\037\213'):
                    return constants.GZIP
                else:
                    return constants.TAR
            if magic == 'Rar!':
                return constants.RAR
            elif magic == '7z\xBC\xAF':
                return constants.SEVENZIP
            elif magic[2:] == '-l':
                return constants.LHA
            #if magic == '%PDF':
            #    return constants.PDF
    except Exception:
        log.warning(_('! Could not read %s'), path)
    # Not a recognized archive.
    return None
def _iter_contents(self, archive, root=None, decrypt=True):
    # Generator: recursively yield the entry names of <archive>,
    # descending into sub-archives.  Sub-archives are extracted into
    # per-archive temporary directories under self.destdir.
    if archive.is_encrypted and not decrypt:
        # Skip encrypted archives entirely when decryption is disabled.
        return
    if not root:
        root = os.path.join(self.destdir, 'main_archive')
    self._archive_list.append(archive)
    self._archive_root[archive] = root
    sub_archive_list = []
    for f in archive.iter_contents():
        if archive_tools.is_archive_file(f):
            # We found a sub-archive, don't try to extract it now, as we
            # must finish listing the containing archive contents before
            # any extraction can be done.
            sub_archive_list.append(f)
            name = f if root is None else os.path.join(root, f)
            self._entry_mapping[name] = (archive, f)
            self._sub_archives.add(name)
            continue
        name = f
        if root is not None:
            name = os.path.join(root, name)
        self._entry_mapping[name] = (archive, f)
        yield name
    for f in sub_archive_list:
        # Extract sub-archive.
        destination_dir = self.destdir
        if root is not None:
            destination_dir = os.path.join(destination_dir, root)
        sub_archive_path = archive.extract(f, destination_dir)
        # And open it and list its contents.
        sub_archive = archive_tools.get_archive_handler(sub_archive_path)
        if sub_archive is None:
            log.warning('Non-supported archive format: %s',
                        os.path.basename(sub_archive_path))
            continue
        # Each sub-archive gets its own temp dir, numbered by discovery
        # order; kept in _sub_tempdirs so cleanup happens at teardown.
        sub_tempdir = tempfile.TemporaryDirectory(
            prefix='sub_archive.{:04}.'.format(len(self._archive_list)),
            dir=self.destdir)
        sub_root = sub_tempdir.name
        self._sub_tempdirs.append(sub_tempdir)
        for name in self._iter_contents(sub_archive, sub_root):
            yield name
        # The extracted sub-archive file itself is no longer needed.
        os.remove(sub_archive_path)
def archive_mime_type(path):
    """Return the archive type of <path> or None for non-archives."""
    try:
        if os.path.isfile(path):
            if not os.access(path, os.R_OK):
                return None
            if zipfile.is_zipfile(path):
                return constants.ZIP
            fd = open(path, 'rb')
            magic = fd.read(4)
            fd.close()
            # ROBUSTNESS: tarfile.is_tarfile() raises IOError on certain
            # network shares; without this guard the outer handler would
            # abort detection entirely instead of falling through to the
            # magic-byte checks below.
            try:
                istarfile = tarfile.is_tarfile(path)
            except IOError:
                istarfile = False
            if istarfile and os.path.getsize(path) > 0:
                if magic.startswith('BZh'):
                    return constants.BZIP2
                if magic.startswith('\037\213'):
                    return constants.GZIP
                return constants.TAR
            if magic == 'Rar!':
                return constants.RAR
            elif magic == '7z\xBC\xAF':
                return constants.SEVENZIP
            elif magic[2:] == '-l':
                return constants.LHA
            #if magic == '%PDF':
            #    return constants.PDF
    except Exception:
        log.warning(_('! Could not read %s'), path)
    # Not a recognized archive.
    return None
def list_files(self, mode=FileProvider.IMAGES):
    """ Lists all files in the current directory. Returns a list of
    absolute paths, already sorted. """
    if mode == FileProvider.IMAGES:
        should_accept = lambda file: image_tools.is_image_file(file)
    elif mode == FileProvider.ARCHIVES:
        # BUG FIX: the second positional argument of a compiled pattern's
        # search() is the start *position*, not a flags value.  Passing
        # re.I (== 2) silently skipped the first two characters of every
        # path.  Case-insensitivity must come from the pattern's own
        # compile-time flags.
        should_accept = lambda file: \
            constants.SUPPORTED_ARCHIVE_REGEX.search(file) is not None
    else:
        should_accept = lambda file: True
    try:
        files = [ os.path.join(self.base_dir, filename) for filename in
                  # Explicitly convert all files to Unicode, even when
                  # os.listdir returns a mixture of byte/unicode strings.
                  # (MComix bug #3424405)
                  [i18n.to_unicode(fn) for fn in os.listdir(self.base_dir)]
                  if should_accept(os.path.join(self.base_dir, filename)) ]
        if preferences.prefs['sort by'] == constants.SORT_NAME:
            tools.alphanumeric_sort(files)
        elif preferences.prefs['sort by'] == constants.SORT_LAST_MODIFIED:
            # Most recently modified file first
            files.sort(
                key=lambda filename: os.path.getmtime(filename) * -1)
        elif preferences.prefs['sort by'] == constants.SORT_SIZE:
            # Smallest file first
            files.sort(key=lambda filename: os.stat(filename).st_size)
        # else: don't sort at all: use OS ordering.
        # Default is ascending.
        if preferences.prefs['sort order'] == constants.SORT_DESCENDING:
            files.reverse()
        return files
    except OSError:
        log.warning(u'! ' + _('Could not open %s: Permission denied.'),
                    self.base_dir)
        return []
def read_preferences_file():
    '''Read preferences data from disk.

    A corrupt preferences file is moved aside as '<path>.broken' and
    defaults are kept.  Only keys already present in <prefs> are updated,
    so stale/unknown keys in the file are ignored.'''
    saved_prefs = {}
    if os.path.isfile(constants.PREFERENCE_PATH):
        try:
            with open(constants.PREFERENCE_PATH, 'r') as config_file:
                saved_prefs.update(json.load(config_file))
        # BUG FIX: narrowed from a bare 'except:', which would also have
        # swallowed SystemExit and KeyboardInterrupt.
        except Exception:
            # Gettext might not be installed yet at this point.
            corrupt_name = constants.PREFERENCE_PATH + '.broken'
            log.warning('! Corrupt preferences file, moving to "%s".' %
                        corrupt_name)
            if os.path.isfile(corrupt_name):
                os.unlink(corrupt_name)
            os.rename(constants.PREFERENCE_PATH, corrupt_name)
    prefs.update(filter(lambda i: i[0] in prefs, saved_prefs.items()))
def _iter_contents(self, archive, root=None):
    # Generator: recursively yield the entry names of <archive>.
    # Sub-archives are extracted, then renamed into a flat
    # 'sub-archives' directory (numbered by discovery order) before
    # being opened and listed recursively.
    self._archive_list.append(archive)
    self._archive_root[archive] = root
    sub_archive_list = []
    for f in archive.iter_contents():
        if archive_tools.is_archive_file(f):
            # We found a sub-archive, don't try to extract it now, as we
            # must finish listing the containing archive contents before
            # any extraction can be done.
            sub_archive_list.append(f)
            continue
        name = f
        if root is not None:
            name = os.path.join(root, name)
        self._entry_mapping[name] = (archive, f)
        yield name
    for f in sub_archive_list:
        # Extract sub-archive.
        destination_dir = self._destination_dir
        if root is not None:
            destination_dir = os.path.join(destination_dir, root)
        archive.extract(f, destination_dir)
        sub_archive_ext = os.path.splitext(f)[1].lower()[1:]
        sub_archive_path = os.path.join(
            self._destination_dir, 'sub-archives',
            '%04u.%s' % (len(self._archive_list), sub_archive_ext ))
        self._create_directory(os.path.dirname(sub_archive_path))
        os.rename(os.path.join(destination_dir, f), sub_archive_path)
        # And open it and list its contents.
        sub_archive = archive_tools.get_archive_handler(sub_archive_path)
        if sub_archive is None:
            log.warning('Non-supported archive format: %s',
                        os.path.basename(sub_archive_path))
            continue
        sub_root = f
        if root is not None:
            sub_root = os.path.join(root, sub_root)
        for name in self._iter_contents(sub_archive, sub_root):
            yield name
def load_icons(): _icons = (('gimp-flip-horizontal.png', 'mcomix-flip-horizontal'), ('gimp-flip-vertical.png', 'mcomix-flip-vertical'), ('gimp-rotate-180.png', 'mcomix-rotate-180'), ('gimp-rotate-270.png', 'mcomix-rotate-270'), ('gimp-rotate-90.png', 'mcomix-rotate-90'), ('gimp-thumbnails.png', 'mcomix-thumbnails'), ('gimp-transform.png', 'mcomix-transform'), ('tango-enhance-image.png', 'mcomix-enhance-image'), ('tango-add-bookmark.png', 'mcomix-add-bookmark'), ('tango-archive.png', 'mcomix-archive'), ('tango-image.png', 'mcomix-image'), ('library.png', 'mcomix-library'), ('comments.png', 'mcomix-comments'), ('zoom.png', 'mcomix-zoom'), ('lens.png', 'mcomix-lens'), ('double-page.png', 'mcomix-double-page'), ('manga.png', 'mcomix-manga'), ('fitbest.png', 'mcomix-fitbest'), ('fitwidth.png', 'mcomix-fitwidth'), ('fitheight.png', 'mcomix-fitheight'), ('fitmanual.png', 'mcomix-fitmanual'), ('fitsize.png', 'mcomix-fitsize')) # Load window title icons. pixbufs = mcomix_icons() Gtk.Window.set_default_icon_list(pixbufs) # Load application icons. factory = Gtk.IconFactory() for filename, stockid in _icons: try: icon_data = tools.read_binary('images', filename) pixbuf = image_tools.load_pixbuf_data(icon_data) iconset = Gtk.IconSet.new_from_pixbuf(pixbuf) factory.add(stockid, iconset) except Exception: log.warning(_('! Could not load icon "%s"'), filename) factory.add_default()
def _iter_contents(self, archive, root=None):
    # Generator: recursively yield the entry names of <archive>.
    # Sub-archives are extracted, then renamed into a flat
    # 'sub-archives' directory (numbered by discovery order) before
    # being opened and listed recursively.
    self._archive_list.append(archive)
    self._archive_root[archive] = root
    sub_archive_list = []
    for f in archive.iter_contents():
        if archive_tools.is_archive_file(f):
            # We found a sub-archive, don't try to extract it now, as we
            # must finish listing the containing archive contents before
            # any extraction can be done.
            sub_archive_list.append(f)
            continue
        name = f
        if root is not None:
            name = os.path.join(root, name)
        self._entry_mapping[name] = (archive, f)
        yield name
    for f in sub_archive_list:
        # Extract sub-archive.
        destination_dir = self._destination_dir
        if root is not None:
            destination_dir = os.path.join(destination_dir, root)
        archive.extract(f, destination_dir)
        sub_archive_ext = os.path.splitext(f)[1].lower()[1:]
        sub_archive_path = os.path.join(
            self._destination_dir, 'sub-archives',
            '%04u.%s' % (len(self._archive_list), sub_archive_ext))
        self._create_directory(os.path.dirname(sub_archive_path))
        os.rename(os.path.join(destination_dir, f), sub_archive_path)
        # And open it and list its contents.
        sub_archive = archive_tools.get_archive_handler(sub_archive_path)
        if sub_archive is None:
            log.warning('Non-supported archive format: %s',
                        os.path.basename(sub_archive_path))
            continue
        sub_root = f
        if root is not None:
            sub_root = os.path.join(root, sub_root)
        for name in self._iter_contents(sub_archive, sub_root):
            yield name
def list_files(self, mode=FileProvider.IMAGES):
    """ Lists all files in the current directory. Returns a list of
    absolute paths, already sorted.

    @param mode: FileProvider.IMAGES to keep only image files,
        FileProvider.ARCHIVES to keep only supported archives; any
        other value keeps every file.
    @return: list of absolute, Unicode paths sorted according to the
        'sort by'/'sort order' preferences, or an empty list if the
        directory cannot be read.
    """
    if mode == FileProvider.IMAGES:
        should_accept = image_tools.is_image_file
    elif mode == FileProvider.ARCHIVES:
        # BUG FIX: the second positional argument of a *compiled*
        # Pattern.search() is the start position <pos>, not a flags
        # value. The original call passed re.I (== 2), which silently
        # started the search at index 2 instead of making it
        # case-insensitive. Case-insensitivity must come from the
        # pattern's own compile flags -- presumably
        # SUPPORTED_ARCHIVE_REGEX is compiled with re.I; TODO confirm
        # in constants.py.
        should_accept = lambda file: \
            constants.SUPPORTED_ARCHIVE_REGEX.search(file) is not None
    else:
        should_accept = lambda file: True

    try:
        files = [ os.path.join(self.base_dir, filename)
                  for filename in
                  # Explicitly convert all files to Unicode, even when
                  # os.listdir returns a mixture of byte/unicode strings.
                  # (MComix bug #3424405)
                  [ i18n.to_unicode(fn) for fn in os.listdir(self.base_dir) ]
                  if should_accept(os.path.join(self.base_dir, filename)) ]

        if preferences.prefs['sort by'] == constants.SORT_NAME:
            tools.alphanumeric_sort(files)
        elif preferences.prefs['sort by'] == constants.SORT_LAST_MODIFIED:
            # Most recently modified file first.
            files.sort(key=os.path.getmtime, reverse=True)
        elif preferences.prefs['sort by'] == constants.SORT_SIZE:
            # Smallest file first.
            files.sort(key=os.path.getsize)
        # else: don't sort at all: use OS ordering.

        # Default is ascending.
        if preferences.prefs['sort order'] == constants.SORT_DESCENDING:
            files.reverse()

        return files
    except OSError:
        log.warning(u'! ' + _('Could not open %s: Permission denied.'),
                    self.base_dir)
        return []
def setup(self, src, dst, type=None):
    """Setup the extractor with archive <src> and destination dir <dst>.

    Return a threading.Condition related to the is_ready() method.
    Raise ArchiveException if the format of <src> isn't supported.
    (The previous docstring claimed None was returned for unsupported
    formats; the code has always raised instead.)

    @param src: path to the source archive.
    @param dst: directory the archive will be extracted into.
    @param type: optional archive MIME type; autodetected from <src>
        when not given. (Name shadows the builtin 'type'; kept for
        interface compatibility.)
    """
    self._src = src
    self._dst = dst
    self._type = type or archive_tools.archive_mime_type(src)
    self._files = []
    self._extracted = {}
    self._stop = False
    self._extract_thread = None
    self._condition = threading.Condition()
    self._archive = archive_tools.get_archive_handler(src)
    if self._archive:
        # Listing contents up front lets callers query the file set
        # before extraction starts.
        self._files = self._archive.list_contents()
        self._setupped = True
        return self._condition
    else:
        msg = _('Non-supported archive format: %s') % os.path.basename(src)
        log.warning(msg)
        raise ArchiveException(msg)
def register(self, name, bindings, callback, args=None, kwargs=None):
    """ Registers an action for a predefined keybinding name.
    @param name: Action name, defined in L{BINDING_INFO}.
    @param bindings: List of keybinding strings, as understood
                     by L{gtk.accelerator_parse}. Only used if no
                     bindings were loaded for this action.
    @param callback: Function callback
    @param args: List of arguments to pass to the callback
    @param kwargs: Dict of keyword arguments to pass to the callback.
    """
    global BINDING_INFO
    assert name in BINDING_INFO, "'%s' isn't a valid keyboard action." % name

    # BUG FIX: the defaults were the mutable literals [] and {}, which
    # Python evaluates once and shares across every call; any mutation
    # of a stored args/kwargs would have leaked between actions. Use
    # None sentinels and build fresh containers per call instead.
    if args is None:
        args = []
    if kwargs is None:
        kwargs = {}

    # Load stored keybindings, or fall back to passed arguments.
    # (Single lookup; the original called _get_bindings_for_action twice.)
    keycodes = self._get_bindings_for_action(name)
    if keycodes is None:
        keycodes = [gtk.accelerator_parse(binding) for binding in bindings]

    for keycode in keycodes:
        if keycode in self._callbacks:
            log.warning(_('Keybinding for "%(action)s" overrides hotkey for another action.'),
                        {"action": name})
        self._callbacks[keycode] = (name, callback, args, kwargs)
import datetime from mcomix import archive_tools from mcomix import constants from mcomix import thumbnail_tools from mcomix import log from mcomix import callback from mcomix.library import backend_types try: from sqlite3 import dbapi2 except ImportError: try: from pysqlite2 import dbapi2 except ImportError: log.warning( _('! Could neither find pysqlite2 nor sqlite3.') ) dbapi2 = None class _LibraryBackend: """The LibraryBackend handles the storing and retrieval of library data to and from disk. """ #: Current version of the library database structure. # See method _upgrade_database() for changes between versions. DB_VERSION = 3 def __init__(self):
# -*- coding: utf-8 -*- """ Unicode-aware wrapper for zipfile.ZipFile. """ import os from contextlib import closing from mcomix import log from mcomix.archive import archive_base # Try to use czipfile if available as it's much faster at decryption. try: import czipfile as zipfile except ImportError: log.warning('czipfile not available! using zipfile') import zipfile def is_py_supported_zipfile(path): """Check if a given zipfile has all internal files stored with Python supported compression """ # Use contextlib's closing for 2.5 compatibility with closing(zipfile.ZipFile(path, 'r')) as zip_file: for file_info in zip_file.infolist(): if file_info.compress_type not in (zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED): return False return True class ZipArchive(archive_base.NonUnicodeArchive): def __init__(self, archive): super(ZipArchive, self).__init__(archive)
import os import datetime from mcomix import log from mcomix import constants # This import is only used for legacy data that is imported # into the library at upgrade. try: from sqlite3 import dbapi2 except ImportError: try: from pysqlite2 import dbapi2 except ImportError: log.warning(_("! Could neither find pysqlite2 nor sqlite3.")) dbapi2 = None class LastReadPage(object): """ Automatically stores the last page the user read for all book files, and restores the page the next time the archive is opened. When the book is finished, the page will be cleared. If L{enabled} is set to C{false}, all methods will do nothing. This simplifies code in other places, as it does not have to check each time if the preference option to store pages automatically is enabled. """ def __init__(self, backend): """ Constructor.
"""process.py - Process spawning module.""" import gc import sys import os from mcomix import log from mcomix import i18n try: import subprocess32 as subprocess _using_subprocess32 = True except ImportError: log.warning('subprocess32 not available! using subprocess') import subprocess _using_subprocess32 = False NULL = open(os.devnull, 'wb') class Process: """The subprocess and popen2 modules in Python are broken (see issue #1336). The problem (i.e. complete crash) they can cause happen fairly often (once is too often) in MComix when calling "rar" or "unrar" to extract specific files from archives. We roll our own very simple process spawning module here instead. """ # TODO: I can no longer reproduce the issue. Check if this version of # process.py still solves it. def __init__(self, args):