def extract(self, filename, destination_dir):
    """Render the PDF page <filename> (e.g. "0003.pdf") into
    <destination_dir> using mudraw, picking the render DPI from the
    largest image embedded in the page.
    """
    self._create_directory(destination_dir)
    destination_path = os.path.join(destination_dir, filename)
    # Page number is the filename minus its 4-character extension.
    page_num = int(filename[0:-4])
    # Try to find optimal DPI.
    cmd = _mudraw_exec + _mudraw_trace_args + ["--", self.archive, str(page_num)]
    log.debug("finding optimal DPI for %s: %s", filename, " ".join(cmd))
    proc = process.popen(cmd)
    try:
        max_size = 0
        max_dpi = PDF_RENDER_DPI_DEF
        for line in proc.stdout:
            match = self._fill_image_regex.match(line)
            if not match:
                continue
            # Transformation matrix of the drawn image.
            matrix = [float(f) for f in match.group("matrix").split()]
            # Consider both dimensions of each image on the page.
            for size, coeff1, coeff2 in (
                (int(match.group("width")), matrix[0], matrix[1]),
                (int(match.group("height")), matrix[2], matrix[3]),
            ):
                if size < max_size:
                    continue
                # On-page size of the image in points (1/72 inch).
                render_size = math.sqrt(coeff1 * coeff1 + coeff2 * coeff2)
                dpi = int(size * 72 / render_size)
                if dpi > PDF_RENDER_DPI_MAX:
                    dpi = PDF_RENDER_DPI_MAX
                max_size = size
                max_dpi = dpi
    finally:
        # Always reap the trace process, even on parse errors.
        proc.stdout.close()
        proc.wait()
    # Render...
    cmd = _mudraw_exec + ["-r", str(max_dpi), "-o", destination_path, "--", self.archive, str(page_num)]
    log.debug("rendering %s: %s", filename, " ".join(cmd))
    process.call(cmd)
def get_thumbnail(self, page=None, width=128, height=128, create=False, nowait=False):
    """Return a thumbnail pixbuf of <page> that fit in a box with
    dimensions <width>x<height>. Return a thumbnail for the current
    page if <page> is None.

    If <create> is True, and <width>x<height> <= 128x128, the
    thumbnail is also stored on disk.

    If <nowait> is True, don't wait for <page> to be available.
    """
    if not self._wait_on_page(page, check_only=nowait):
        # Page is not available!
        return None
    path = self.get_path_to_page(page)
    # Fix: compare against None with "is", not "==" (PEP 8; "==" may
    # invoke an arbitrary __eq__ on the path object).
    if path is None:
        return None
    try:
        thumbnailer = thumbnail_tools.Thumbnailer(store_on_disk=create,
                                                  size=(width, height))
        return thumbnailer.thumbnail(path)
    except Exception:
        log.debug("Failed to create thumbnail for image `%s':\n%s",
                  path, traceback.format_exc())
        return image_tools.MISSING_IMAGE_ICON
def do_cacheing(self):
    """Make sure that the correct pixbufs are stored in cache. These
    are (in the current implementation) the current image(s), and if
    cacheing is enabled, also the one or two pixbufs before and after
    the current page. All other pixbufs are deleted and garbage
    collected directly in order to save memory.
    """
    if not self._window.filehandler.file_loaded:
        return
    # Flush caching orders.
    self._thread.clear_orders()
    # Get list of wanted pixbufs.
    wanted_pixbufs = self._ask_for_pages(self.get_current_page())
    if -1 != self._cache_pages:
        # We're not caching everything, remove old pixbufs.
        for index in set(self._raw_pixbufs) - set(wanted_pixbufs):
            del self._raw_pixbufs[index]
    log.debug("Caching page(s) %s",
              " ".join([str(index + 1) for index in wanted_pixbufs]))
    self._wanted_pixbufs = wanted_pixbufs
    # Start caching available images not already in cache.
    # Fix: idiomatic "index not in" (instead of "not index in") and
    # enumerate() directly builds the (priority, index) order list.
    wanted_pixbufs = [
        index for index in wanted_pixbufs
        if index in self._available_images and index not in self._raw_pixbufs
    ]
    orders = list(enumerate(wanted_pixbufs))
    if orders:
        self._thread.extend_orders(orders)
def iter_extract(self, entries, destination_dir):
    """Extract <entries> below <destination_dir>, yielding each entry
    name as soon as its file has been extracted.
    """
    if not self._contents_listed:
        self.list_contents()
    # Unfortunately we can't just rely on BaseArchive default
    # implementation if solid archives are to be correctly supported:
    # we need to call iter_extract (not extract) for each archive ourselves.
    wanted = set(entries)
    for archive in self._archive_list:
        # Map archive-internal names back to the public entry names.
        archive_wanted = {}
        for name in wanted:
            name_archive, name_archive_name = self._entry_mapping[name]
            if name_archive == archive:
                archive_wanted[name_archive_name] = name
        if 0 == len(archive_wanted):
            continue
        # Entries of a nested archive live below its own root directory.
        root = self._archive_root[archive]
        archive_destination_dir = destination_dir
        if root is not None:
            archive_destination_dir = os.path.join(destination_dir, root)
        # NOTE(review): this logs the archive object itself; the sibling
        # implementation logs archive.archive — confirm which is intended.
        log.debug('extracting from %s to %s: %s',
                  archive, archive_destination_dir,
                  ' '.join(archive_wanted.keys()))
        for f in archive.iter_extract(archive_wanted.keys(), archive_destination_dir):
            yield archive_wanted[f]
        # Stop early once every requested entry has been produced.
        wanted -= set(archive_wanted.values())
        if 0 == len(wanted):
            break
def _ask_for_pages(self, page):
    """Ask for pages around <page> to be given priority extraction.

    Returns the list of 0-based page indices considered, ordered by
    extraction priority.
    """
    files = []
    # The number of pages shown at once sets the window step size.
    if prefs["default double page"]:
        page_width = 2
    else:
        page_width = 1
    if 0 == self._cache_pages:
        # Only ask for current page.
        num_pages = page_width
    elif -1 == self._cache_pages:
        # Ask for 10 pages.
        num_pages = min(10, self.get_number_of_pages())
    else:
        num_pages = self._cache_pages
    # Window starts one "screen" before the current page.
    page_list = [page - 1 - page_width + n for n in xrange(num_pages)]
    # Current and next page first, followed by previous page.
    previous_page = page_list[0:page_width]
    del page_list[0:page_width]
    page_list[2 * page_width : 2 * page_width] = previous_page
    # Drop indices outside the document bounds.
    page_list = [index for index in page_list if index >= 0 and index < len(self._image_files)]
    log.debug("Ask for priority extraction around page %u: %s", page, " ".join([str(n + 1) for n in page_list]))
    # Request extraction only for pages not yet available.
    for index in page_list:
        if index not in self._available_images:
            files.append(self._image_files[index])
    if len(files) > 0:
        self._window.filehandler._ask_for_files(files)
    return page_list
def terminate_program(self, save_current_file=False):
    """Run clean-up tasks and exit the program."""
    # NOTE(review): save_current_file is unused in this body — confirm
    # whether callers still pass it.
    # Hide the window first so shutdown feels immediate.
    self.hide()
    if gtk.main_level() > 0:
        gtk.main_quit()
    # Remember the open file/page so it can be restored on next start.
    if prefs['auto load last file'] and self.filehandler.file_loaded:
        prefs['path to last file'] = self.imagehandler.get_real_path()
        prefs['page of last file'] = self.imagehandler.get_current_page()
    else:
        prefs['path to last file'] = ''
        prefs['page of last file'] = 1
    # Don't persist "hide all" if it was only forced by fullscreen mode.
    if prefs['hide all'] and self.hide_all_forced and self.fullscreen:
        prefs['hide all'] = False
    self.write_config_files()
    self.filehandler.cleanup()
    self.imagehandler.cleanup()
    self.thumbnailsidebar.clear()
    if main_dialog._dialog is not None:
        main_dialog._dialog.close()
    backend.LibraryBackend().close()
    # This hack is to avoid Python issue #1856.
    for thread in threading.enumerate():
        if thread is not threading.currentThread():
            log.debug('Waiting for thread %s to finish before exit', thread)
            thread.join()
def _extract_files_errcb(self, name, etype, value, tb):
    """Error callback for a failed extraction: log it and carry on."""
    # Better to ignore any failed extractions (e.g. from a corrupt
    # archive) than to crash here and leave the main thread in a
    # possible infinite block. Damaged or missing files *should* be
    # handled gracefully by the main program anyway.
    formatted_tb = ''.join(traceback.format_tb(tb)).strip()
    log.error(_('! Extraction error: %s'), value)
    log.debug('Traceback:\n%s', formatted_tb)
def __run_callbacks(self, *args, **kwargs):
    """Invoke every registered callback with the given arguments,
    skipping bound methods whose instance has been garbage collected
    and logging (but swallowing) any exception a callback raises.
    """
    for obj_ref, func in self.__callbacks:
        if obj_ref is None:
            # Plain function: call it as-is.
            target = func
        elif obj_ref() is not None:
            # Bound method whose instance is still alive: re-bind the
            # underlying function to the dereferenced object.
            target = func.__get__(obj_ref())
        else:
            # Bound method, but the instance is gone: skip it.
            target = None
        if not target:
            continue
        try:
            target(*args, **kwargs)
        except Exception as e:
            log.error(_('! Callback %(function)r failed: %(error)s'),
                      {'function': target, 'error': e})
            log.debug('Traceback:\n%s', traceback.format_exc())
def do_cacheing(self):
    '''Make sure that the correct pixbufs are stored in cache. These
    are (in the current implementation) the current image(s), and
    if cacheing is enabled, also the one or two pixbufs before and
    after the current page. All other pixbufs are deleted and garbage
    collected directly in order to save memory.
    '''
    # Non-blocking acquire: if another thread is already refreshing
    # the cache, skip this round rather than queue up behind it.
    if not self._lock.acquire(blocking=False):
        return
    try:
        if not self._window.filehandler.file_loaded:
            return
        # Get list of wanted pixbufs.
        wanted_pixbufs = self._ask_for_pages(self.get_current_page())
        if -1 != self._cache_pages:
            # We're not caching everything, remove old pixbufs.
            for index in set(self._raw_pixbufs) - set(wanted_pixbufs):
                del self._raw_pixbufs[index]
        log.debug('Caching page(s) %s',
                  ' '.join([str(index + 1) for index in wanted_pixbufs]))
        # Slice-assign so other holders of the list object see the update.
        self._wanted_pixbufs[:] = wanted_pixbufs
        # Start caching available images not already in cache.
        wanted_pixbufs = [index for index in wanted_pixbufs
                          if index in self._available_images]
        self._thread.map_async(self._cache_pixbuf, wanted_pixbufs)
    finally:
        self._lock.release()
def load_pixbuf_data(imgdata): """ Loads a pixbuf from the data passed in <imgdata>. """ # TODO similar to load_pixbuf, should be merged using callbacks etc. pixbuf = None last_error = None for provider in (constants.IMAGEIO_GDKPIXBUF, constants.IMAGEIO_PIL): try: # TODO use dynamic dispatch instead of "if" chain if provider == constants.IMAGEIO_GDKPIXBUF: loader = gtk.gdk.PixbufLoader() loader.write(imgdata, len(imgdata)) loader.close() pixbuf = loader.get_pixbuf() elif provider == constants.IMAGEIO_PIL: pixbuf = pil_to_pixbuf(Image.open(StringIO(imgdata)), keep_orientation=True) else: raise TypeError() except Exception, e: # current provider could not load image last_error = e if pixbuf is not None: # stop loop on success log.debug("provider %s succeeded in decoding %s bytes", provider, len(imgdata)) break log.debug("provider %s failed to decode %s bytes", provider, len(imgdata))
def iter_extract(self, entries, destination_dir):
    """Extract <entries> below <destination_dir>, yielding each entry
    name as soon as its file has been extracted. Nested sub-archives
    are extracted too, but their names are not yielded.
    """
    if not self._contents_listed:
        self.list_contents()
    # Unfortunately we can't just rely on BaseArchive default
    # implementation if solid archives are to be correctly supported:
    # we need to call iter_extract (not extract) for each archive ourselves.
    wanted = set(entries) | self._sub_archives
    for archive in self._archive_list:
        # Map archive-internal names back to the public entry names.
        archive_wanted = {}
        for name in wanted:
            name_archive, name_archive_name = self._entry_mapping[name]
            if name_archive == archive:
                archive_wanted[name_archive_name] = name
        if 0 == len(archive_wanted):
            continue
        # Entries of a nested archive live below its own root directory.
        root = self._archive_root[archive]
        archive_destination_dir = destination_dir
        if root is not None:
            archive_destination_dir = os.path.join(destination_dir, root)
        log.debug('extracting from %s to %s: %s',
                  archive.archive, archive_destination_dir,
                  ' '.join(archive_wanted.keys()))
        for f in archive.iter_extract(archive_wanted.keys(), archive_destination_dir):
            name = archive_wanted[f]
            if name in self._sub_archives:
                # Sub-archives are only extracted for later processing.
                continue
            yield name
        # Stop early once every requested entry has been produced.
        wanted -= set(archive_wanted.values())
        if 0 == len(wanted):
            break
def do_cacheing(self):
    """Make sure that the correct pixbufs are stored in cache. These
    are (in the current implementation) the current image(s), and if
    cacheing is enabled, also the one or two pixbufs before and after
    the current page. All other pixbufs are deleted and garbage
    collected directly in order to save memory.
    """
    if not self._window.filehandler.file_loaded:
        return
    # Drop any caching work still queued from a previous call.
    self._thread.clear_orders()
    # Determine which page indices we want cached right now.
    wanted = self._ask_for_pages(self.get_current_page())
    if self._cache_pages != -1:
        # Not caching everything: evict pixbufs that fell out of the window.
        stale = set(self._raw_pixbufs) - set(wanted)
        for idx in stale:
            del self._raw_pixbufs[idx]
    log.debug('Caching page(s) %s',
              ' '.join([str(index + 1) for index in wanted]))
    self._wanted_pixbufs = wanted
    # Queue caching for pages already extracted but not yet decoded.
    pending = [idx for idx in wanted
               if idx in self._available_images
               and idx not in self._raw_pixbufs]
    orders = [(priority, idx) for priority, idx in enumerate(pending)]
    if len(orders) > 0:
        self._thread.extend_orders(orders)
def terminate_program(self):
    """Run clean-up tasks and exit the program."""
    # Hide the window first so shutdown feels immediate.
    self.hide()
    if gtk.main_level() > 0:
        gtk.main_quit()
    # Remember the open file/page so it can be restored on next start.
    if prefs['auto load last file'] and self.filehandler.file_loaded:
        prefs['path to last file'] = self.imagehandler.get_real_path()
        prefs['page of last file'] = self.imagehandler.get_current_page()
    else:
        prefs['path to last file'] = ''
        prefs['page of last file'] = 1
    # Don't persist "hide all" if it was only forced by fullscreen mode.
    if prefs['hide all'] and self.hide_all_forced and self.fullscreen:
        prefs['hide all'] = False
    self.write_config_files()
    self.filehandler.close_file()
    if main_dialog._dialog is not None:
        main_dialog._dialog.close()
    backend.LibraryBackend().close()
    # This hack is to avoid Python issue #1856.
    for thread in threading.enumerate():
        if thread is not threading.currentThread():
            log.debug('Waiting for thread %s to finish before exit', thread)
            thread.join()
def is_7z_support_rar():
    '''Check whether p7zip has Rar.so, which is needed to Rar format'''
    # Probe only once; the result is memoized in the module-level dict.
    if 'support_rar' not in _7z_executable:
        _7z_executable['support_rar'] = _has_rar_so()
        if _7z_executable['support_rar']:
            log.debug('rar format supported by 7z')
        else:
            log.debug('rar format not supported by 7z')
    return _7z_executable['support_rar']
def _extract_file(self, name):
    '''Extract the file named <name> to the destination directory, mark
    the file as "ready", then signal a notify() on the Condition
    returned by setup().
    '''
    src, dst = self._src, self._dst
    log.debug('Extracting from "%s" to "%s": "%s"', src, dst, name)
    self._archive.extract(name)
    return name
def extract(self, filename, destination_dir):
    """Extract the single entry <filename> by delegating to the
    backing archive it maps to, adjusting the destination for that
    archive's root directory.
    """
    if not self._contents_listed:
        self.list_contents()
    archive, name = self._entry_mapping[filename]
    root = self._archive_root[archive]
    # Nested archives extract below their own root sub-directory.
    target_dir = destination_dir if root is None \
        else os.path.join(destination_dir, root)
    log.debug('extracting from %s to %s: %s',
              archive.archive, target_dir, filename)
    archive.extract(name, target_dir)
def _extract_all_files(self):
    """Extract every requested file not yet extracted, stopping early
    once extraction is reported finished."""
    # With multiple extractions for each pass, some of the files might have
    # already been extracted.
    with self._condition:
        files = list(set(self._files) - self._extracted)
    log.debug('Extracting from "%s" to "%s": "%s"', self._src, self._dst, '", "'.join(files))
    for name in self._archive.iter_extract(files, self._dst):
        if self._extraction_finished(name):
            return
def page_available(self, page):
    ''' Called whenever a new page becomes available, i.e. the
    corresponding file has been extracted. '''
    log.debug('Page %u is available', page)
    index = page - 1
    # A page must only be reported available once.
    assert index not in self._available_images
    self._cache_lock[index] = mt.Lock()
    self._available_images.add(index)
    # Cache it right away when it is wanted or we cache everything.
    if -1 == self._cache_pages or index in self._wanted_pixbufs:
        self._thread.apply_async(self._cache_pixbuf, (index, ))
def is_available():
    """Return True if MuPDF is usable for PDF support, probing for the
    mutool/mudraw executables on first call and caching the result."""
    global _pdf_possible
    if _pdf_possible is not None:
        # Result cached from a previous probe.
        return _pdf_possible
    global _mutool_exec, _mudraw_exec, _mudraw_trace_args
    mutool = process.find_executable((u'mutool', ))
    _pdf_possible = False
    version = None
    if mutool is None:
        log.debug('mutool executable not found')
    else:
        _mutool_exec = [mutool]
        # Find MuPDF version; assume 1.6 version since
        # the '-v' switch is only supported from 1.7 onward...
        version = '1.6'
        proc = process.popen([mutool, '-v'], stdout=process.NULL, stderr=process.PIPE)
        try:
            # Version string is printed on stderr.
            output = proc.stderr.read()
            if output.startswith('mutool version '):
                version = output[15:].rstrip()
        finally:
            proc.stderr.close()
            proc.wait()
        version = LooseVersion(version)
        if version >= LooseVersion('1.8'):
            # Mutool executable with draw support.
            _mudraw_exec = [mutool, 'draw']
            _mudraw_trace_args = ['-F', 'trace']
            _pdf_possible = True
        else:
            # Separate mudraw executable.
            mudraw = process.find_executable((u'mudraw', ))
            if mudraw is None:
                log.debug('mudraw executable not found')
            else:
                _mudraw_exec = [mudraw]
                # Trace output flag changed between 1.6 and 1.7.
                if version >= LooseVersion('1.7'):
                    _mudraw_trace_args = ['-F', 'trace']
                else:
                    _mudraw_trace_args = ['-x']
                _pdf_possible = True
    if _pdf_possible:
        log.info('Using MuPDF version: %s', version)
        log.debug('mutool: %s', ' '.join(_mutool_exec))
        log.debug('mudraw: %s', ' '.join(_mudraw_exec))
        log.debug('mudraw trace arguments: %s', ' '.join(_mudraw_trace_args))
    else:
        log.info('MuPDF not available.')
    return _pdf_possible
def _get_unrar_dll():
    ''' Tries to load libunrar and will return a handle of it.
    Returns None if an error occured or the library couldn't be found. '''
    # Load the library once and memoize the handle (possibly None).
    if 'context' not in _unrar_dll:
        dll = _get_unrar_dll_win32(
        ) if sys.platform == 'win32' else _get_unrar_dll_unix()
        _unrar_dll['context'] = dll
        if dll is not None:
            # Declare the ctypes prototype before the first call.
            dll.RARGetDllVersion.restype = ctypes.c_int
            dll.RARGetDllVersion.argtypes = []
            log.debug('unrar dll version: %d', dll.RARGetDllVersion())
    return _unrar_dll['context']
def is_available():
    """Return True if MuPDF is usable for PDF support, probing for the
    mutool/mudraw executables on first call and caching the result."""
    global _pdf_possible
    if _pdf_possible is not None:
        # Result cached from a previous probe.
        return _pdf_possible
    mutool = process.find_executable(('mutool', ))
    _pdf_possible = False
    version = None
    if mutool is None:
        log.debug('mutool executable not found')
    else:
        _mutool_exec.append(mutool)
        # Find MuPDF version; assume 1.6 version since
        # the '-v' switch is only supported from 1.7 onward...
        version = (1, 6)
        with process.popen([mutool, '-v'],
                           stdout=process.NULL,
                           stderr=process.PIPE,
                           universal_newlines=True) as proc:
            # Version string is printed on stderr.
            output = re.match(
                r'mutool version '
                r'(?P<version>[\d.]+)([^\d].*)?',
                proc.stderr.read())
            if output:
                version = tuple(
                    map(int, output.group('version').split('.')))
        if version >= (1, 8):
            # Mutool executable with draw support.
            _mudraw_exec.extend((mutool, 'draw', '-q'))
            _mudraw_trace_args.extend(('-F', 'trace'))
            _pdf_possible = True
        else:
            # Separate mudraw executable.
            mudraw = process.find_executable(('mudraw', ))
            if mudraw is None:
                log.debug('mudraw executable not found')
            else:
                _mudraw_exec.append(mudraw)
                # Trace output flag changed between 1.6 and 1.7.
                if version >= (1, 7):
                    _mudraw_trace_args.extend(('-F', 'trace'))
                else:
                    _mudraw_trace_args.append('-x')
                _pdf_possible = True
    if _pdf_possible:
        log.info('Using MuPDF version: %s', '.'.join(map(str, version)))
        log.debug('mutool: %s', ' '.join(_mutool_exec))
        log.debug('mudraw: %s', ' '.join(_mudraw_exec))
        log.debug('mudraw trace arguments: %s', ' '.join(_mudraw_trace_args))
    else:
        log.info('MuPDF not available.')
    return _pdf_possible
def is_available():
    """Return True if MuPDF is usable for PDF support, probing for the
    mutool/mudraw executables on first call and caching the result."""
    global _pdf_possible
    if _pdf_possible is not None:
        # Result cached from a previous probe.
        return _pdf_possible
    global _mutool_exec, _mudraw_exec, _mudraw_trace_args
    mutool = process.find_executable((u"mutool",))
    _pdf_possible = False
    version = None
    if mutool is None:
        log.debug("mutool executable not found")
    else:
        _mutool_exec = [mutool]
        # Find MuPDF version; assume 1.6 version since
        # the '-v' switch is only supported from 1.7 onward...
        version = "1.6"
        proc = process.popen([mutool, "-v"], stdout=process.NULL, stderr=process.PIPE)
        try:
            # Version string is printed on stderr.
            output = proc.stderr.read()
            if output.startswith("mutool version "):
                version = output[15:].rstrip()
        finally:
            proc.stderr.close()
            proc.wait()
        version = LooseVersion(version)
        if version >= LooseVersion("1.8"):
            # Mutool executable with draw support.
            _mudraw_exec = [mutool, "draw"]
            _mudraw_trace_args = ["-F", "trace"]
            _pdf_possible = True
        else:
            # Separate mudraw executable.
            mudraw = process.find_executable((u"mudraw",))
            if mudraw is None:
                log.debug("mudraw executable not found")
            else:
                _mudraw_exec = [mudraw]
                # Trace output flag changed between 1.6 and 1.7.
                if version >= LooseVersion("1.7"):
                    _mudraw_trace_args = ["-F", "trace"]
                else:
                    _mudraw_trace_args = ["-x"]
                _pdf_possible = True
    if _pdf_possible:
        log.info("Using MuPDF version: %s", version)
        log.debug("mutool: %s", " ".join(_mutool_exec))
        log.debug("mudraw: %s", " ".join(_mudraw_exec))
        log.debug("mudraw trace arguments: %s", " ".join(_mudraw_trace_args))
    else:
        log.info("MuPDF not available.")
    return _pdf_possible
def is_available():
    """Return True if MuPDF is usable for PDF support, probing for the
    mutool/mudraw executables on first call and caching the result."""
    global _pdf_possible
    if _pdf_possible is not None:
        # Result cached from a previous probe.
        return _pdf_possible
    mutool = process.find_executable(('mutool', ))
    _pdf_possible = False
    version = None
    if mutool is None:
        log.debug('mutool executable not found')
    else:
        _mutool_exec.append(mutool)
        # Find MuPDF version; assume 1.6 version since
        # the '-v' switch is only supported from 1.7 onward...
        version = '1.6'
        with process.popen([mutool, '-v'],
                           stdout=process.NULL,
                           stderr=process.PIPE,
                           universal_newlines=True) as proc:
            # Version string is printed on stderr.
            output = proc.stderr.read()
            if output.startswith('mutool version '):
                version = output[15:].rstrip()
        version = LooseVersion(version)
        if version >= LooseVersion('1.8'):
            # Mutool executable with draw support.
            _mudraw_exec.extend((mutool, 'draw', '-q'))
            _mudraw_trace_args.extend(('-F', 'trace'))
            _pdf_possible = True
        else:
            # Separate mudraw executable.
            mudraw = process.find_executable(('mudraw', ))
            if mudraw is None:
                log.debug('mudraw executable not found')
            else:
                _mudraw_exec.append(mudraw)
                # Trace output flag changed between 1.6 and 1.7.
                if version >= LooseVersion('1.7'):
                    _mudraw_trace_args.extend(('-F', 'trace'))
                else:
                    _mudraw_trace_args.append('-x')
                _pdf_possible = True
    if _pdf_possible:
        log.info('Using MuPDF version: %s', version)
        log.debug('mutool: %s', ' '.join(_mutool_exec))
        log.debug('mudraw: %s', ' '.join(_mudraw_exec))
        log.debug('mudraw trace arguments: %s', ' '.join(_mudraw_trace_args))
    else:
        log.info('MuPDF not available.')
    return _pdf_possible
def _cache_pixbuf(self, index, force=False):
    """Load the pixbuf for page <index> (0-based) into the raw cache.

    Unless <force> is True, the load is skipped when the page is no
    longer in the wanted set by the time we get here.
    """
    self._wait_on_page(index + 1)
    # Per-page lock: only one thread decodes a given page at a time.
    with self._cache_lock[index]:
        if index in self._raw_pixbufs:
            # Already cached (e.g. by a concurrent caching order).
            return
        with self._lock:
            if not force and index not in self._wanted_pixbufs:
                return
        log.debug('Caching page %u', index + 1)
        try:
            pixbuf = image_tools.load_pixbuf(self._image_files[index])
            tools.garbage_collect()
        except Exception as e:
            log.error('Could not load pixbuf for page %u: %r', index + 1, e)
            pixbuf = image_tools.MISSING_IMAGE_ICON
        self._raw_pixbufs[index] = pixbuf
def _get_pixbuf(self, index):
    """Return the pixbuf indexed by <index> from cache.
    Pixbufs not found in cache are fetched from disk first.
    """
    # NOTE(review): within this view nothing is returned after the
    # load — presumably the function continues (returning the cached
    # pixbuf) beyond this span; confirm against the full file.
    pixbuf = constants.MISSING_IMAGE_ICON
    if index not in self._raw_pixbufs:
        self._wait_on_page(index + 1)
        try:
            pixbuf = image_tools.load_pixbuf(self._image_files[index])
            self._raw_pixbufs[index] = pixbuf
            tools.garbage_collect()
        except Exception, e:
            # Cache the placeholder so we don't retry a broken file.
            self._raw_pixbufs[index] = constants.MISSING_IMAGE_ICON
            log.debug('Could not load pixbuf for page %d: %r', index, e)
def page_available(self, page):
    """ Called whenever a new page becomes available, i.e. the
    corresponding file has been extracted. """
    log.debug('Page %u is available', page)
    index = page - 1
    # A page must only be reported available once.
    assert index not in self._available_images
    self._available_images.add(index)
    # Decide whether (and how urgently) the page should be cached.
    if index in self._wanted_pixbufs:
        # Wanted pages keep their position in the wanted list.
        priority = self._wanted_pixbufs.index(index)
    elif self._cache_pages == -1:
        # Caching everything: queue behind all wanted pages.
        priority = self.get_number_of_pages()
    else:
        priority = None
    if priority is not None:
        self._thread.append_order((priority, index))
def _find_unrar_executable():
    ''' Tries to start rar/unrar, and returns either 'rar' or 'unrar' if
    one of them was started successfully.
    Returns None if neither could be started. '''
    # Probe only once; the result is memoized in the module-level dict.
    if 'path' not in _rar_executable:
        path = process.find_executable(('unrar-nonfree', 'unrar', 'rar'),
                                       is_valid_candidate=_is_not_unrar_free)
        _rar_executable['path'] = path
        if path is not None:
            with process.popen([path], text=True) as proc:
                # only check first line
                line = proc.stdout.read().strip().splitlines()[0].split()
                if line[0]=='UNRAR':
                    log.debug('unrar version: %s', line[1])
    return _rar_executable['path']
def _find_mupdf():
    """Probe for the MuPDF tools, recording the outcome in _mupdf."""
    # NOTE(review): this span ends right after the not-found early
    # return — the success path presumably continues beyond this view.
    # Reset all detection state before probing.
    _mupdf['found'] = False
    _mupdf['version'] = None
    _mupdf['mutool'] = []
    _mupdf['mudraw'] = []
    _mupdf['mudraw_trace_args'] = []
    if (mutool := process.find_executable(('mutool', ))) is None:
        # log.debug returns None, so this logs and returns in one step.
        return log.debug('mutool executable not found')
def page_available(self, page):
    """ Called whenever a new page becomes available, i.e. the corresponding file has been extracted. """
    log.debug("Page %u is available", page)
    index = page - 1
    # A page must only be reported available once.
    assert index not in self._available_images
    self._available_images.add(index)
    # Check if we need to cache it.
    priority = None
    if index in self._wanted_pixbufs:
        # In the list of wanted pixbufs.
        priority = self._wanted_pixbufs.index(index)
    elif -1 == self._cache_pages:
        # We're caching everything.
        priority = self.get_number_of_pages()
    if priority is not None:
        self._thread.append_order((priority, index))
def _extract_file(self, name):
    """Extract the file named <name> to the destination directory,
    mark the file as "ready", then signal a notify() on the Condition
    returned by setup().
    """
    try:
        log.debug(u'Extracting from "%s" to "%s": "%s"', self._src, self._dst, name)
        self._archive.extract(name, self._dst)
    except Exception, ex:
        # Better to ignore any failed extractions (e.g. from a corrupt
        # archive) than to crash here and leave the main thread in a
        # possible infinite block. Damaged or missing files *should* be
        # handled gracefully by the main program anyway.
        log.error(_('! Extraction error: %s'), ex)
        log.debug('Traceback:\n%s', traceback.format_exc())
def _run(self):
    """Worker thread main loop: pop orders off the waiting queue and
    process them until asked to stop."""
    order = None
    while True:
        with self._condition:
            if order is not None:
                # Previous order fully processed: unregister it.
                self._processing_orders.remove(order)
            # Sleep until there is work or we are told to stop.
            while not self._stop and 0 == len(self._waiting_orders):
                self._condition.wait()
            if self._stop:
                return
            order = self._waiting_orders.pop(0)
            self._processing_orders.append(order)
        # Process outside the lock so producers are not blocked.
        try:
            self._process_order(order)
        except Exception, e:
            log.error(_('! Worker thread processing %(function)r failed: %(error)s'),
                      { 'function' : self._process_order, 'error' : e })
            log.debug('Traceback:\n%s', traceback.format_exc())
def _wait_on_page(self, page, check_only=False):
    """Block the running (main) thread until the file corresponding
    to image <page> has been fully extracted.
    If <check_only> is True, only check (and return status), don't wait.
    """
    index = page - 1
    if index in self._available_images:
        # Already extracted!
        return True
    if check_only:
        # Asked for check only...
        return False
    log.debug('Waiting for page %u', page)
    page_path = self.get_path_to_page(page)
    self._window.filehandler._wait_on_file(page_path)
    return True
def _run(self):
    """Worker thread main loop: pop orders off the queue and process
    them until asked to stop."""
    order_uid = None
    while True:
        with self._condition:
            if order_uid is not None:
                # Previous order fully processed: drop its UID so the
                # same order may be queued again.
                self._orders_set.remove(order_uid)
            # Sleep until there is work or we are told to stop.
            while not self._stop and 0 == len(self._orders_queue):
                self._condition.wait()
            if self._stop:
                return
            order = self._orders_queue.pop(0)
            if self._unique_orders:
                order_uid = self._order_uid(order)
        # Process outside the lock so producers are not blocked.
        try:
            self._process_order(order)
        except Exception as e:
            log.error(_('! Worker thread processing %(function)r failed: %(error)s'),
                      { 'function' : self._process_order, 'error' : e })
            log.debug('Traceback:\n%s', traceback.format_exc())
def extract(self, filename, destination_dir):
    """Render the PDF page <filename> into <destination_dir> using
    mudraw, picking the render DPI from the largest image embedded in
    the page. Returns the path of the rendered file.
    """
    self._create_directory(destination_dir)
    destination_path = os.path.join(destination_dir, filename)
    # Page number is the filename stem (e.g. "0003.pdf" -> "0003").
    # Fix: drop the unused "ext" local and the redundant str() calls
    # (the stem is already a string).
    page_num = os.path.splitext(filename)[0]
    # Try to find optimal DPI.
    cmd = _mupdf['mudraw'] + _mupdf['mudraw_trace_args'] + [
        '--', self.archive, page_num
    ]
    log.debug('finding optimal DPI for %s: %s', filename, ' '.join(cmd))
    with process.popen(cmd, text=True) as proc:
        max_size = 0
        max_dpi = PDF_RENDER_DPI_DEF
        for line in proc.stdout:
            match = self._fill_image_regex.match(line)
            if not match:
                continue
            # Transformation matrix of the drawn image.
            matrix = [float(f) for f in match.group('matrix').split()]
            # Consider both dimensions of each image on the page.
            for size, coeff1, coeff2 in (
                (int(match.group('width')), matrix[0], matrix[1]),
                (int(match.group('height')), matrix[2], matrix[3]),
            ):
                if size < max_size:
                    continue
                # On-page size of the image in points (1/72 inch).
                render_size = math.sqrt(coeff1 * coeff1 + coeff2 * coeff2)
                dpi = int(size * 72 / render_size)
                if dpi > PDF_RENDER_DPI_MAX:
                    dpi = PDF_RENDER_DPI_MAX
                max_size = size
                max_dpi = dpi
    # Render...
    cmd = _mupdf['mudraw'] + [
        '-r', str(max_dpi), '-o', destination_path,
        '--', self.archive, page_num
    ]
    log.debug('rendering %s: %s', filename, ' '.join(cmd))
    with process.popen(cmd, stdout=process.NULL) as proc:
        # Register the process so it can be terminated externally.
        with self._pdf_procs_lock:
            self._pdf_procs[(pid := proc.pid)] = proc
        proc.wait()
        with self._pdf_procs_lock:
            self._pdf_procs.pop(pid)
    return destination_path
def _extract_file(self, name):
    """Extract the file named <name> to the destination directory,
    mark the file as "ready", then signal a notify() on the Condition
    returned by setup().
    """
    if self._stop:
        # Extraction was cancelled: release resources and bail out.
        self.close()
        return
    try:
        dst_path = os.path.join(self._dst, name)
        log.debug(u'Extracting "%s" to "%s"', name, dst_path)
        self._archive.extract(name, dst_path)
    except Exception, ex:
        # Better to ignore any failed extractions (e.g. from a corrupt
        # archive) than to crash here and leave the main thread in a
        # possible infinite block. Damaged or missing files *should* be
        # handled gracefully by the main program anyway.
        log.error(_('! Extraction error: %s'), ex)
def extract(self, filename, destination_dir):
    """Render the PDF page <filename> (e.g. "0003.pdf") into
    <destination_dir> using mudraw, picking the render DPI from the
    largest image embedded in the page.
    """
    self._create_directory(destination_dir)
    destination_path = os.path.join(destination_dir, filename)
    # Page number is the filename minus its 4-character extension.
    page_num = int(filename[0:-4])
    # Try to find optimal DPI.
    proc = process.Process(['mudraw', '-x', '--', self.pdf, str(page_num)])
    fd = proc.spawn()
    max_size = 0
    max_dpi = PDF_RENDER_DPI_DEF
    if fd is not None:
        for line in fd:
            match = self._fill_image_regex.match(line)
            if not match:
                continue
            # Transformation matrix of the drawn image.
            matrix = [float(f) for f in match.group('matrix').split()]
            # Consider both dimensions of each image on the page.
            for size, coeff1, coeff2 in (
                (int(match.group('width')), matrix[0], matrix[1]),
                (int(match.group('height')), matrix[2], matrix[3]),
            ):
                if size < max_size:
                    continue
                # On-page size of the image in points (1/72 inch).
                render_size = math.sqrt(coeff1 * coeff1 + coeff2 * coeff2)
                dpi = int(size * 72 / render_size)
                if dpi > PDF_RENDER_DPI_MAX:
                    dpi = PDF_RENDER_DPI_MAX
                max_size = size
                max_dpi = dpi
        fd.close()
        proc.wait()
    # Render...
    cmd = [
        'mudraw', '-r', str(max_dpi), '-o', destination_path, '--',
        self.pdf, str(page_num)
    ]
    # Fix: pass lazy %-style arguments to log.debug instead of eagerly
    # formatting the message with "%" (only built when debug is on).
    log.debug('rendering %s: %s', filename, ' '.join(cmd))
    proc = process.Process(cmd)
    fd = proc.spawn()
    if fd is not None:
        fd.close()
        proc.wait()
def extract(self, filename, destination_dir):
    """Render the PDF page <filename> (e.g. "0003.pdf") into
    <destination_dir> using mudraw, picking the render DPI from the
    largest image embedded in the page.
    """
    self._create_directory(destination_dir)
    destination_path = os.path.join(destination_dir, filename)
    # Page number is the filename minus its 4-character extension.
    page_num = int(filename[0:-4])
    # Try to find optimal DPI.
    cmd = _mudraw_exec + _mudraw_trace_args + [
        '--', self.archive, str(page_num)
    ]
    log.debug('finding optimal DPI for %s: %s', filename, ' '.join(cmd))
    proc = process.popen(cmd)
    try:
        max_size = 0
        max_dpi = PDF_RENDER_DPI_DEF
        for line in proc.stdout:
            match = self._fill_image_regex.match(line)
            if not match:
                continue
            # Transformation matrix of the drawn image.
            matrix = [float(f) for f in match.group('matrix').split()]
            # Consider both dimensions of each image on the page.
            for size, coeff1, coeff2 in (
                (int(match.group('width')), matrix[0], matrix[1]),
                (int(match.group('height')), matrix[2], matrix[3]),
            ):
                if size < max_size:
                    continue
                # On-page size of the image in points (1/72 inch).
                render_size = math.sqrt(coeff1 * coeff1 + coeff2 * coeff2)
                dpi = int(size * 72 / render_size)
                if dpi > PDF_RENDER_DPI_MAX:
                    dpi = PDF_RENDER_DPI_MAX
                max_size = size
                max_dpi = dpi
    finally:
        # Always reap the trace process, even on parse errors.
        proc.stdout.close()
        proc.wait()
    # Render...
    cmd = _mudraw_exec + [
        '-r', str(max_dpi), '-o', destination_path, '--',
        self.archive, str(page_num)
    ]
    log.debug('rendering %s: %s', filename, ' '.join(cmd))
    process.call(cmd)
def _ask_for_pages(self, page):
    """Ask for pages around <page> to be given priority extraction.

    Returns the list of 0-based page indices considered, ordered by
    extraction priority. Files not yet available are requested from
    the file handler.
    """
    files = []
    # The number of pages shown at once sets the window step size.
    if prefs['default double page']:
        page_width = 2
    else:
        page_width = 1
    if 0 == self._cache_pages:
        # Only ask for current page.
        num_pages = page_width
    elif -1 == self._cache_pages:
        # Ask for 10 pages.
        num_pages = min(10, self.get_number_of_pages())
    else:
        num_pages = self._cache_pages
    # Window starts one "screen" before the current page.
    page_list = [page - 1 - page_width + n for n in xrange(num_pages)]
    # Current and next page first, followed by previous page.
    previous_page = page_list[0:page_width]
    del page_list[0:page_width]
    page_list[2 * page_width:2 * page_width] = previous_page
    # Drop indices outside the document bounds.
    page_list = [
        index for index in page_list
        if index >= 0 and index < len(self._image_files)
    ]
    log.debug('Ask for priority extraction around page %u: %s', page,
              ' '.join([str(n + 1) for n in page_list]))
    # Fix: removed leftover commented-out debug print statements.
    for index in page_list:
        if index not in self._available_images:
            files.append(self._image_files[index])
    if len(files) > 0:
        self._window.filehandler._ask_for_files(files)
    return page_list
def _extract_all_files(self, files):
    """Extract <files> (minus those already extracted), notifying
    after each finished file and stopping early if the thread is
    asked to stop."""
    # With multiple extractions for each pass, some of the files might have
    # already been extracted.
    with self._condition:
        files = list(set(files) - self._extracted)
        files.sort()
    try:
        log.debug(u'Extracting from "%s" to "%s": "%s"', self._src, self._dst, '", "'.join(files))
        for f in self._archive.iter_extract(files, self._dst):
            if self._extract_thread.must_stop():
                return
            self._extraction_finished(f)
    except Exception, ex:
        # Better to ignore any failed extractions (e.g. from a corrupt
        # archive) than to crash here and leave the main thread in a
        # possible infinite block. Damaged or missing files *should* be
        # handled gracefully by the main program anyway.
        log.error(_('! Extraction error: %s'), ex)
        log.debug('Traceback:\n%s', traceback.format_exc())
def __run_callbacks(self, *args, **kwargs):
    """ Executes callback functions. """
    # Invoke every registered callback; bound methods are stored as a
    # (weak-ref-to-instance, function) pair and re-bound on the fly.
    for obj_ref, func in self.__callbacks:
        if obj_ref is None:
            # Callback is a normal function
            callback = func
        elif obj_ref() is not None:
            # Callback is a bound method.
            # Recreate it by binding the function to the object.
            callback = func.__get__(obj_ref())
        else:
            # Callback is a bound method, object
            # no longer exists.
            callback = None
        if callback:
            # Exceptions are logged and swallowed so one failing
            # callback cannot break the others.
            try:
                callback(*args, **kwargs)
            except Exception, e:
                log.error(_('! Callback %(function)r failed: %(error)s'),
                          { 'function' : callback, 'error' : e })
                log.debug('Traceback:\n%s', traceback.format_exc())
def extract(self, filename, destination_dir):
    """Render page <filename> of the PDF into <destination_dir> via mudraw.

    First runs `mudraw -x` to inspect the embedded images and pick the
    highest DPI that reproduces the largest image 1:1 (capped at
    PDF_RENDER_DPI_MAX), then renders the page at that DPI.
    """
    self._create_directory(destination_dir)
    destination_path = os.path.join(destination_dir, filename)
    # Page files are named "<page>.png" etc.; strip the 4-char extension.
    page_num = int(filename[0:-4])
    # Try to find optimal DPI.
    proc = process.Process(['mudraw', '-x', '--', self.pdf, str(page_num)])
    fd = proc.spawn()
    max_size = 0
    max_dpi = PDF_RENDER_DPI_DEF
    if fd is not None:
        try:
            for line in fd:
                match = self._fill_image_regex.match(line)
                if not match:
                    continue
                matrix = [float(f) for f in match.group('matrix').split()]
                for size, coeff1, coeff2 in (
                    (int(match.group('width')), matrix[0], matrix[1]),
                    (int(match.group('height')), matrix[2], matrix[3]),
                ):
                    if size < max_size:
                        continue
                    # Size of the image as rendered in the page, in points.
                    render_size = math.sqrt(coeff1 * coeff1 + coeff2 * coeff2)
                    dpi = int(size * 72 / render_size)
                    if dpi > PDF_RENDER_DPI_MAX:
                        dpi = PDF_RENDER_DPI_MAX
                    max_size = size
                    max_dpi = dpi
        finally:
            # Always close the pipe and reap the child, even if parsing
            # a line raises (otherwise the fd leaks and the child zombies).
            fd.close()
            proc.wait()
    # Render...
    cmd = ['mudraw', '-r', str(max_dpi), '-o', destination_path,
           '--', self.pdf, str(page_num)]
    # Lazy %-args, consistent with logging usage in the rest of the module.
    log.debug('rendering %s: %s', filename, ' '.join(cmd))
    proc = process.Process(cmd)
    fd = proc.spawn()
    if fd is not None:
        fd.close()
        proc.wait()
def run(): """Run the program.""" try: import pkg_resources except ImportError: # gettext isn't initialized yet, since pkg_resources is required to find translation files. # Thus, localizing these messages is pointless. log._print("The package 'pkg_resources' could not be found.") log._print("You need to install the 'setuptools' package, which also includes pkg_resources.") log._print("Note: On most distributions, 'distribute' supersedes 'setuptools'.") wait_and_exit() # Load configuration and setup localisation. preferences.read_preferences_file() from mcomix import i18n i18n.install_gettext() # Retrieve and parse command line arguments. argv = portability.get_commandline_args() opts, args = parse_arguments(argv) # First things first: set the log level. log.setLevel(opts.loglevel) # On Windows, update the fontconfig cache manually, before MComix starts # using Gtk, since the process may take several minutes, during which the # main window will just be frozen if the work is left to Gtk itself... if opts.update_fontconfig_cache: # First, update fontconfig cache. log.debug('starting fontconfig cache update') try: from mcomix.win32 import fc_cache from mcomix import process fc_cache.update() log.debug('fontconfig cache updated') except Exception as e: log.error('during fontconfig cache update', exc_info=e) # And then replace current MComix process with a fresh one # (that will not try to update the cache again). exe = sys.argv[0] if sys.platform == 'win32' and exe.endswith('.py'): # Find the interpreter. 
exe = process.find_executable(('pythonw.exe', 'python.exe')) args = [exe, sys.argv[0]] else: args = [exe] if sys.platform == 'win32': args.append('--no-update-fontconfig-cache') args.extend(argv) if '--update-fontconfig-cache' in args: args.remove('--update-fontconfig-cache') log.debug('restarting MComix from fresh: os.execv(%s, %s)', repr(exe), args) try: if sys.platform == 'win32': # Of course we can't use os.execv on Windows because it will # mangle arguments containing spaces or non-ascii characters... process.Win32Popen(args) sys.exit(0) else: os.execv(exe, args) except Exception as e: log.error('os.execv(%s, %s) failed', exe, str(args), exc_info=e) wait_and_exit() # Check for PyGTK and PIL dependencies. try: import pygtk pygtk.require('2.0') import gtk assert gtk.gtk_version >= (2, 12, 0) assert gtk.pygtk_version >= (2, 12, 0) import gobject gobject.threads_init() except AssertionError: log.error( _("You do not have the required versions of GTK+ and PyGTK installed.") ) log.error( _('Installed GTK+ version is: %s') % \ '.'.join([str(n) for n in gtk.gtk_version]) ) log.error( _('Required GTK+ version is: 2.12.0 or higher') ) log.error( _('Installed PyGTK version is: %s') % \ '.'.join([str(n) for n in gtk.pygtk_version]) ) log.error( _('Required PyGTK version is: 2.12.0 or higher') ) wait_and_exit() except ImportError: log.error( _('Required PyGTK version is: 2.12.0 or higher') ) log.error( _('No version of PyGTK was found on your system.') ) log.error( _('This error might be caused by missing GTK+ libraries.') ) wait_and_exit() try: import PIL.Image assert PIL.Image.VERSION >= '1.1.5' except AssertionError: log.error( _("You don't have the required version of the Python Imaging"), end=' ') log.error( _('Library (PIL) installed.') ) log.error( _('Installed PIL version is: %s') % Image.VERSION ) log.error( _('Required PIL version is: 1.1.5 or higher') ) wait_and_exit() except ImportError: log.error( _('Python Imaging Library (PIL) 1.1.5 or higher is required.') 
) log.error( _('No version of the Python Imaging Library was found on your system.') ) wait_and_exit() if not os.path.exists(constants.DATA_DIR): os.makedirs(constants.DATA_DIR, 0700) if not os.path.exists(constants.CONFIG_DIR): os.makedirs(constants.CONFIG_DIR, 0700) from mcomix import icons icons.load_icons() open_path = None open_page = 1 if len(args) == 1: open_path = args[0] elif len(args) > 1: open_path = args elif preferences.prefs['auto load last file'] \ and preferences.prefs['path to last file'] \ and os.path.isfile(preferences.prefs['path to last file']): open_path = preferences.prefs['path to last file'] open_page = preferences.prefs['page of last file'] # Some languages require a RTL layout if preferences.prefs['language'] in ('he', 'fa'): gtk.widget_set_default_direction(gtk.TEXT_DIR_RTL) gtk.gdk.set_program_class(constants.APPNAME) from mcomix import main window = main.MainWindow(fullscreen = opts.fullscreen, is_slideshow = opts.slideshow, show_library = opts.library, manga_mode = opts.manga, double_page = opts.doublepage, zoom_mode = opts.zoommode, open_path = open_path, open_page = open_page) main.set_main_window(window) if 'win32' != sys.platform: # Add a SIGCHLD handler to reap zombie processes. def on_sigchld(signum, frame): try: os.waitpid(-1, os.WNOHANG) except OSError: pass signal.signal(signal.SIGCHLD, on_sigchld) signal.signal(signal.SIGTERM, lambda: gobject.idle_add(window.terminate_program)) try: gtk.main() except KeyboardInterrupt: # Will not always work because of threading. window.terminate_program()
def __init__(self, action=gtk.FILE_CHOOSER_ACTION_OPEN):
    """Create a file chooser dialog for <action> (open or save).

    Builds the embedded FileChooserWidget with a thumbnail preview pane,
    then pre-selects a starting folder/file from the currently open file,
    the last activated file, or the stored preferences, in that order.
    """
    self._action = action
    self._destroyed = False
    # Pick title and buttons matching the requested chooser action.
    if action == gtk.FILE_CHOOSER_ACTION_OPEN:
        title = _('Open')
        buttons = (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                   gtk.STOCK_OPEN, gtk.RESPONSE_OK)
    else:
        title = _('Save')
        buttons = (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                   gtk.STOCK_SAVE, gtk.RESPONSE_OK)
    super(_BaseFileChooserDialog, self).__init__(title, None, 0, buttons)
    self.set_default_response(gtk.RESPONSE_OK)
    self.filechooser = gtk.FileChooserWidget(action=action)
    self.filechooser.set_size_request(680, 420)
    self.vbox.pack_start(self.filechooser)
    self.set_border_width(4)
    self.filechooser.set_border_width(6)
    self.connect('response', self._response)
    # Double-clicking/activating a file behaves like pressing OK.
    self.filechooser.connect('file_activated', self._response,
                             gtk.RESPONSE_OK)
    # Preview pane: thumbnail image plus name and size labels.
    preview_box = gtk.VBox(False, 10)
    preview_box.set_size_request(130, 0)
    self._preview_image = gtk.Image()
    self._preview_image.set_size_request(130, 130)
    preview_box.pack_start(self._preview_image, False, False)
    self.filechooser.set_preview_widget(preview_box)
    # Approximation of Pango's "small" scale factor.
    pango_scale_small = (1 / 1.2)
    self._namelabel = labels.FormattedLabel(weight=pango.WEIGHT_BOLD,
                                            scale=pango_scale_small)
    self._namelabel.set_ellipsize(pango.ELLIPSIZE_MIDDLE)
    preview_box.pack_start(self._namelabel, False, False)
    self._sizelabel = labels.FormattedLabel(scale=pango_scale_small)
    self._sizelabel.set_ellipsize(pango.ELLIPSIZE_MIDDLE)
    preview_box.pack_start(self._sizelabel, False, False)
    # We draw our own preview labels, so disable the default one.
    self.filechooser.set_use_preview_label(False)
    preview_box.show_all()
    self.filechooser.connect('update-preview', self._update_preview)
    self._all_files_filter = self.add_filter(
        _('All files'), [], ['*'])
    try:
        current_file = self._current_file()
        last_file = self.__class__._last_activated_file
        # If a file is currently open, use its path
        if current_file and os.path.exists(current_file):
            self.filechooser.set_current_folder(os.path.dirname(current_file))
        # If no file is open, use the last stored file
        elif (last_file and os.path.exists(last_file)):
            self.filechooser.set_filename(last_file)
        # If no file was stored yet, fall back to preferences
        elif os.path.isdir(prefs['path of last browsed in filechooser']):
            if prefs['store recent file info']:
                self.filechooser.set_current_folder(
                    prefs['path of last browsed in filechooser'])
            else:
                self.filechooser.set_current_folder(
                    constants.HOME_DIR)
    except Exception, ex:
        # E.g. broken prefs values.
        log.debug(ex)
def __init__(self, action=gtk.FILE_CHOOSER_ACTION_OPEN):
    """Create a file chooser dialog for <action> (open or save).

    Builds the embedded FileChooserWidget with a thumbnail preview pane,
    registers file filters for every archive format whose extractor is
    available, and pre-selects a starting folder/file from the currently
    open file, the last activated file, or the stored preferences.
    """
    self._action = action
    self._destroyed = False
    # Pick title and buttons matching the requested chooser action.
    if action == gtk.FILE_CHOOSER_ACTION_OPEN:
        title = _('Open')
        buttons = (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                   gtk.STOCK_OPEN, gtk.RESPONSE_OK)
    else:
        title = _('Save')
        buttons = (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                   gtk.STOCK_SAVE, gtk.RESPONSE_OK)
    gtk.Dialog.__init__(self, title, None, 0, buttons)
    self.set_default_response(gtk.RESPONSE_OK)
    self.set_has_separator(False)
    self.filechooser = gtk.FileChooserWidget(action=action)
    self.filechooser.set_size_request(680, 420)
    self.vbox.pack_start(self.filechooser)
    self.set_border_width(4)
    self.filechooser.set_border_width(6)
    self.connect('response', self._response)
    # Double-clicking/activating a file behaves like pressing OK.
    self.filechooser.connect('file_activated', self._response,
                             gtk.RESPONSE_OK)
    # Preview pane: thumbnail image plus name and size labels.
    preview_box = gtk.VBox(False, 10)
    preview_box.set_size_request(130, 0)
    self._preview_image = gtk.Image()
    self._preview_image.set_size_request(130, 130)
    preview_box.pack_start(self._preview_image, False, False)
    self.filechooser.set_preview_widget(preview_box)
    self._namelabel = labels.FormattedLabel(weight=pango.WEIGHT_BOLD,
                                            scale=pango.SCALE_SMALL)
    self._namelabel.set_ellipsize(pango.ELLIPSIZE_MIDDLE)
    preview_box.pack_start(self._namelabel, False, False)
    self._sizelabel = labels.FormattedLabel(scale=pango.SCALE_SMALL)
    self._sizelabel.set_ellipsize(pango.ELLIPSIZE_MIDDLE)
    preview_box.pack_start(self._sizelabel, False, False)
    # We draw our own preview labels, so disable the default one.
    self.filechooser.set_use_preview_label(False)
    preview_box.show_all()
    self.filechooser.connect('update-preview', self._update_preview)
    self._all_files_filter = self.add_filter(
        _('All files'), [], ['*'])
    # Determine which types should go into 'All archives' based on
    # extractor availability.
    mimetypes = constants.ZIP_FORMATS[0] + constants.TAR_FORMATS[0]
    patterns = constants.ZIP_FORMATS[1] + constants.TAR_FORMATS[1]
    if constants.RAR_AVAILABLE():
        mimetypes += constants.RAR_FORMATS[0]
        patterns += constants.RAR_FORMATS[1]
    if constants.SZIP_AVAILABLE():
        mimetypes += constants.SZIP_FORMATS[0]
        patterns += constants.SZIP_FORMATS[1]
    if constants.LHA_AVAILABLE():
        mimetypes += constants.LHA_FORMATS[0]
        patterns += constants.LHA_FORMATS[1]
    self.add_filter(_('All Archives'), mimetypes, patterns)
    # Per-format filters, again gated on extractor availability.
    self.add_filter(_('ZIP archives'), *constants.ZIP_FORMATS)
    self.add_filter(_('Tar archives'), *constants.TAR_FORMATS)
    if constants.RAR_AVAILABLE():
        self.add_filter(_('RAR archives'), *constants.RAR_FORMATS)
    if constants.SZIP_AVAILABLE():
        self.add_filter(_('7z archives'), *constants.SZIP_FORMATS)
    if constants.LHA_AVAILABLE():
        self.add_filter(_('LHA archives'), *constants.LHA_FORMATS)
    try:
        current_file = self._current_file()
        last_file = self.__class__._last_activated_file
        # If a file is currently open, use its path
        if current_file and os.path.exists(current_file):
            self.filechooser.set_current_folder(os.path.dirname(current_file))
        # If no file is open, use the last stored file
        elif (last_file and os.path.exists(last_file)):
            self.filechooser.set_filename(last_file)
        # If no file was stored yet, fall back to preferences
        elif os.path.isdir(prefs['path of last browsed in filechooser']):
            self.filechooser.set_current_folder(
                prefs['path of last browsed in filechooser'])
    except Exception, ex:
        # E.g. broken prefs values.
        log.debug(ex)
def _cache_pixbuf(self, wanted):
    """Process one caching order by loading the pixbuf of its page.

    <wanted> is a (priority, page_index) tuple queued by do_cacheing();
    only the page index is needed here.
    """
    index = wanted[1]
    log.debug("Caching page %u", index + 1)
    self._get_pixbuf(index)
def remove_all(self):
    """Clear every entry from the list of recently opened files."""
    try:
        self._manager.purge_items()
    except gobject.GError as error:
        # Best-effort: a GTK error here is harmless, just record it.
        log.debug(error)