def get_new_files(self, filelist):
    ''' Returns a list of files that are present in the watched directory,
    but not in the list of files passed in C{filelist}. '''
    if not self.is_valid():
        return []
    old_files = frozenset([os.path.abspath(path) for path in filelist])
    if not self.recursive:
        available_files = frozenset([
            os.path.join(self.directory, filename)
            for filename in os.listdir(self.directory)
            if archive_tools.is_archive_file(filename)
        ])
    else:
        available_files = []
        for dirpath, dirnames, filenames in os.walk(self.directory):
            for filename in filter(archive_tools.is_archive_file, filenames):
                path = os.path.join(dirpath, filename)
                available_files.append(path)
        available_files = frozenset(available_files)
    return list(available_files.difference(old_files))
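# Hypothetical usage sketch, not part of the original code: 'watcher' stands
# for an instance of the class owning get_new_files(), and 'open_archive' for
# whatever callback the application uses to react to a newly found archive.
def poll_watched_directory(watcher, known_files, open_archive):
    # Ask the watcher which archives appeared since the last poll.
    new_files = watcher.get_new_files(known_files)
    for path in new_files:
        open_archive(path)
    # Remember them so the next poll only reports genuinely new archives.
    known_files.extend(new_files)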
def _create_thumbnail_pixbuf(self, filepath):
    ''' Creates a thumbnail pixbuf from <filepath>, and returns it as a
    tuple along with a file metadata dictionary: (pixbuf, tEXt_data). '''
    if self.archive_support:
        mime = archive_tools.archive_mime_type(filepath)
    else:
        mime = None
    if mime is not None:
        if not archive_tools.is_archive_file(filepath):
            return None, None
        with archive_tools.get_recursive_archive_handler(
                filepath, type=mime,
                prefix='mcomix_archive_thumb.') as archive:
            if archive is None:
                return None, None
            if archive.is_encrypted:
                image_path = tools.pkg_path('images', 'encrypted-book.png')
            else:
                files = archive.list_contents(decrypt=False)
                wanted = self._guess_cover(files)
                if wanted is None:
                    return None, None
                image_path = archive.extract(wanted)
                if not os.path.isfile(image_path):
                    return None, None
            pixbuf = image_tools.load_pixbuf_size(image_path,
                                                  self.width, self.height)
            if self.store_on_disk:
                tEXt_data = self._get_text_data(image_path)
                # Use the archive's mTime instead of the extracted file's mtime
                tEXt_data['tEXt::Thumb::MTime'] = str(os.stat(filepath).st_mtime)
            else:
                tEXt_data = None
            return pixbuf, tEXt_data
    elif image_tools.is_image_file(filepath, check_mimetype=True):
        pixbuf = image_tools.load_pixbuf_size(filepath, self.width, self.height)
        if self.store_on_disk:
            tEXt_data = self._get_text_data(filepath)
        else:
            tEXt_data = None
        return pixbuf, tEXt_data
    else:
        return None, None
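# Hedged sketch of a possible saving step (not shown in this section): the
# helper name and thumb_path are illustrative. It assumes the pixbuf is a
# GdkPixbuf.Pixbuf and relies on GdkPixbuf's PNG writer turning
# 'tEXt::<keyword>' save options into tEXt chunks, so the metadata dictionary
# returned above can be passed through unchanged.
def save_thumbnail_sketch(pixbuf, tEXt_data, thumb_path):
    keys = list(tEXt_data.keys()) if tEXt_data else []
    values = list(tEXt_data.values()) if tEXt_data else []
    pixbuf.savev(thumb_path, 'png', keys, values)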
def __get_file_filter(self, files):
    """ Determines what kind of files should be filtered in the given list
    of <files>. Returns either a filter accepting only images, or only
    archives, depending on what type of file is found first in the list. """
    for file in files:
        if os.path.isfile(file):
            if image_tools.is_image_file(file):
                return image_tools.is_image_file
            if archive_tools.is_archive_file(file):
                return archive_tools.is_archive_file
    # Default filter only accepts images.
    return image_tools.is_image_file
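# Hypothetical call site (method name is illustrative), shown as a method of
# the same class because the double-underscore name is mangled and is only
# reachable from there: pick one predicate for the whole list, then keep only
# the entries it accepts, so the result is all images or all archives, never
# a mix.
def __filter_files_sketch(self, files):
    file_filter = self.__get_file_filter(files)
    return [path for path in files if file_filter(path)]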
def _iter_contents(self, archive, root=None, decrypt=True):
    if archive.is_encrypted and not decrypt:
        return
    if not root:
        root = os.path.join(self.destdir, 'main_archive')
    self._archive_list.append(archive)
    self._archive_root[archive] = root
    sub_archive_list = []
    for f in archive.iter_contents():
        if archive_tools.is_archive_file(f):
            # We found a sub-archive, don't try to extract it now, as we
            # must finish listing the containing archive contents before
            # any extraction can be done.
            sub_archive_list.append(f)
            name = f if root is None else os.path.join(root, f)
            self._entry_mapping[name] = (archive, f)
            self._sub_archives.add(name)
            continue
        name = f
        if root is not None:
            name = os.path.join(root, name)
        self._entry_mapping[name] = (archive, f)
        yield name
    for f in sub_archive_list:
        # Extract sub-archive.
        destination_dir = self.destdir
        if root is not None:
            destination_dir = os.path.join(destination_dir, root)
        sub_archive_path = archive.extract(f, destination_dir)
        # And open it and list its contents.
        sub_archive = archive_tools.get_archive_handler(sub_archive_path)
        if sub_archive is None:
            log.warning('Non-supported archive format: %s',
                        os.path.basename(sub_archive_path))
            continue
        sub_tempdir = tempfile.TemporaryDirectory(
            prefix='sub_archive.{:04}.'.format(len(self._archive_list)),
            dir=self.destdir)
        sub_root = sub_tempdir.name
        self._sub_tempdirs.append(sub_tempdir)
        for name in self._iter_contents(sub_archive, sub_root):
            yield name
        os.remove(sub_archive_path)
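# Hypothetical driver, not part of the original code: 'extractor' stands for
# the instance owning _iter_contents() and 'main_archive' for an already
# opened archive handler. Exhausting the generator yields the flattened entry
# names, including those found inside nested sub-archives, and fills the
# internal _entry_mapping as a side effect.
def list_all_entries(extractor, main_archive):
    return sorted(extractor._iter_contents(main_archive))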
def _iter_contents(self, archive, root=None):
    self._archive_list.append(archive)
    self._archive_root[archive] = root
    sub_archive_list = []
    for f in archive.iter_contents():
        if archive_tools.is_archive_file(f):
            # We found a sub-archive, don't try to extract it now, as we
            # must finish listing the containing archive contents before
            # any extraction can be done.
            sub_archive_list.append(f)
            continue
        name = f
        if root is not None:
            name = os.path.join(root, name)
        self._entry_mapping[name] = (archive, f)
        yield name
    for f in sub_archive_list:
        # Extract sub-archive.
        destination_dir = self._destination_dir
        if root is not None:
            destination_dir = os.path.join(destination_dir, root)
        archive.extract(f, destination_dir)
        sub_archive_ext = os.path.splitext(f)[1].lower()[1:]
        sub_archive_path = os.path.join(
            self._destination_dir, 'sub-archives',
            '%04u.%s' % (len(self._archive_list), sub_archive_ext))
        self._create_directory(os.path.dirname(sub_archive_path))
        os.rename(os.path.join(destination_dir, f), sub_archive_path)
        # And open it and list its contents.
        sub_archive = archive_tools.get_archive_handler(sub_archive_path)
        if sub_archive is None:
            log.warning('Non-supported archive format: %s',
                        os.path.basename(sub_archive_path))
            continue
        sub_root = f
        if root is not None:
            sub_root = os.path.join(root, sub_root)
        for name in self._iter_contents(sub_archive, sub_root):
            yield name