def extract(self, location: Location, directory_watcher, stdio) -> Optional[ArchiveExtractor]:
    """Start, queue, or look up extraction of the archive at *location*.

    Returns the ArchiveExtractor responsible for the location (new, running,
    or queued), or None when a previous extraction attempt exists on disk —
    including the case where that attempt failed, which is reported to the
    user via *directory_watcher*.
    """
    extractor = self._extractors.get(location, None)
    if extractor is not None:
        return extractor  # extraction already running or queued

    outdir = self._make_extractor_outdir(location)

    if not os.path.isdir(outdir):
        # No previous extraction: create and register a fresh extractor.
        extractor = ArchiveExtractor(stdio.get_stdio_name(location.origin()), outdir)
        self._extractors[location] = extractor

        # Lambda parameter renamed 'ex' so it does not shadow the enclosing
        # 'extractor' local; 'directory_watcher' is bound as a default to
        # capture its current value at definition time.
        extractor.sig_started.connect(
            lambda ex: self._on_archive_extractor_started(ex))
        extractor.sig_finished.connect(
            lambda ex, d=directory_watcher: self._on_archive_extractor_finished(ex, d))

        if self.get_running_extractor_count() < self._max_extractors:
            extractor.start()
        else:
            self._queued_extractors.append(extractor)

        return extractor
    else:
        # Output directory exists: inspect the recorded outcome.
        status_file = os.path.join(outdir, "status.json")
        status = ExtractorStatus.from_file(status_file)
        if status.status() == ExtractorResult.FAILURE:
            # BUG FIX: the original bound 'message' as a default argument
            # (the early-binding idiom) but then emitted status.message()
            # through the late-bound closure, leaving the binding unused.
            # Emit the value captured at definition time instead.
            def send_message(dw=directory_watcher, message=status.message()):
                dw.sig_message.emit(message)

            # FIXME: this is a bit of a crude hack to
            # communicate the message to the user
            QTimer.singleShot(0, send_message)

            logger.error("%s: archive exist, but is broken: %s",
                         location, status.message())

        return None
def _make_extractor_outdir(self, location: Location) -> str:
    """Derive the on-disk output directory used to extract *location*.

    The directory name is an MD5 digest of the location's origin URL with a
    trailing empty "archive" payload appended, giving each archive a stable,
    filesystem-safe subdirectory under self._extractor_dir.
    """
    assert location._payloads[-1].protocol == "archive"

    # Canonical form: origin plus an empty "archive" payload marker.
    origin = location.origin()
    origin._payloads.append(Payload("archive", ""))

    url_digest = hashlib.md5(origin.as_url().encode()).hexdigest()
    return os.path.join(self._extractor_dir, url_digest)