def post_dump(self, *args, **kwargs):
    """Post-process a finished dump.

    Reads the build metadata file ("<release>.json") from the new data
    folder and, depending on the build type:

    - ``incremental``: verifies the md5sum of every diff file listed in
      ``metadata.json``; on mismatch, registers a "failed" status and
      raises :class:`DumperException`.
    - ``full``: if the repository type is ``fs``, uncompresses all
      archives found in the new data folder.

    Raises:
        DumperException: when a diff file's md5sum does not match the
            one recorded in the metadata.
    """
    if not self.release:
        # wasn't set before, means no need to post-process (ie. up-to-date, already done)
        return
    # Fix: use context managers so the metadata file handles are closed
    # (original used bare json.load(open(...)), leaking descriptors).
    build_meta_path = os.path.join(self.new_data_folder, "%s.json" % self.release)
    with open(build_meta_path) as meta_file:
        build_meta = json.load(meta_file)
    if build_meta["type"] == "incremental":
        self.logger.info("Checking md5sum for files in '%s'" % self.new_data_folder)
        with open(os.path.join(self.new_data_folder, "metadata.json")) as md_file:
            metadata = json.load(md_file)
        for md5_fname in metadata["diff"]["files"]:
            spec_md5 = md5_fname["md5sum"]
            fname = md5_fname["name"]
            compute_md5 = md5sum(os.path.join(self.new_data_folder, fname))
            if compute_md5 != spec_md5:
                self.logger.error(
                    "md5 check failed for file '%s', it may be corrupted" % fname)
                e = DumperException("Bad md5sum for file '%s'" % fname)
                # record the failure before propagating so status is visible
                self.register_status("failed", download={"err": repr(e)})
                raise e
            else:
                self.logger.debug("md5 check success for file '%s'" % fname)
    elif build_meta["type"] == "full":
        # if type=fs, check if archive must be uncompressed
        # TODO
        # repo_name = list(build_meta["metadata"]["repository"].keys())[0]
        if build_meta["metadata"]["repository"]["type"] == "fs":
            uncompressall(self.new_data_folder)
def post_dump(self, *args, **kwargs):
    """Uncompress every archive in the freshly downloaded data folder,
    but only when the dumper class enables the UNCOMPRESS flag."""
    if not self.__class__.UNCOMPRESS:
        return
    folder = self.new_data_folder
    self.logger.info("Uncompress all archive files in '%s'" % folder)
    uncompressall(folder)
def post_dump(self, *args, **kwargs):
    """Post-dump hook: extract any compressed archives found in the new
    data folder when this dumper class sets UNCOMPRESS."""
    if not self.__class__.UNCOMPRESS:
        # nothing to extract for this dumper
        return
    self.logger.info("Uncompress all archive files in '%s'" % self.new_data_folder)
    uncompressall(self.new_data_folder)