def test_compression_type(self):
    """Check that compression-name strings map to the expected cr constants.

    Unknown/empty names must map to UNKNOWN_COMPRESSION; known suffixes
    map case-insensitively (note "XZ" vs "xz") to their constants.
    """
    cases = [
        (None, cr.UNKNOWN_COMPRESSION),
        ("", cr.UNKNOWN_COMPRESSION),
        ("gz", cr.GZ),
        ("bz2", cr.BZ2),
        ("xz", cr.XZ),
        ("XZ", cr.XZ),
        ("zck", cr.ZCK),
    ]
    for name, expected in cases:
        self.assertEqual(cr.compression_type(name), expected)
def __init__(self, old_repo_path, new_repo_path, out_path=None,
             logger=None, contenthash_type="sha256",
             compression_type="xz", force_database=False,
             ignore_missing=False):
    """Prepare state for building a delta between two repositories.

    :param old_repo_path: path to the old (source) repository
    :param new_repo_path: path to the new (target) repository
    :param out_path: output directory for the delta repo (default "./")
    :param logger: logger passed to LoggingInterface
    :param contenthash_type: checksum name used for content hashes
    :param compression_type: compression name for generated metadata
    :param force_database: propagated to the global bundle
    :param ignore_missing: propagated to the global bundle
    :raises DeltaRepoError: if either repo lacks "primary" metadata or
        the new repo uses an unknown checksum type
    """
    # Initialization
    self.ignore_missing = ignore_missing
    LoggingInterface.__init__(self, logger)

    self.out_path = out_path or "./"
    self.final_path = os.path.join(self.out_path, "repodata")

    self.new_repo_path = new_repo_path
    self.new_repodata_path = os.path.join(self.new_repo_path, "repodata/")
    self.new_repomd_path = os.path.join(self.new_repodata_path, "repomd.xml")

    self.old_repo_path = old_repo_path
    self.old_repodata_path = os.path.join(self.old_repo_path, "repodata/")
    self.old_repomd_path = os.path.join(self.old_repodata_path, "repomd.xml")

    # NOTE(review): delta paths use the raw out_path argument, which may be
    # None (unlike self.out_path which defaults to "./") — os.path.join
    # would then raise TypeError. Preserved as-is; confirm callers always
    # pass out_path.
    self.delta_repo_path = out_path
    self.delta_repodata_path = os.path.join(self.delta_repo_path, ".repodata/")
    self.delta_repomd_path = os.path.join(self.delta_repodata_path, "repomd.xml")

    # contenthash type
    self.contenthash_type_str = contenthash_type or "sha256"
    self.compression_type_str = compression_type or "xz"
    self.compression_type = cr.compression_type(self.compression_type_str)

    # Prepare Repomd objects
    self.old_repomd = cr.Repomd(self.old_repomd_path)
    self.new_repomd = cr.Repomd(self.new_repomd_path)
    self.delta_repomd = cr.Repomd()

    # Use revision and tags of the new repo for the delta repomd
    self.delta_repomd.set_revision(self.new_repomd.revision)
    for tag in self.new_repomd.distro_tags:
        self.delta_repomd.add_distro_tag(tag[1], tag[0])
    for tag in self.new_repomd.repo_tags:
        self.delta_repomd.add_repo_tag(tag)
    for tag in self.new_repomd.content_tags:
        self.delta_repomd.add_content_tag(tag)

    # Load records, keyed by record type ("primary", "filelists", ...)
    self.old_records = {}
    self.new_records = {}
    for record in self.old_repomd.records:
        self.old_records[record.type] = record
    for record in self.new_repomd.records:
        self.new_records[record.type] = record

    old_record_types = set(self.old_records.keys())
    new_record_types = set(self.new_records.keys())
    self.deleted_repomd_record_types = old_record_types - new_record_types
    self.added_repomd_record_types = new_record_types - old_record_types

    # Important sanity checks (repo without primary is definitely bad)
    if "primary" not in self.old_records:
        raise DeltaRepoError("Missing \"primary\" metadata in old repo")
    if "primary" not in self.new_records:
        raise DeltaRepoError("Missing \"primary\" metadata in new repo")

    # Detect type of checksum in the new repomd.xml (global)
    self.checksum_type = cr.checksum_type(
        self.new_records["primary"].checksum_type)
    if self.checksum_type == cr.UNKNOWN_CHECKSUM:
        raise DeltaRepoError("Unknown checksum type used in new repo: %s" % \
                self.new_records["primary"].checksum_type)

    # TODO: Is it really necessary to detect the checksum type here when
    # it is detected again for each record anyway?

    # Detect whether unique (hash-prefixed) md filenames are in use.
    # BUGFIX: the attribute was previously assigned only inside the if
    # branch, so repos with plain filenames ("primary.xml.gz") hit an
    # AttributeError when the global bundle is filled in below.
    self.unique_md_filenames = False
    if self.new_records["primary"].location_href.split("primary")[0] != "":
        self.unique_md_filenames = True

    self.old_contenthash = self.old_repomd.contenthash
    self.new_contenthash = self.new_repomd.contenthash

    self.deltametadata = DeltaMetadata()

    # Prepare global bundle shared by the delta plugins
    self.globalbundle = GlobalBundle()
    self.globalbundle.contenthash_type_str = self.contenthash_type_str
    self.globalbundle.unique_md_filenames = self.unique_md_filenames
    self.globalbundle.force_database = force_database
    self.globalbundle.ignore_missing = ignore_missing
def __init__(self, old_repo_path, new_repo_path, out_path=None,
             logger=None, contenthash_type="sha256",
             compression_type="xz", force_database=False,
             ignore_missing=False):
    """Prepare state for building a delta between two repositories.

    :param old_repo_path: path to the old (source) repository
    :param new_repo_path: path to the new (target) repository
    :param out_path: output directory for the delta repo (default "./")
    :param logger: logger passed to LoggingInterface
    :param contenthash_type: checksum name used for content hashes
    :param compression_type: compression name for generated metadata
    :param force_database: propagated to the global bundle
    :param ignore_missing: propagated to the global bundle
    :raises DeltaRepoError: if either repo lacks "primary" metadata or
        the new repo uses an unknown checksum type
    """
    # Initialization
    self.ignore_missing = ignore_missing
    LoggingInterface.__init__(self, logger)

    self.out_path = out_path or "./"
    self.final_path = os.path.join(self.out_path, "repodata")

    self.new_repo_path = new_repo_path
    self.new_repodata_path = os.path.join(self.new_repo_path, "repodata/")
    self.new_repomd_path = os.path.join(self.new_repodata_path, "repomd.xml")

    self.old_repo_path = old_repo_path
    self.old_repodata_path = os.path.join(self.old_repo_path, "repodata/")
    self.old_repomd_path = os.path.join(self.old_repodata_path, "repomd.xml")

    # NOTE(review): delta paths use the raw out_path argument, which may be
    # None (unlike self.out_path which defaults to "./") — os.path.join
    # would then raise TypeError. Preserved as-is; confirm callers always
    # pass out_path.
    self.delta_repo_path = out_path
    self.delta_repodata_path = os.path.join(self.delta_repo_path, ".repodata/")
    self.delta_repomd_path = os.path.join(self.delta_repodata_path, "repomd.xml")

    # contenthash type
    self.contenthash_type_str = contenthash_type or "sha256"
    self.compression_type_str = compression_type or "xz"
    self.compression_type = cr.compression_type(self.compression_type_str)

    # Prepare Repomd objects
    self.old_repomd = cr.Repomd(self.old_repomd_path)
    self.new_repomd = cr.Repomd(self.new_repomd_path)
    self.delta_repomd = cr.Repomd()

    # Use revision and tags of the new repo for the delta repomd
    self.delta_repomd.set_revision(self.new_repomd.revision)
    for tag in self.new_repomd.distro_tags:
        self.delta_repomd.add_distro_tag(tag[1], tag[0])
    for tag in self.new_repomd.repo_tags:
        self.delta_repomd.add_repo_tag(tag)
    for tag in self.new_repomd.content_tags:
        self.delta_repomd.add_content_tag(tag)

    # Load records, keyed by record type ("primary", "filelists", ...)
    self.old_records = {}
    self.new_records = {}
    for record in self.old_repomd.records:
        self.old_records[record.type] = record
    for record in self.new_repomd.records:
        self.new_records[record.type] = record

    old_record_types = set(self.old_records.keys())
    new_record_types = set(self.new_records.keys())
    self.deleted_repomd_record_types = old_record_types - new_record_types
    self.added_repomd_record_types = new_record_types - old_record_types

    # Important sanity checks (repo without primary is definitely bad)
    if "primary" not in self.old_records:
        raise DeltaRepoError("Missing \"primary\" metadata in old repo")
    if "primary" not in self.new_records:
        raise DeltaRepoError("Missing \"primary\" metadata in new repo")

    # Detect type of checksum in the new repomd.xml (global)
    self.checksum_type = cr.checksum_type(
        self.new_records["primary"].checksum_type)
    if self.checksum_type == cr.UNKNOWN_CHECKSUM:
        raise DeltaRepoError("Unknown checksum type used in new repo: %s" % \
                self.new_records["primary"].checksum_type)

    # TODO: Is it really necessary to detect the checksum type here when
    # it is detected again for each record anyway?

    # Detect whether unique (hash-prefixed) md filenames are in use.
    # BUGFIX: the attribute was previously assigned only inside the if
    # branch, so repos with plain filenames ("primary.xml.gz") hit an
    # AttributeError when the global bundle is filled in below.
    self.unique_md_filenames = False
    if self.new_records["primary"].location_href.split("primary")[0] != "":
        self.unique_md_filenames = True

    self.old_contenthash = self.old_repomd.contenthash
    self.new_contenthash = self.new_repomd.contenthash

    self.deltametadata = DeltaMetadata()

    # Prepare global bundle shared by the delta plugins
    self.globalbundle = GlobalBundle()
    self.globalbundle.contenthash_type_str = self.contenthash_type_str
    self.globalbundle.unique_md_filenames = self.unique_md_filenames
    self.globalbundle.force_database = force_database
    self.globalbundle.ignore_missing = ignore_missing