Example #1
0
    def gen_use_original(self, md, compression_type=cr.NO_COMPRESSION):
        """Copy the original metadata file into the delta repo unmodified.

        Intended for plugins that cannot produce a delta file for some
        reason (e.g. the metadata is newly added, so a delta would be
        meaningless/impossible). Returns the cr.RepomdRecord describing
        the copied (or recompressed) file.
        """
        md.delta_fn = os.path.join(md.out_dir, os.path.basename(md.new_fn))

        # Either copy the original verbatim, or compress it into place.
        content_stat = None
        if compression_type == cr.NO_COMPRESSION:
            shutil.copy2(md.new_fn, md.delta_fn)
        else:
            md.delta_fn += cr.compression_suffix(compression_type)
            content_stat = cr.ContentStat(md.checksum_type)
            cr.compress_file(md.new_fn, md.delta_fn, compression_type,
                             content_stat)

        # Build the repomd record for the resulting file.
        rec = cr.RepomdRecord(md.metadata_type, md.delta_fn)
        if content_stat is not None:
            rec.load_contentstat(content_stat)
        rec.fill(md.checksum_type)
        if self.globalbundle.unique_md_filenames:
            rec.rename_file()
            md.delta_fn = rec.location_real

        return rec
Example #2
0
    def gen_use_original(self, md, compression_type=cr.NO_COMPRESSION):
        """Take the original metadata file and place it, unmodified, into
        the delta repo.

        Useful for plugins when a delta file cannot be generated (e.g. the
        file is newly added, so a delta would be meaningless/impossible).
        Returns a cr.RepomdRecord for the stored file.
        """
        md.delta_fn = os.path.join(md.out_dir, os.path.basename(md.new_fn))

        compress = compression_type != cr.NO_COMPRESSION
        fstat = cr.ContentStat(md.checksum_type) if compress else None

        # Compress or copy the original file into the delta repo.
        if compress:
            md.delta_fn += cr.compression_suffix(compression_type)
            cr.compress_file(md.new_fn, md.delta_fn, compression_type, fstat)
        else:
            shutil.copy2(md.new_fn, md.delta_fn)

        # Repomd record for the resulting file.
        rec = cr.RepomdRecord(md.metadata_type, md.delta_fn)
        if fstat is not None:
            rec.load_contentstat(fstat)
        rec.fill(md.checksum_type)
        if self.globalbundle.unique_md_filenames:
            rec.rename_file()
            md.delta_fn = rec.location_real

        return rec
Example #3
0
    def _gen_db_from_xml(self, md):
        """Generate a bz2-compressed sqlite database from delta metadata.

        Returns the cr.RepomdRecord for the resulting "<mdtype>_db" file.
        Raises DeltaRepoPluginError for unsupported metadata types.
        """
        mdtype = md.metadata_type

        # Map metadata type -> (sqlite db class, xml parse function).
        handlers = {
            "primary": (cr.PrimarySqlite, cr.xml_parse_primary),
            "filelists": (cr.FilelistsSqlite, cr.xml_parse_filelists),
            "other": (cr.OtherSqlite, cr.xml_parse_other),
        }
        if mdtype not in handlers:
            raise DeltaRepoPluginError(
                "Unsupported type of metadata {0}".format(mdtype))
        dbclass, parsefunc = handlers[mdtype]

        src_fn = md.new_fn
        src_rec = md.new_rec

        # Fill the database straight from the parsed xml packages.
        md.db_fn = os.path.join(md.out_dir, "{0}.sqlite".format(mdtype))
        db = dbclass(md.db_fn)
        parsefunc(src_fn, pkgcb=db.add_pkg)

        db.dbinfo_update(src_rec.checksum)
        db.close()

        # Compress the db and drop the uncompressed file.
        db_stat = cr.ContentStat(md.checksum_type)
        db_compressed = md.db_fn + ".bz2"
        cr.compress_file(md.db_fn, None, cr.BZ2, db_stat)
        os.remove(md.db_fn)

        # Prepare repomd record of database file
        db_rec = cr.RepomdRecord("{0}_db".format(md.metadata_type),
                                 db_compressed)
        db_rec.load_contentstat(db_stat)
        db_rec.fill(md.checksum_type)
        if self.globalbundle.unique_md_filenames:
            db_rec.rename_file()

        return db_rec
Example #4
0
    def _gen_db_from_xml(self, md):
        """Build a compressed sqlite database out of the delta metadata and
        return its repomd record ("<mdtype>_db").

        Raises DeltaRepoPluginError when *md* has an unsupported type.
        """
        mdtype = md.metadata_type

        # Pick the sqlite class and parser matching the metadata type.
        if mdtype == "primary":
            db_cls, parse = cr.PrimarySqlite, cr.xml_parse_primary
        elif mdtype == "filelists":
            db_cls, parse = cr.FilelistsSqlite, cr.xml_parse_filelists
        elif mdtype == "other":
            db_cls, parse = cr.OtherSqlite, cr.xml_parse_other
        else:
            raise DeltaRepoPluginError("Unsupported type of metadata {0}".format(mdtype))

        # Populate the database from the delta xml file.
        md.db_fn = os.path.join(md.out_dir, "{0}.sqlite".format(mdtype))
        sqlite_db = db_cls(md.db_fn)
        parse(md.new_fn, pkgcb=sqlite_db.add_pkg)
        sqlite_db.dbinfo_update(md.new_rec.checksum)
        sqlite_db.close()

        # Compress with bz2; the plain sqlite file is no longer needed.
        stat = cr.ContentStat(md.checksum_type)
        compressed_fn = md.db_fn + ".bz2"
        cr.compress_file(md.db_fn, None, cr.BZ2, stat)
        os.remove(md.db_fn)

        # Repomd record of the compressed database file.
        record = cr.RepomdRecord("{0}_db".format(md.metadata_type),
                                 compressed_fn)
        record.load_contentstat(stat)
        record.fill(md.checksum_type)
        if self.globalbundle.unique_md_filenames:
            record.rename_file()

        return record
Example #5
0
    def apply(self, metadata):
        """Process "group" and "group_gz" metadata and return the list of
        repomd records generated for them."""
        generated = []

        group_md = metadata.get("group")
        group_gz_md = metadata.get("group_gz")

        # A metadata item is usable only if a delta or an old file exists.
        def usable(m):
            return m and (m.delta_fn_exists or m.old_fn_exists)

        if not usable(group_md):
            group_md = None
        if not usable(group_gz_md):
            group_gz_md = None

        if group_md:
            notes = self._metadata_notes_from_plugin_bundle(
                group_md.metadata_type)
            rc, rec = self._apply_basic_delta(group_md, notes)
            assert rc
            if rec:
                generated.append(rec)
                if notes.get("gen_group_gz"):
                    # Derive group_gz from the freshly produced group file.
                    gz_stat = cr.ContentStat(group_md.checksum_type)
                    gz_fn = group_md.new_fn + ".gz"
                    cr.compress_file(group_md.new_fn, gz_fn, cr.GZ, gz_stat)
                    gz_rec = cr.RepomdRecord("group_gz", gz_fn)
                    gz_rec.load_contentstat(gz_stat)
                    gz_rec.fill(group_md.checksum_type)
                    if self.globalbundle.unique_md_filenames:
                        gz_rec.rename_file()
                    generated.append(gz_rec)
        elif group_gz_md:
            notes = self._metadata_notes_from_plugin_bundle(
                group_gz_md.metadata_type)
            rc, rec = self._apply_basic_delta(group_gz_md, notes)
            assert rc
            if rec:
                generated.append(rec)

        return generated
    def test_compress_file(self):
        """cr.compress_file: missing input, default/explicit target names,
        ContentStat filling, and no stray files left behind."""
        # Compressing a nonexistent file must raise IOError.
        self.assertRaises(IOError, cr.compress_file, self.nofile, None, cr.BZ2)

        # No target given: source name plus compression suffix is used.
        cr.compress_file(self.tmpfile, None, cr.BZ2)
        self.assertTrue(os.path.isfile("{0}.bz2".format(self.tmpfile)))

        # Explicit target name is used as-is.
        gz_target = os.path.join(self.tmpdir, "foobar.gz")
        cr.compress_file(self.tmpfile, gz_target, cr.GZ)
        self.assertTrue(os.path.isfile(gz_target))

        # A passed ContentStat is filled with the input's checksum and size.
        cstat = cr.ContentStat(cr.SHA256)
        cr.compress_file(self.tmpfile, None, cr.XZ, cstat)
        self.assertTrue(os.path.isfile("{0}.xz".format(self.tmpfile)))
        self.assertEqual(cstat.checksum, "e61ebaa6241e335c779194ce7af98c590f1"
                                         "b26a749f219b997a0d7d5a773063b")
        self.assertEqual(cstat.checksum_type, cr.SHA256)
        self.assertEqual(cstat.size, len(self.content))

        # Only the expected files should exist in the temp dir.
        self.assertEqual(set(os.listdir(self.tmpdir)),
                         {'file.bz2', 'file.xz', 'file', 'foobar.gz'})
Example #7
0
    def test_compress_file(self):
        """Exercise cr.compress_file error handling, naming and stat."""
        # A missing source file must raise IOError.
        self.assertRaises(IOError, cr.compress_file,
                          self.nofile, None, cr.BZ2)

        # Without a target name the suffix is appended to the source name.
        cr.compress_file(self.tmpfile, None, cr.BZ2)
        self.assertTrue(os.path.isfile(self.tmpfile + ".bz2"))

        # With a target name, that exact path is created.
        dest = os.path.join(self.tmpdir, "foobar.gz")
        cr.compress_file(self.tmpfile, dest, cr.GZ)
        self.assertTrue(os.path.isfile(dest))

        # Compression fills a supplied ContentStat.
        contentstat = cr.ContentStat(cr.SHA256)
        cr.compress_file(self.tmpfile, None, cr.XZ, contentstat)
        self.assertTrue(os.path.isfile(self.tmpfile + ".xz"))
        expected_sum = ("e61ebaa6241e335c779194ce7af98c590f1"
                        "b26a749f219b997a0d7d5a773063b")
        self.assertEqual(contentstat.checksum, expected_sum)
        self.assertEqual(contentstat.checksum_type, cr.SHA256)
        self.assertEqual(contentstat.size, len(self.content))

        # No unexpected files may appear in the directory.
        self.assertEqual(set(os.listdir(self.tmpdir)),
                         {'file.bz2', 'file.xz', 'file', 'foobar.gz'})
Example #8
0
    def apply(self, metadata):
        """Apply deltas for the group / group_gz metadata; return the list
        of generated repomd records."""
        records = []

        md_grp = metadata.get("group")
        md_grp_gz = metadata.get("group_gz")

        # Drop entries that have neither a delta file nor an old file.
        if md_grp and not (md_grp.delta_fn_exists or md_grp.old_fn_exists):
            md_grp = None
        if md_grp_gz and not (md_grp_gz.delta_fn_exists
                              or md_grp_gz.old_fn_exists):
            md_grp_gz = None

        if md_grp:
            notes = self._metadata_notes_from_plugin_bundle(md_grp.metadata_type)
            status, record = self._apply_basic_delta(md_grp, notes)
            assert status
            if record:
                records.append(record)
                if notes.get("gen_group_gz"):
                    # Compress the new group file to also provide group_gz.
                    stat = cr.ContentStat(md_grp.checksum_type)
                    gz_name = md_grp.new_fn + ".gz"
                    cr.compress_file(md_grp.new_fn, gz_name, cr.GZ, stat)
                    gz_record = cr.RepomdRecord("group_gz", gz_name)
                    gz_record.load_contentstat(stat)
                    gz_record.fill(md_grp.checksum_type)
                    if self.globalbundle.unique_md_filenames:
                        gz_record.rename_file()
                    records.append(gz_record)
        elif md_grp_gz:
            notes = self._metadata_notes_from_plugin_bundle(md_grp_gz.metadata_type)
            status, record = self._apply_basic_delta(md_grp_gz, notes)
            assert status
            if record:
                records.append(record)

        return records
Example #9
0
        def finish_metadata(md):
            """Finalize one metadata item: close its xml file, register its
            repomd record, and do the same for an optional sqlite database.
            Does nothing when *md* is None."""
            if md is None:
                return

            # Flush and close the generated XML file.
            md.new_f.close()

            # Repomd record for the xml file itself.
            xml_rec = cr.RepomdRecord(md.metadata_type, md.new_fn)
            xml_rec.load_contentstat(md.new_f_stat)
            xml_rec.fill(md.checksum_type)
            if self.globalbundle.unique_md_filenames:
                xml_rec.rename_file()

            md.new_rec = xml_rec
            md.new_fn_exists = True
            gen_repomd_recs.append(xml_rec)

            # Optional sqlite database: finalize, bz2-compress, register.
            if getattr(md, "db", None):
                self._debug("Generating database: {0}".format(md.db_fn))
                md.db.dbinfo_update(xml_rec.checksum)
                md.db.close()
                db_stat = cr.ContentStat(md.checksum_type)
                compressed = md.db_fn + ".bz2"
                cr.compress_file(md.db_fn, None, cr.BZ2, db_stat)
                os.remove(md.db_fn)

                db_rec = cr.RepomdRecord("{0}_db".format(md.metadata_type),
                                         compressed)
                db_rec.load_contentstat(db_stat)
                db_rec.fill(md.checksum_type)
                if self.globalbundle.unique_md_filenames:
                    db_rec.rename_file()

                gen_repomd_recs.append(db_rec)
Example #10
0
        def finish_metadata(md):
            """Close *md*'s xml output, append its repomd record to
            gen_repomd_recs and, if a database was built, compress and
            record it as well. A None *md* is silently ignored."""
            if md is None:
                return

            md.new_f.close()

            # Record for the xml file.
            record = cr.RepomdRecord(md.metadata_type, md.new_fn)
            record.load_contentstat(md.new_f_stat)
            record.fill(md.checksum_type)
            if self.globalbundle.unique_md_filenames:
                record.rename_file()

            md.new_rec = record
            md.new_fn_exists = True
            gen_repomd_recs.append(record)

            if not (hasattr(md, "db") and md.db):
                return

            # Finalize the sqlite database, compress it and record it.
            self._debug("Generating database: {0}".format(md.db_fn))
            md.db.dbinfo_update(record.checksum)
            md.db.close()
            stat = cr.ContentStat(md.checksum_type)
            compressed_fn = md.db_fn + ".bz2"
            cr.compress_file(md.db_fn, None, cr.BZ2, stat)
            os.remove(md.db_fn)

            db_record = cr.RepomdRecord("{0}_db".format(md.metadata_type),
                                        compressed_fn)
            db_record.load_contentstat(stat)
            db_record.fill(md.checksum_type)
            if self.globalbundle.unique_md_filenames:
                db_record.rename_file()

            gen_repomd_recs.append(db_record)
Example #11
0
    def applydelta(self, old_path, delta_path, out_path=None, database=False):
        """Apply a delta repository onto an old repository.

        Reads repodata from *old_path* and *delta_path*, regenerates the
        new repository metadata into *out_path*/repodata/ and, when
        *database* is True, also rebuilds the *.sqlite databases.

        Raises DeltaRepoError when the delta does not match the old repo
        or required metadata is missing.

        Bug fixes versus the previous revision:
        - filelists/other database repomd records were registered with the
          wrong type "primary_db" (copy-paste error);
        - when the delta repo lacked a "filelists" or "other" record, the
          unconditional close()/db handling crashed on None values.
        """
        removedxml = RemovedXml()
        hash_in_the_name = False

        # Prepare variables with paths
        old_repodata_path = os.path.join(old_path, "repodata/")
        delta_repodata_path = os.path.join(delta_path, "repodata/")

        old_repomd_path = os.path.join(old_repodata_path, "repomd.xml")
        delta_repomd_path = os.path.join(delta_repodata_path, "repomd.xml")

        # Prepare Repomd objects
        old_repomd = cr.Repomd(old_repomd_path)
        delta_repomd = cr.Repomd(delta_repomd_path)
        new_repomd = cr.Repomd()

        # Check if delta id corresponds with the used repo
        if not delta_repomd.repoid or len(delta_repomd.repoid.split('-')) != 2:
            raise DeltaRepoError("Bad DeltaRepoId")

        self.id_type = delta_repomd.repoid_type

        old_id, new_id = delta_repomd.repoid.split('-')

        self._debug("Delta %s -> %s" % (old_id, new_id))

        if old_repomd.repoid_type == delta_repomd.repoid_type:
            if old_repomd.repoid and old_repomd.repoid != old_id:
                raise DeltaRepoError("Not suitable delta for current repo " \
                        "(Expected: %s Real: %s)" % (old_id, old_repomd.repoid))
        else:
            self._debug("Different repoid types repo: %s vs delta: %s" % \
                    (old_repomd.repoid_type, delta_repomd.repoid_type))

        # Prepare output path: work in a hidden ".repodata" dir and move it
        # into place only once everything has succeeded.
        new_path = os.path.join(out_path, ".repodata/")
        new_repodata_path = os.path.join(new_path, "repodata/")
        os.mkdir(new_path)
        os.mkdir(new_repodata_path)

        # Apply repomd delta (revision and tags)
        new_repomd.set_revision(delta_repomd.revision)
        for tag in delta_repomd.distro_tags:
            new_repomd.add_distro_tag(tag[1], tag[0])
        for tag in delta_repomd.repo_tags:
            new_repomd.add_repo_tag(tag)
        for tag in delta_repomd.content_tags:
            new_repomd.add_content_tag(tag)

        old_records = dict([(record.type, record) for record in old_repomd.records])
        delta_records = dict([(record.type, record) for record in delta_repomd.records])

        # Prepare removedxml
        if "removed" in delta_records:
            removedxml_path = os.path.join(delta_path,
                                delta_records["removed"].location_href)
            removedxml.xml_parse(removedxml_path)
        else:
            self._warning("\"removed\" record is missing in repomd.xml "\
                          "of delta repo")

        # Important sanity check (repo without primary is definitely bad)
        if "primary" not in old_records or "primary" not in delta_records:
            raise DeltaRepoError("Missing primary metadata")

        # Detect type of checksum in the delta repomd.xml
        self.checksum_type = cr.checksum_type(delta_records["primary"].checksum_type)
        if self.checksum_type == cr.UNKNOWN_CHECKSUM:
            raise DeltaRepoError("Unknown checksum type detected: %s" % \
                    delta_records["primary"].checksum_type)

        # Detect whether unique (hash-prefixed) md filenames are used
        if delta_records["primary"].location_href.split("primary")[0] != "":
            hash_in_the_name = True

        # Apply delta on primary, filelists and other
        pri_old_fn = os.path.join(old_path, old_records["primary"].location_href)
        pri_delta_fn = os.path.join(delta_path, delta_records["primary"].location_href)
        pri_out_fn = os.path.join(new_repodata_path, "primary.xml.gz")
        pri_out_f_stat = cr.ContentStat(self.checksum_type)
        pri_out_f = cr.PrimaryXmlFile(pri_out_fn, cr.GZ_COMPRESSION)
        pri_db_fn = None
        pri_db = None
        if database:
            pri_db_fn = os.path.join(new_repodata_path, "primary.sqlite")
            pri_db = cr.PrimarySqlite(pri_db_fn)

        # filelists metadata is optional in the delta repo
        fil_old_fn = None
        fil_delta_fn = None
        fil_out_fn = None
        fil_out_f_stat = None
        fil_out_f = None
        fil_db_fn = None
        fil_db = None
        if "filelists" in delta_records:
            fil_old_fn = os.path.join(old_path, old_records["filelists"].location_href)
            fil_delta_fn = os.path.join(delta_path, delta_records["filelists"].location_href)
            fil_out_fn = os.path.join(new_repodata_path, "filelists.xml.gz")
            fil_out_f_stat = cr.ContentStat(self.checksum_type)
            fil_out_f = cr.FilelistsXmlFile(fil_out_fn, cr.GZ_COMPRESSION)
            if database:
                fil_db_fn = os.path.join(new_repodata_path, "filelists.sqlite")
                fil_db = cr.FilelistsSqlite(fil_db_fn)

        # other metadata is optional in the delta repo
        oth_old_fn = None
        oth_delta_fn = None
        oth_out_fn = None
        oth_out_f_stat = None
        oth_out_f = None
        oth_db_fn = None
        oth_db = None
        if "other" in delta_records:
            oth_old_fn = os.path.join(old_path, old_records["other"].location_href)
            oth_delta_fn = os.path.join(delta_path, delta_records["other"].location_href)
            oth_out_fn = os.path.join(new_repodata_path, "other.xml.gz")
            oth_out_f_stat = cr.ContentStat(self.checksum_type)
            oth_out_f = cr.OtherXmlFile(oth_out_fn, cr.GZ_COMPRESSION)
            if database:
                oth_db_fn = os.path.join(new_repodata_path, "other.sqlite")
                oth_db = cr.OtherSqlite(oth_db_fn)

        deltamodule = MainDeltaModule(id_type=self.id_type,
                                      logger=self.logger)
        ids = deltamodule.apply(pri_old_fn, pri_delta_fn, pri_out_f, pri_db,
                                fil_old_fn, fil_delta_fn, fil_out_f, fil_db,
                                oth_old_fn, oth_delta_fn, oth_out_f, oth_db,
                                removedxml)

        pri_out_f.close()
        # BUGFIX: fil_out_f/oth_out_f are None when the delta repo has no
        # filelists/other record -- the unconditional close() crashed here.
        if fil_out_f is not None:
            fil_out_f.close()
        if oth_out_f is not None:
            oth_out_f.close()

        # Check returned IDs
        cold_id, cnew_id = ids  # Calculated ids

        if cold_id != old_id:
            raise DeltaRepoError("Calculated old RepoId doesn't match!")

        if cnew_id != new_id:
            raise DeltaRepoError("Calculated new RepoId doesn't match!")

        self._debug("RepoIds match")

        # Prepare repomd.xml records
        pri_rec = cr.RepomdRecord("primary", pri_out_fn)
        pri_rec.load_contentstat(pri_out_f_stat)
        pri_rec.fill(self.checksum_type)
        if hash_in_the_name:
            pri_rec.rename_file()
        new_repomd.set_record(pri_rec)

        if database:
            pri_db.dbinfo_update(pri_rec.checksum)
            pri_db.close()
            pri_db_stat = cr.ContentStat(self.checksum_type)
            pri_db_compressed = pri_db_fn + ".bz2"
            cr.compress_file(pri_db_fn, None, cr.BZ2, pri_db_stat)
            os.remove(pri_db_fn)
            pri_db_rec = cr.RepomdRecord("primary_db", pri_db_compressed)
            pri_db_rec.load_contentstat(pri_db_stat)
            pri_db_rec.fill(self.checksum_type)
            if hash_in_the_name:
                pri_db_rec.rename_file()
            new_repomd.set_record(pri_db_rec)

        if fil_out_fn:
            fil_rec = cr.RepomdRecord("filelists", fil_out_fn)
            fil_rec.load_contentstat(fil_out_f_stat)
            fil_rec.fill(self.checksum_type)
            if hash_in_the_name:
                fil_rec.rename_file()
            new_repomd.set_record(fil_rec)

        # BUGFIX: guard on fil_db -- it is None when the delta repo lacks a
        # filelists record and this block previously crashed (and fil_rec
        # would have been undefined).
        if database and fil_db:
            fil_db.dbinfo_update(fil_rec.checksum)
            fil_db.close()
            fil_db_stat = cr.ContentStat(self.checksum_type)
            fil_db_compressed = fil_db_fn + ".bz2"
            cr.compress_file(fil_db_fn, None, cr.BZ2, fil_db_stat)
            os.remove(fil_db_fn)
            # BUGFIX: record type was "primary_db" (copy-paste error)
            fil_db_rec = cr.RepomdRecord("filelists_db", fil_db_compressed)
            fil_db_rec.load_contentstat(fil_db_stat)
            fil_db_rec.fill(self.checksum_type)
            if hash_in_the_name:
                fil_db_rec.rename_file()
            new_repomd.set_record(fil_db_rec)

        if oth_out_fn:
            oth_rec = cr.RepomdRecord("other", oth_out_fn)
            oth_rec.load_contentstat(oth_out_f_stat)
            oth_rec.fill(self.checksum_type)
            if hash_in_the_name:
                oth_rec.rename_file()
            new_repomd.set_record(oth_rec)

        # BUGFIX: same None guard and record-type fix as for filelists.
        if database and oth_db:
            oth_db.dbinfo_update(oth_rec.checksum)
            oth_db.close()
            oth_db_stat = cr.ContentStat(self.checksum_type)
            oth_db_compressed = oth_db_fn + ".bz2"
            cr.compress_file(oth_db_fn, None, cr.BZ2, oth_db_stat)
            os.remove(oth_db_fn)
            # BUGFIX: record type was "primary_db" (copy-paste error)
            oth_db_rec = cr.RepomdRecord("other_db", oth_db_compressed)
            oth_db_rec.load_contentstat(oth_db_stat)
            oth_db_rec.fill(self.checksum_type)
            if hash_in_the_name:
                oth_db_rec.rename_file()
            new_repomd.set_record(oth_db_rec)

        # Write out repomd.xml
        new_repomd.set_repoid(ids[1], self.id_type)
        new_repomd_path = os.path.join(new_repodata_path, "repomd.xml")
        new_repomd_xml = new_repomd.xml_dump()
        self._debug("Writing repomd.xml")
        # Context manager guarantees the handle is closed before the move.
        with open(new_repomd_path, "w") as repomd_file:
            repomd_file.write(new_repomd_xml)

        # Final move of the temp dir into its real place
        final_destination = os.path.join(out_path, "repodata/")
        if os.path.exists(final_destination):
            self._warning("Destination dir already exists! Removing %s" % \
                          final_destination)
            shutil.rmtree(final_destination)
        self._info("Moving %s -> %s" % (new_path, final_destination))
        os.rename(new_path, final_destination)