Example #1
    def test_compress_file(self):
        # Non-existent file
        self.assertRaises(IOError, cr.compress_file, self.nofile, None, cr.BZ2)

        # Compression - use the same name+suffix
        cr.compress_file(self.tmpfile, None, cr.BZ2)
        self.assertTrue(os.path.isfile(self.tmpfile + ".bz2"))

        # Compression - new name
        new_name = os.path.join(self.tmpdir, "foobar.gz")
        cr.compress_file(self.tmpfile, new_name, cr.GZ)
        self.assertTrue(os.path.isfile(new_name))

        # Compression - with stat
        stat = cr.ContentStat(cr.SHA256)
        cr.compress_file(self.tmpfile, None, cr.XZ, stat)
        self.assertTrue(os.path.isfile(self.tmpfile + ".xz"))
        self.assertEqual(stat.checksum, "e61ebaa6241e335c779194ce7af98c590f1"\
                                        "b26a749f219b997a0d7d5a773063b")
        self.assertEqual(stat.checksum_type, cr.SHA256)
        self.assertEqual(stat.size, len(self.content))

        # Check directory for unexpected files
        self.assertEqual(set(os.listdir(self.tmpdir)),
                         set(['file.bz2', 'file.xz', 'file', 'foobar.gz']))
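The pattern above, stripped of the test scaffolding, looks roughly like this; a minimal sketch assuming createrepo_c is imported as cr and that the (hypothetical) source path exists:

import createrepo_c as cr

src = "some/file.txt"          # hypothetical input file

# The stat reflects the original (uncompressed) content, as the size
# assertion in the test above suggests.
stat = cr.ContentStat(cr.SHA256)

# A None destination writes "<src>.xz" next to the source file.
cr.compress_file(src, None, cr.XZ, stat)

print(stat.checksum, stat.checksum_type, stat.size)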
Example #2
    def test_decompress_file(self):
        # Non-existent file
        self.assertRaises(IOError, cr.decompress_file, self.nofile, None,
                          cr.BZ2)

        tmpfile_gz_comp = os.path.join(self.tmpdir, "gzipedfile.gz")
        shutil.copy(FILE_TEXT_GZ, tmpfile_gz_comp)
        tmpfile_gz_comp_ns = os.path.join(self.tmpdir, "gzipedfile_no_suffix")
        shutil.copy(FILE_TEXT_GZ, tmpfile_gz_comp_ns)

        # Decompression - use the same name without suffix
        dest = os.path.join(self.tmpdir, "gzipedfile")
        cr.decompress_file(tmpfile_gz_comp, None, cr.GZ)
        self.assertTrue(os.path.isfile(dest))

        # Decompression - use the specific name
        dest = os.path.join(self.tmpdir, "decompressed.file")
        cr.decompress_file(tmpfile_gz_comp, dest, cr.GZ)
        self.assertTrue(os.path.isfile(dest))

        # Decompression - unknown suffix, ".decompressed" is appended by default
        dest = os.path.join(self.tmpdir, "gzipedfile_no_suffix.decompressed")
        cr.decompress_file(tmpfile_gz_comp_ns, None, cr.GZ)
        self.assertTrue(os.path.isfile(dest))

        # Decompression - with stat
        stat = cr.ContentStat(cr.SHA256)
        dest = os.path.join(self.tmpdir, "gzipedfile")
        cr.decompress_file(tmpfile_gz_comp, None, cr.AUTO_DETECT_COMPRESSION,
                           stat)
        self.assertTrue(os.path.isfile(dest))
        self.assertEqual(stat.checksum, FILE_TEXT_SHA256SUM)
        self.assertEqual(stat.checksum_type, cr.SHA256)
        self.assertEqual(stat.size, 910)
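A standalone sketch of the decompression calls exercised above, with a hypothetical compressed input assumed:

import createrepo_c as cr

src = "repodata/filelists.xml.gz"   # hypothetical compressed file

stat = cr.ContentStat(cr.SHA256)

# With a None destination the known suffix is stripped ("filelists.xml");
# AUTO_DETECT_COMPRESSION infers the codec from the file itself.
cr.decompress_file(src, None, cr.AUTO_DETECT_COMPRESSION, stat)

# stat now describes the decompressed payload: checksum, checksum_type, size.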
Example #3
        def prepare_paths_in_metadata(md, xmlclass, dbclass):
            if md is None:
                return

            notes = self._metadata_notes_from_plugin_bundle(md.metadata_type)
            if not notes:
                # TODO: Add flag to ignore this kind of warnings (?)
                self._warning("Metadata \"{0}\" doesn't have a record in "
                              "deltametadata.xml - Ignoring")
                return

            suffix = cr.compression_suffix(md.compression_type) or ""
            md.new_fn = os.path.join(
                md.out_dir, "{0}.xml{1}".format(md.metadata_type, suffix))
            md.new_f_stat = cr.ContentStat(md.checksum_type)
            md.new_f = xmlclass(md.new_fn, md.compression_type, md.new_f_stat)

            if self.globalbundle.force_database or notes.get(
                    "database") == "1":
                md.db_fn = os.path.join(md.out_dir,
                                        "{0}.sqlite".format(md.metadata_type))
                md.db = dbclass(md.db_fn)
            else:
                md.db_fn = None
                md.db = None
Example #4
    def gen_use_original(self, md, compression_type=cr.NO_COMPRESSION):
        """Function that takes original metadata file and
        copy it to the delta repo unmodified.
        Plugins could use this function when they cannot generate delta file
        for some reason (eg. file is newly added, so delta is
        meaningless/impossible)."""

        md.delta_fn = os.path.join(md.out_dir, os.path.basename(md.new_fn))

        # Compress or copy original file
        stat = None
        if compression_type != cr.NO_COMPRESSION:
            md.delta_fn += cr.compression_suffix(compression_type)
            stat = cr.ContentStat(md.checksum_type)
            cr.compress_file(md.new_fn, md.delta_fn, compression_type, stat)
        else:
            shutil.copy2(md.new_fn, md.delta_fn)

        # Prepare repomd record of xml file
        rec = cr.RepomdRecord(md.metadata_type, md.delta_fn)
        if stat is not None:
            rec.load_contentstat(stat)
        rec.fill(md.checksum_type)
        if self.globalbundle.unique_md_filenames:
            rec.rename_file()
            md.delta_fn = rec.location_real

        return rec
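The compress-then-record sequence used here recurs in several later examples; a condensed sketch with hypothetical paths and a SHA256 checksum type assumed:

import createrepo_c as cr

src = "repodata/comps.xml"            # hypothetical original metadata
dst = "delta/repodata/comps.xml.gz"   # hypothetical destination

stat = cr.ContentStat(cr.SHA256)
cr.compress_file(src, dst, cr.GZ, stat)

rec = cr.RepomdRecord("group", dst)
rec.load_contentstat(stat)    # fills the *_open fields from the stat
rec.fill(cr.SHA256)           # fills the remaining fields from the file on disk
rec.rename_file()             # optional: checksum-prefixed (unique) file name
print(rec.location_real)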
Example #5
    def test_contentstat(self):
        pkg = cr.package_from_rpm(PKG_ARCHER_PATH)
        self.assertTrue(pkg)

        pkg.time_file = 1
        pkg.time_build = 1

        cs = cr.ContentStat(cr.SHA256)
        self.assertEqual(cs.size, 0)
        self.assertEqual(cs.checksum_type, cr.SHA256)
        self.assertEqual(cs.checksum, None)

        path = os.path.join(self.tmpdir, "primary.xml.gz")
        f = cr.PrimaryXmlFile(path, cr.GZ_COMPRESSION, cs)
        self.assertTrue(f)
        self.assertTrue(os.path.isfile(path))
        f.add_pkg(pkg)
        f.close()

        self.assertTrue(os.path.isfile(path))

        self.assertEqual(cs.size, 2668)
        self.assertEqual(cs.checksum_type, cr.SHA256)
        self.assertEqual(cs.checksum, "67bc6282915fad80dc11f3d7c3210977a0bde"\
                                      "05a762256d86083c2447d425776")
Example #6
    def __create_xml(queues, xml_path, xml_func, name):
        cs = cr.ContentStat(cr.SHA256)
        xml = xml_func(xml_path, contentstat=cs)

        xml.set_num_of_pkgs(len(pkglist))

        for pkg in pkglist:
            xml.add_pkg(pkg)

        xml.close()

        queues['master'].put(
            ((name, xml_path), (cs.checksum, cs.size, cs.checksum_type)), True)
Example #7
    def _gen_db_from_xml(self, md):
        """Gen sqlite db from the delta metadata.
        """
        mdtype = md.metadata_type

        if mdtype == "primary":
            dbclass = cr.PrimarySqlite
            parsefunc = cr.xml_parse_primary
        elif mdtype == "filelists":
            dbclass = cr.FilelistsSqlite
            parsefunc = cr.xml_parse_filelists
        elif mdtype == "other":
            dbclass = cr.OtherSqlite
            parsefunc = cr.xml_parse_other
        else:
            raise DeltaRepoPluginError(
                "Unsupported type of metadata {0}".format(mdtype))

        src_fn = md.new_fn
        src_rec = md.new_rec

        md.db_fn = os.path.join(md.out_dir, "{0}.sqlite".format(mdtype))
        db = dbclass(md.db_fn)

        def pkgcb(pkg):
            db.add_pkg(pkg)

        parsefunc(src_fn, pkgcb=pkgcb)

        db.dbinfo_update(src_rec.checksum)
        db.close()

        db_stat = cr.ContentStat(md.checksum_type)
        db_compressed = md.db_fn + ".bz2"
        cr.compress_file(md.db_fn, None, cr.BZ2, db_stat)
        os.remove(md.db_fn)

        # Prepare repomd record of database file
        db_rec = cr.RepomdRecord("{0}_db".format(md.metadata_type),
                                 db_compressed)
        db_rec.load_contentstat(db_stat)
        db_rec.fill(md.checksum_type)
        if self.globalbundle.unique_md_filenames:
            db_rec.rename_file()

        return db_rec
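The XML-to-sqlite step above can be reduced to the following sketch, assuming a primary.xml file and using a placeholder checksum for dbinfo_update:

import createrepo_c as cr

xml_path = "repodata/primary.xml.gz"   # hypothetical delta metadata
db_path = "repodata/primary.sqlite"    # hypothetical output database

db = cr.PrimarySqlite(db_path)

# Stream packages from the XML straight into the database.
def pkgcb(pkg):
    db.add_pkg(pkg)

cr.xml_parse_primary(xml_path, pkgcb=pkgcb)

db.dbinfo_update("<checksum of the primary.xml repomd record>")  # placeholder
db.close()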
Example #8
    def test_contentstat_ref_in_crfile(self):
        """Test if reference is saved properly"""

        cs = cr.ContentStat(cr.SHA256)
        self.assertEqual(cs.size, 0)
        self.assertEqual(cs.checksum_type, cr.SHA256)
        self.assertEqual(cs.checksum, None)

        path = os.path.join(self.tmpdir, "foofile.gz")
        f = cr.CrFile(path, cr.MODE_WRITE, cr.GZ_COMPRESSION, cs)
        self.assertTrue(f)
        self.assertTrue(os.path.isfile(path))
        del cs
        f.write("foobar")
        f.close()

        self.assertTrue(os.path.isfile(path))
Example #9
    def test_repomdrecord_load_contentstat(self):
        rec = cr.RepomdRecord("primary", None)
        self.assertTrue(rec)

        stat = cr.ContentStat(cr.SHA256)
        stat.checksum = "foobar"
        stat.checksum_type = cr.SHA256
        stat.size = 123

        self.assertEqual(rec.checksum_open, None)
        self.assertEqual(rec.checksum_open_type, None)
        self.assertEqual(rec.size, 0)

        rec.load_contentstat(stat)

        self.assertEqual(rec.checksum_open, "foobar")
        self.assertEqual(rec.checksum_open_type, "sha256")
        self.assertEqual(rec.size_open, 123)
Example #10
    def __create_db(queues, db_path, db_func, name):
        db = db_func(db_path)

        for pkg in pkglist:
            db.add_pkg(pkg)

        db.dbinfo_update(queues[name].get(True))

        db.close()

        cs = cr.ContentStat(cr.SHA256)
        cr.compress_file_with_stat(
            db_path, db_path + cr.compression_suffix(cr.BZ2_COMPRESSION),
            cr.BZ2_COMPRESSION, cs)
        os.remove(db_path)
        queues['master'].put(
            ((name + '_db',
              db_path + cr.compression_suffix(cr.BZ2_COMPRESSION)),
             (cs.checksum, cs.size, cs.checksum_type)), True)
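Example #10 calls compress_file_with_stat with an explicit destination and a ContentStat, in the same way compress_file is called above; a minimal sketch under that assumption, with a hypothetical source file:

import createrepo_c as cr

src = "repodata/primary.sqlite"                         # hypothetical database
dst = src + cr.compression_suffix(cr.BZ2_COMPRESSION)   # ".../primary.sqlite.bz2"

cs = cr.ContentStat(cr.SHA256)
cr.compress_file_with_stat(src, dst, cr.BZ2_COMPRESSION, cs)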
Example #11
    def apply(self, metadata):

        gen_repomd_recs = []

        md_group = metadata.get("group")
        md_group_gz = metadata.get("group_gz")

        if md_group and (not md_group.delta_fn_exists
                         and not md_group.old_fn_exists):
            md_group = None

        if md_group_gz and (not md_group_gz.delta_fn_exists
                            and not md_group_gz.old_fn_exists):
            md_group_gz = None

        if md_group:
            notes = self._metadata_notes_from_plugin_bundle(
                md_group.metadata_type)
            rc, rec = self._apply_basic_delta(md_group, notes)
            assert rc
            if rec:
                gen_repomd_recs.append(rec)
                if notes.get("gen_group_gz"):
                    # Generate group_gz metadata from the group metadata
                    stat = cr.ContentStat(md_group.checksum_type)
                    group_gz_fn = md_group.new_fn + ".gz"
                    cr.compress_file(md_group.new_fn, group_gz_fn, cr.GZ, stat)
                    rec = cr.RepomdRecord("group_gz", group_gz_fn)
                    rec.load_contentstat(stat)
                    rec.fill(md_group.checksum_type)
                    if self.globalbundle.unique_md_filenames:
                        rec.rename_file()
                    gen_repomd_recs.append(rec)
        elif md_group_gz:
            notes = self._metadata_notes_from_plugin_bundle(
                md_group_gz.metadata_type)
            rc, rec = self._apply_basic_delta(md_group_gz, notes)
            assert rc
            if rec:
                gen_repomd_recs.append(rec)

        return gen_repomd_recs
Example #12
        def prepare_paths_in_metadata(md, xmlclass):
            if md is None:
                return None

            # Make a note of whether the database should be generated
            db_available = metadata.get(md.metadata_type + "_db").new_fn_exists
            if db_available or self.globalbundle.force_database:
                metadata_notes.setdefault(md.metadata_type,
                                          {})["database"] = "1"
            else:
                metadata_notes.setdefault(md.metadata_type,
                                          {})["database"] = "0"

            suffix = cr.compression_suffix(md.compression_type) or ""
            md.delta_fn = os.path.join(
                md.out_dir, "{0}.xml{1}".format(md.metadata_type, suffix))
            md.delta_f_stat = cr.ContentStat(md.checksum_type)
            md.delta_f = xmlclass(md.delta_fn, md.compression_type,
                                  md.delta_f_stat)
            return md
Example #13
        def finish_metadata(md):
            if md is None:
                return

            # Close XML file
            md.new_f.close()

            # Prepare repomd record of xml file
            rec = cr.RepomdRecord(md.metadata_type, md.new_fn)
            rec.load_contentstat(md.new_f_stat)
            rec.fill(md.checksum_type)
            if self.globalbundle.unique_md_filenames:
                rec.rename_file()

            md.new_rec = rec
            md.new_fn_exists = True

            gen_repomd_recs.append(rec)

            # Prepare database
            if hasattr(md, "db") and md.db:
                self._debug("Generating database: {0}".format(md.db_fn))
                md.db.dbinfo_update(rec.checksum)
                md.db.close()
                db_stat = cr.ContentStat(md.checksum_type)
                db_compressed = md.db_fn + ".bz2"
                cr.compress_file(md.db_fn, None, cr.BZ2, db_stat)
                os.remove(md.db_fn)

                # Prepare repomd record of database file
                db_rec = cr.RepomdRecord("{0}_db".format(md.metadata_type),
                                         db_compressed)
                db_rec.load_contentstat(db_stat)
                db_rec.fill(md.checksum_type)
                if self.globalbundle.unique_md_filenames:
                    db_rec.rename_file()

                gen_repomd_recs.append(db_rec)
Example #14
    def test_contentstat_ref_in_xmlfile(self):
        """Test if reference is saved properly"""

        pkg = cr.package_from_rpm(PKG_ARCHER_PATH)
        self.assertTrue(pkg)

        pkg.time_file = 1
        pkg.time_build = 1

        cs = cr.ContentStat(cr.SHA256)
        self.assertEqual(cs.size, 0)
        self.assertEqual(cs.checksum_type, cr.SHA256)
        self.assertEqual(cs.checksum, None)

        path = os.path.join(self.tmpdir, "primary.xml.gz")
        f = cr.PrimaryXmlFile(path, cr.GZ_COMPRESSION, cs)
        self.assertTrue(f)
        self.assertTrue(os.path.isfile(path))
        del cs
        f.add_pkg(pkg)
        f.close()

        self.assertTrue(os.path.isfile(path))
Example #15
    def gen(self):

        # Prepare output path
        os.mkdir(self.delta_repodata_path)

        # Set of types of processed metadata records ("primary", "primary_db"...)
        processed_metadata = set()

        for plugin in PLUGINS:

            # Prepare metadata for the plugin
            metadata_objects = {}
            for metadata_name in plugin.METADATA:
                metadata_object = self._new_metadata(metadata_name)
                if metadata_object is not None:
                    metadata_objects[metadata_name] = metadata_object

            # Skip plugin if no supported metadata available
            if not metadata_objects:
                self._debug("Plugin {0}: Skipped - None of supported " \
                            "metadata {1} available".format(
                            plugin.NAME, plugin.METADATA))
                continue

            # Prepare plugin bundle
            pluginbundle = PluginBundle(plugin.NAME, plugin.VERSION)
            self.deltametadata.add_pluginbundle(pluginbundle)

            # Use the plugin
            self._debug("Plugin {0}: Active".format(plugin.NAME))
            plugin_instance = plugin(pluginbundle,
                                     self.globalbundle,
                                     logger=self._get_logger())
            repomd_records = plugin_instance.gen(metadata_objects)

            # Put repomd records from the processed metadata into repomd
            self._debug("Plugin {0}: Processed {1} record(s) " \
                "and produced:".format(plugin.NAME, metadata_objects.keys()))
            for rec in repomd_records:
                self._debug(" - {0}".format(rec.type))
                self.delta_repomd.set_record(rec)

            # Organization stuff
            for md in metadata_objects.keys():
                processed_metadata.add(md)

        # Process the rest of the metadata files
        metadata_objects = {}
        for rectype, rec in self.new_records.items():
            if rectype not in processed_metadata:
                metadata_object = self._new_metadata(rectype)
                if metadata_object is not None:
                    self._debug("To be processed by general delta plugin: " \
                                "{0}".format(rectype))
                    metadata_objects[rectype] = metadata_object
                else:
                    self._debug("Not processed - even by general delta " \
                                "plugin: {0}".format(rectype))

        if metadata_objects:
            # Use the plugin
            pluginbundle = PluginBundle(GENERAL_PLUGIN.NAME,
                                        GENERAL_PLUGIN.VERSION)
            self.deltametadata.add_pluginbundle(pluginbundle)
            self._debug("Plugin {0}: Active".format(GENERAL_PLUGIN.NAME))
            plugin_instance = GENERAL_PLUGIN(pluginbundle,
                                             self.globalbundle,
                                             logger=self._get_logger())
            repomd_records = plugin_instance.gen(metadata_objects)

            # Put repomd records from the processed metadata into repomd
            self._debug("Plugin {0}: Processed {1} record(s) " \
                "and produced:".format(GENERAL_PLUGIN.NAME, metadata_objects.keys()))
            for rec in repomd_records:
                self._debug(" - {0}".format(rec.type))
                self.delta_repomd.set_record(rec)

        # Check if calculated contenthashes match
        # and calculate them if they don't exist
        self.check_content_hashes()

        # Write out deltametadata.xml
        self.fill_deltametadata()
        deltametadata_path = os.path.join(self.delta_repodata_path,
                                          "deltametadata.xml")
        stat = cr.ContentStat(self.checksum_type)
        deltametadata_path = self.deltametadata.dump(
            deltametadata_path,
            compression_type=self.compression_type,
            stat=stat)

        deltametadata_rec = cr.RepomdRecord("deltametadata",
                                            deltametadata_path)
        deltametadata_rec.load_contentstat(stat)
        deltametadata_rec.fill(self.checksum_type)
        if self.unique_md_filenames:
            deltametadata_rec.rename_file()
        self.delta_repomd.set_record(deltametadata_rec)

        # Prepare and write out the new repomd.xml
        self._debug("Preparing repomd.xml ...")
        deltacontenthash = "{0}-{1}".format(
            self.globalbundle.calculated_old_contenthash,
            self.globalbundle.calculated_new_contenthash)
        self.delta_repomd.set_contenthash(deltacontenthash,
                                          self.contenthash_type_str)
        self.delta_repomd.sort_records()
        delta_repomd_xml = self.delta_repomd.xml_dump()

        self._debug("Writing repomd.xml ...")
        with open(self.delta_repomd_path, "w") as f:
            f.write(delta_repomd_xml)

        # Final move
        if os.path.exists(self.final_path):
            self._warning("Destination dir already exists! Removing %s" % \
                          self.final_path)
            shutil.rmtree(self.final_path)
        self._debug("Moving %s -> %s" %
                    (self.delta_repodata_path, self.final_path))
        os.rename(self.delta_repodata_path, self.final_path)
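The final repomd.xml assembly in gen() boils down to the following sketch, assuming delta_repomd above is a cr.Repomd and recs is a list of previously generated RepomdRecord objects:

import createrepo_c as cr

repomd = cr.Repomd()              # assumption: delta_repomd is a cr.Repomd
for rec in recs:                  # recs: hypothetical list of cr.RepomdRecord
    repomd.set_record(rec)

repomd.set_contenthash("old-hash-new-hash", "sha256")   # placeholder values
repomd.sort_records()

with open("repodata/repomd.xml", "w") as f:
    f.write(repomd.xml_dump())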