Example #1
    def gen_use_original(self, md, compression_type=cr.NO_COMPRESSION):
        """Function that takes original metadata file and
        copy it to the delta repo unmodified.
        Plugins could use this function when they cannot generate delta file
        for some reason (eg. file is newly added, so delta is
        meaningless/impossible)."""

        md.delta_fn = os.path.join(md.out_dir, os.path.basename(md.new_fn))

        # Compress or copy original file
        stat = None
        if compression_type != cr.NO_COMPRESSION:
            md.delta_fn += cr.compression_suffix(compression_type)
            stat = cr.ContentStat(md.checksum_type)
            cr.compress_file(md.new_fn, md.delta_fn, compression_type, stat)
        else:
            shutil.copy2(md.new_fn, md.delta_fn)

        # Prepare repomd record of xml file
        rec = cr.RepomdRecord(md.metadata_type, md.delta_fn)
        if stat is not None:
            rec.load_contentstat(stat)
        rec.fill(md.checksum_type)
        if self.globalbundle.unique_md_filenames:
            rec.rename_file()
            md.delta_fn = rec.location_real

        return rec
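
A minimal standalone sketch of the compress-and-record pattern used above, outside the plugin class (the paths, metadata type, and checksum type below are hypothetical):

import createrepo_c as cr

src = "repodata/comps.xml"  # hypothetical uncompressed metadata file
dst = src + cr.compression_suffix(cr.GZ_COMPRESSION)

stat = cr.ContentStat(cr.SHA256)  # collects checksum/size of the uncompressed data
cr.compress_file(src, dst, cr.GZ_COMPRESSION, stat)

rec = cr.RepomdRecord("group_gz", dst)
rec.load_contentstat(stat)  # reuse the collected stats instead of re-reading the file
rec.fill(cr.SHA256)
rec.rename_file()  # prefix the filename with its checksum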
Example #2
def insert_in_repo(comp_type, repodata, filetype, extension, source):
    """
    Inject a file into the repodata with the help of createrepo_c.

    Args:
        comp_type (int): createrepo_c compression type indication.
        repodata (basestring): The path to the repo where the metadata will be inserted.
        filetype (basestring): What type of metadata will be inserted by createrepo_c.
            This does allow any string to be inserted (custom types). Some
            types are commonly used with dnf repos, such as primary,
            updateinfo, comps, filelists, etc.
        extension (basestring): The file extension (xml, sqlite).
        source (basestring): A file path. File holds the dump of metadata until
            copied to the repodata folder.
    """
    log.info('Inserting %s.%s into %s', filetype, extension, repodata)
    target_fname = os.path.join(repodata, '%s.%s' % (filetype, extension))
    shutil.copyfile(source, target_fname)
    repomd_xml = os.path.join(repodata, 'repomd.xml')
    repomd = cr.Repomd(repomd_xml)
    # create a new record for our repomd.xml
    rec = cr.RepomdRecord(filetype, target_fname)
    # compress our metadata file with the comp_type
    rec_comp = rec.compress_and_fill(cr.SHA256, comp_type)
    # add hash to the compressed metadata file
    rec_comp.rename_file()
    # set type of metadata
    rec_comp.type = filetype
    # insert metadata about our metadata in repomd.xml
    repomd.set_record(rec_comp)
    with open(repomd_xml, 'w') as repomd_file:
        repomd_file.write(repomd.xml_dump())
    os.unlink(target_fname)
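
A hypothetical invocation of insert_in_repo, assuming an existing repo under /srv/repo and a metadata dump in /tmp:

import createrepo_c as cr

insert_in_repo(cr.GZ_COMPRESSION, '/srv/repo/repodata', 'updateinfo', 'xml',
               '/tmp/updateinfo-dump.xml')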
Example #3
    def xxx_repomdrecord_fill(self):
        shutil.copyfile(REPO_00_PRIXML, self.path00)
        self.assertTrue(os.path.exists(self.path00))

        rec = cr.RepomdRecord("primary", self.path00)
        self.assertTrue(rec)
        rec.fill(cr.SHA256)
        rec.rename_file()

        # Filename should contain a (valid) checksum
        self.assertEqual(os.listdir(self.tmpdir),
            ['dabe2ce5481d23de1f4f52bdcfee0f9af98316c9e0de2ce8123adeefa0dd08b9-primary.xml.gz'])
Example #4
    def xxx_repomdrecord_fill(self):
        shutil.copyfile(REPO_00_PRIXML, self.path00)
        self.assertTrue(os.path.exists(self.path00))

        rec = cr.RepomdRecord("primary", self.path00)
        self.assertTrue(rec)
        rec.fill(cr.SHA256)
        rec.rename_file()

        # Filename should contain a (valid) checksum
        self.assertEqual(os.listdir(self.tmpdir), [
            '1cb61ea996355add02b1426ed4c1780ea75ce0c04c5d1107c025c3fbd7d8bcae-primary.xml.gz'
        ])

    def test_repomdrecord_setters(self):
        shutil.copyfile(REPO_00_PRIXML, self.path00)
        self.assertTrue(os.path.exists(self.path00))

        rec = cr.RepomdRecord("primary", self.path00)
        self.assertTrue(rec)

        rec.fill(cr.SHA256)

        self.assertEqual(rec.type, "primary")
        self.assertEqual(rec.location_real, self.path00)
        self.assertEqual(rec.location_href, "repodata/primary.xml.gz")
        self.assertEqual(
            rec.checksum,
            "dabe2ce5481d23de1f4f52bdcfee0f9af98316c9e0de2ce8123adeefa0dd08b9")
        self.assertEqual(rec.checksum_type, "sha256")
        self.assertEqual(
            rec.checksum_open,
            "e1e2ffd2fb1ee76f87b70750d00ca5677a252b397ab6c2389137a0c33e7b359f")
        self.assertEqual(rec.checksum_open_type, "sha256")
        self.assertTrue(rec.timestamp > 0)
        self.assertEqual(rec.size, 134)
        self.assertEqual(rec.size_open, 167)
        self.assertEqual(rec.db_ver, 10)

        # Set new values

        rec.type = "foo"
        rec.location_href = "repodata/foo.xml.gz"
        rec.checksum = "foobar11"
        rec.checksum_type = "foo1"
        rec.checksum_open = "foobar22"
        rec.checksum_open_type = "foo2"
        rec.timestamp = 123
        rec.size = 456
        rec.size_open = 789
        rec.db_ver = 11

        # Check

        self.assertEqual(rec.type, "foo")
        self.assertEqual(rec.location_real, self.path00)
        self.assertEqual(rec.location_href, "repodata/foo.xml.gz")
        self.assertEqual(rec.checksum, "foobar11")
        self.assertEqual(rec.checksum_type, "foo1")
        self.assertEqual(rec.checksum_open, "foobar22")
        self.assertEqual(rec.checksum_open_type, "foo2")
        self.assertEqual(rec.timestamp, 123)
        self.assertEqual(rec.size, 456)
        self.assertEqual(rec.size_open, 789)
        self.assertEqual(rec.db_ver, 11)
Example #6
    def test_repomdrecord_setters(self):
        shutil.copyfile(REPO_00_PRIXML, self.path00)
        self.assertTrue(os.path.exists(self.path00))

        rec = cr.RepomdRecord("primary", self.path00)
        self.assertTrue(rec)

        rec.fill(cr.SHA256)

        self.assertEqual(rec.type, "primary")
        self.assertEqual(rec.location_real, self.path00)
        self.assertEqual(rec.location_href, "repodata/primary.xml.gz")
        self.assertEqual(
            rec.checksum,
            "1cb61ea996355add02b1426ed4c1780ea75ce0c04c5d1107c025c3fbd7d8bcae")
        self.assertEqual(rec.checksum_type, "sha256")
        self.assertEqual(
            rec.checksum_open,
            "e1e2ffd2fb1ee76f87b70750d00ca5677a252b397ab6c2389137a0c33e7b359f")
        self.assertEqual(rec.checksum_open_type, "sha256")
        self.assertTrue(rec.timestamp > 0)
        self.assertEqual(rec.size, 134)
        self.assertEqual(rec.size_open, 167)
        self.assertEqual(rec.db_ver, 10)

        # Set new values

        rec.type = "foo"
        rec.location_href = "repodata/foo.xml.gz"
        rec.checksum = "foobar11"
        rec.checksum_type = "foo1"
        rec.checksum_open = "foobar22"
        rec.checksum_open_type = "foo2"
        rec.timestamp = 123
        rec.size = 456
        rec.size_open = 789
        rec.db_ver = 11

        # Check

        self.assertEqual(rec.type, "foo")
        self.assertEqual(rec.location_real, self.path00)
        self.assertEqual(rec.location_href, "repodata/foo.xml.gz")
        self.assertEqual(rec.checksum, "foobar11")
        self.assertEqual(rec.checksum_type, "foo1")
        self.assertEqual(rec.checksum_open, "foobar22")
        self.assertEqual(rec.checksum_open_type, "foo2")
        self.assertEqual(rec.timestamp, 123)
        self.assertEqual(rec.size, 456)
        self.assertEqual(rec.size_open, 789)
        self.assertEqual(rec.db_ver, 11)
Example #7
        def finish_metadata(md):
            if md is None:
                return

            # Close XML file
            md.new_f.close()

            # Prepare repomd record of xml file
            rec = cr.RepomdRecord(md.metadata_type, md.new_fn)
            rec.load_contentstat(md.new_f_stat)
            rec.fill(md.checksum_type)
            if self.globalbundle.unique_md_filenames:
                rec.rename_file()

            md.new_rec = rec
            md.new_fn_exists = True

            gen_repomd_recs.append(rec)

            # Prepare database
            if hasattr(md, "db") and md.db:
                self._debug("Generating database: {0}".format(md.db_fn))
                md.db.dbinfo_update(rec.checksum)
                md.db.close()
                db_stat = cr.ContentStat(md.checksum_type)
                db_compressed = md.db_fn + ".bz2"
                cr.compress_file(md.db_fn, None, cr.BZ2, db_stat)
                os.remove(md.db_fn)

                # Prepare repomd record of database file
                db_rec = cr.RepomdRecord("{0}_db".format(md.metadata_type),
                                         db_compressed)
                db_rec.load_contentstat(db_stat)
                db_rec.fill(md.checksum_type)
                if self.globalbundle.unique_md_filenames:
                    db_rec.rename_file()

                gen_repomd_recs.append(db_rec)
Example #8
def modifyrepo(filename, repodata):
    repodata = os.path.join(repodata, 'repodata')
    uinfo_xml = os.path.join(repodata, os.path.basename(filename))
    shutil.copyfile(filename, uinfo_xml)

    uinfo_rec = cr.RepomdRecord('updateinfo', uinfo_xml)
    uinfo_rec.fill(cr.SHA256)
    uinfo_rec.rename_file()

    repomd_xml = os.path.join(repodata, 'repomd.xml')
    repomd = cr.Repomd(repomd_xml)
    repomd.set_record(uinfo_rec)
    with open(repomd_xml, 'w') as repomd_file:
        repomd_file.write(repomd.xml_dump())
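
Hypothetical usage, assuming a repo rooted at /srv/repo (the function appends the repodata/ component itself):

modifyrepo('/tmp/updateinfo.xml', '/srv/repo')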
Example #9
    def modifyrepo(self, filename):
        """Inject a file into the repodata for each architecture"""
        for arch in os.listdir(self.repo_path):
            repodata = os.path.join(self.repo_path, arch, 'repodata')
            log.info('Inserting %s into %s', filename, repodata)
            uinfo_xml = os.path.join(repodata, 'updateinfo.xml')
            shutil.copyfile(filename, uinfo_xml)
            repomd_xml = os.path.join(repodata, 'repomd.xml')
            repomd = cr.Repomd(repomd_xml)
            uinfo_rec = cr.RepomdRecord('updateinfo', uinfo_xml)
            uinfo_rec_comp = uinfo_rec.compress_and_fill(self.hash_type, self.comp_type)
            uinfo_rec_comp.rename_file()
            uinfo_rec_comp.type = 'updateinfo'
            repomd.set_record(uinfo_rec_comp)
            with open(repomd_xml, 'w') as repomd_file:
                repomd_file.write(repomd.xml_dump())
            os.unlink(uinfo_xml)
Example #10
    def _gen_db_from_xml(self, md):
        """Gen sqlite db from the delta metadata.
        """
        mdtype = md.metadata_type

        if mdtype == "primary":
            dbclass = cr.PrimarySqlite
            parsefunc = cr.xml_parse_primary
        elif mdtype == "filelists":
            dbclass = cr.FilelistsSqlite
            parsefunc = cr.xml_parse_filelists
        elif mdtype == "other":
            dbclass = cr.OtherSqlite
            parsefunc = cr.xml_parse_other
        else:
            raise DeltaRepoPluginError(
                "Unsupported type of metadata {0}".format(mdtype))

        src_fn = md.new_fn
        src_rec = md.new_rec

        md.db_fn = os.path.join(md.out_dir, "{0}.sqlite".format(mdtype))
        db = dbclass(md.db_fn)

        def pkgcb(pkg):
            db.add_pkg(pkg)

        parsefunc(src_fn, pkgcb=pkgcb)

        db.dbinfo_update(src_rec.checksum)
        db.close()

        db_stat = cr.ContentStat(md.checksum_type)
        db_compressed = md.db_fn + ".bz2"
        cr.compress_file(md.db_fn, None, cr.BZ2, db_stat)
        os.remove(md.db_fn)

        # Prepare repomd record of database file
        db_rec = cr.RepomdRecord("{0}_db".format(md.metadata_type),
                                 db_compressed)
        db_rec.load_contentstat(db_stat)
        db_rec.fill(md.checksum_type)
        if self.globalbundle.unique_md_filenames:
            db_rec.rename_file()

        return db_rec
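
The same callback-driven parsing pattern as a minimal sketch outside the plugin class; the paths and the dbinfo checksum below are hypothetical placeholders:

import createrepo_c as cr

db = cr.PrimarySqlite("/tmp/primary.sqlite")

def pkgcb(pkg):
    # invoked once for every package parsed from the XML
    db.add_pkg(pkg)

cr.xml_parse_primary("/tmp/primary.xml.gz", pkgcb=pkgcb)
db.dbinfo_update("checksum-of-the-primary-xml")  # normally the repomd record's checksum
db.close()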
Example #11
    def test_repomdrecord_load_contentstat(self):
        rec = cr.RepomdRecord("primary", None)
        self.assertTrue(rec)

        stat = cr.ContentStat(cr.SHA256)
        stat.checksum = "foobar"
        stat.checksum_type = cr.SHA256
        stat.size = 123

        self.assertEqual(rec.checksum_open, None)
        self.assertEqual(rec.checksum_open_type, None)
        self.assertEqual(rec.size, 0)

        rec.load_contentstat(stat)

        self.assertEqual(rec.checksum_open, "foobar")
        self.assertEqual(rec.checksum_open_type, "sha256")
        self.assertEqual(rec.size_open, 123)
Example #12
    def test_repomdrecord_compress_and_fill(self):
        open(self.path01, "w").write("foobar\ncontent\nhh\n")
        self.assertTrue(os.path.exists(self.path01))

        rec = cr.RepomdRecord("primary", self.path01)
        self.assertTrue(rec)
        rec_compressed = rec.compress_and_fill(cr.SHA256, cr.GZ_COMPRESSION)

        # A new compressed file should be created
        self.assertEqual(sorted(os.listdir(self.tmpdir)),
            sorted(['primary.xml.gz', 'primary.xml']))

        rec.rename_file()
        rec_compressed.rename_file()

        # Filename should contain a (valid) checksum
        self.assertEqual(sorted(os.listdir(self.tmpdir)),
            sorted(['10091f8e2e235ae875cb18c91c443891c7f1a599d41f44d518e8af759a6c8109-primary.xml.gz',
                    'b33fc63178d852333a826385bc15d9b72cb6658be7fb927ec28c4e40b5d426fb-primary.xml']))

    def test_repomdrecord_fill(self):
        shutil.copyfile(REPO_00_PRIXML, self.path00)
        self.assertTrue(os.path.exists(self.path00))

        rec = cr.RepomdRecord("primary", self.path00)
        self.assertTrue(rec)

        self.assertEqual(rec.location_real, self.path00)
        self.assertEqual(rec.location_href, "repodata/primary.xml.gz")
        self.assertEqual(rec.location_base, None)
        self.assertEqual(rec.checksum, None)
        self.assertEqual(rec.checksum_type, None)
        self.assertEqual(rec.checksum_open, None)
        self.assertEqual(rec.checksum_open_type, None)
        self.assertEqual(rec.timestamp, 0)
        self.assertEqual(rec.size, 0)
        self.assertEqual(rec.size_open, -1)
        self.assertEqual(rec.db_ver, 0)

        rec.fill(cr.SHA256)

        self.assertEqual(rec.location_real, self.path00)
        self.assertEqual(rec.location_href, "repodata/primary.xml.gz")
        self.assertEqual(rec.location_base, None)
        self.assertEqual(
            rec.checksum,
            "dabe2ce5481d23de1f4f52bdcfee0f9af98316c9e0de2ce8123adeefa0dd08b9")
        self.assertEqual(rec.checksum_type, "sha256")
        self.assertEqual(
            rec.checksum_open,
            "e1e2ffd2fb1ee76f87b70750d00ca5677a252b397ab6c2389137a0c33e7b359f")
        self.assertEqual(rec.checksum_open_type, "sha256")
        self.assertTrue(rec.timestamp > 0)
        self.assertEqual(rec.size, 134)
        self.assertEqual(rec.size_open, 167)
        self.assertEqual(rec.db_ver, 10)

        rec.rename_file()

        # Filename should contain a (valid) checksum
        self.assertEqual(os.listdir(self.tmpdir), [
            'dabe2ce5481d23de1f4f52bdcfee0f9af98316c9e0de2ce8123adeefa0dd08b9-primary.xml.gz'
        ])
Example #14
    def apply_use_original(self, md, decompress=False):
        """Reversal function for the gen_use_original"""
        md.new_fn = os.path.join(md.out_dir, os.path.basename(md.delta_fn))

        if decompress:
            md.new_fn = md.new_fn.rsplit('.', 1)[0]
            cr.decompress_file(md.delta_fn, md.new_fn,
                               cr.AUTO_DETECT_COMPRESSION)
        else:
            shutil.copy2(md.delta_fn, md.new_fn)

        # Prepare repomd record of xml file
        rec = cr.RepomdRecord(md.metadata_type, md.new_fn)
        rec.fill(md.checksum_type)
        if self.globalbundle.unique_md_filenames:
            rec.rename_file()
            md.new_fn = rec.location_real

        return rec
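
A minimal sketch of the decompression step above, with hypothetical paths; AUTO_DETECT_COMPRESSION makes createrepo_c detect the compression format of the input file:

import createrepo_c as cr

cr.decompress_file("repodata/comps.xml.gz", "repodata/comps.xml",
                   cr.AUTO_DETECT_COMPRESSION)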
Example #15
    def apply(self, metadata):

        gen_repomd_recs = []

        md_group = metadata.get("group")
        md_group_gz = metadata.get("group_gz")

        if md_group and (not md_group.delta_fn_exists
                         and not md_group.old_fn_exists):
            md_group = None

        if md_group_gz and (not md_group_gz.delta_fn_exists
                            and not md_group_gz.old_fn_exists):
            md_group_gz = None

        if md_group:
            notes = self._metadata_notes_from_plugin_bundle(
                md_group.metadata_type)
            rc, rec = self._apply_basic_delta(md_group, notes)
            assert rc
            if rec:
                gen_repomd_recs.append(rec)
                if notes.get("gen_group_gz"):
                    # Gen group_gz metadata from the group metadata
                    stat = cr.ContentStat(md_group.checksum_type)
                    group_gz_fn = md_group.new_fn + ".gz"
                    cr.compress_file(md_group.new_fn, group_gz_fn, cr.GZ, stat)
                    rec = cr.RepomdRecord("group_gz", group_gz_fn)
                    rec.load_contentstat(stat)
                    rec.fill(md_group.checksum_type)
                    if self.globalbundle.unique_md_filenames:
                        rec.rename_file()
                    gen_repomd_recs.append(rec)
        elif md_group_gz:
            notes = self._metadata_notes_from_plugin_bundle(
                md_group_gz.metadata_type)
            rc, rec = self._apply_basic_delta(md_group_gz, notes)
            assert rc
            if rec:
                gen_repomd_recs.append(rec)

        return gen_repomd_recs
Example #16
def insert_in_repo(comp_type, repodata, filetype, extension, source, zchunk):
    """
    Inject a file into the repodata with the help of createrepo_c.

    Args:
        comp_type (int): createrepo_c compression type indication.
        repodata (str): The path to the repo where the metadata will be inserted.
        filetype (str): What type of metadata will be inserted by createrepo_c.
            This does allow any string to be inserted (custom types). Some
            types are commonly used with dnf repos, such as primary,
            updateinfo, comps, filelists, etc.
        extension (str): The file extension (xml, sqlite).
        source (str): A file path. File holds the dump of metadata until
            copied to the repodata folder.
        zchunk (bool): Whether zchunk data is supported for clients of this repo.
    """
    log.info('Inserting %s.%s into %s', filetype, extension, repodata)
    target_fname = os.path.join(repodata, '%s.%s' % (filetype, extension))
    shutil.copyfile(source, target_fname)
    repomd_xml = os.path.join(repodata, 'repomd.xml')
    repomd = cr.Repomd(repomd_xml)
    add_list = [(filetype, comp_type)]
    if zchunk and hasattr(
            cr, 'ZCK_COMPRESSION') and comp_type != cr.ZCK_COMPRESSION:
        add_list.append((filetype + "_zck", cr.ZCK_COMPRESSION))
    for (ft, ct) in add_list:
        # create a new record for our repomd.xml
        rec = cr.RepomdRecord(ft, target_fname)
        # compress our metadata file with the comp_type
        rec_comp = rec.compress_and_fill(cr.SHA256, ct)
        # add hash to the compressed metadata file
        rec_comp.rename_file()
        # set type of metadata
        rec_comp.type = ft
        # insert metadata about our metadata in repomd.xml
        repomd.set_record(rec_comp)
    with open(repomd_xml, 'w') as repomd_file:
        repomd_file.write(repomd.xml_dump())
    os.unlink(target_fname)
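
A hypothetical call of this zchunk-aware variant; the extra "<filetype>_zck" record is only added when the installed createrepo_c exposes ZCK_COMPRESSION:

import createrepo_c as cr

insert_in_repo(cr.GZ_COMPRESSION, '/srv/repo/repodata', 'updateinfo', 'xml',
               '/tmp/updateinfo-dump.xml', zchunk=True)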
Example #17
    def test_repomdrecord_fill(self):
        shutil.copyfile(REPO_00_PRIXML, self.path00)
        self.assertTrue(os.path.exists(self.path00))

        rec = cr.RepomdRecord("primary", self.path00)
        self.assertTrue(rec)

        self.assertEqual(rec.location_real, self.path00)
        self.assertEqual(rec.location_href, "repodata/primary.xml.gz")
        self.assertEqual(rec.location_base, None)
        self.assertEqual(rec.checksum, None)
        self.assertEqual(rec.checksum_type, None)
        self.assertEqual(rec.checksum_open, None)
        self.assertEqual(rec.checksum_open_type, None)
        self.assertEqual(rec.timestamp, 0)

        self.assertEqual(rec.size, 0)
        self.assertEqual(rec.size_open, -1)
        self.assertEqual(rec.db_ver, 0)

        rec.fill(cr.SHA256)

        self.assertEqual(rec.location_real, self.path00)
        self.assertEqual(rec.location_href, "repodata/primary.xml.gz")
        self.assertEqual(rec.location_base, None)
        self.assertEqual(rec.checksum, "1cb61ea996355add02b1426ed4c1780ea75ce0c04c5d1107c025c3fbd7d8bcae")
        self.assertEqual(rec.checksum_type, "sha256")
        self.assertEqual(rec.checksum_open, "e1e2ffd2fb1ee76f87b70750d00ca5677a252b397ab6c2389137a0c33e7b359f")
        self.assertEqual(rec.checksum_open_type, "sha256")
        self.assertTrue(rec.timestamp > 0)
        self.assertEqual(rec.size, 134)
        self.assertEqual(rec.size_open, 167)

        rec.rename_file()

        shutil.copyfile(REPO_00_PRIZCK, self.path02)
        self.assertTrue(os.path.exists(self.path02))

        zrc = cr.RepomdRecord("primary_zck", self.path02)
        self.assertTrue(zrc)

        self.assertEqual(zrc.location_real, self.path02)
        self.assertEqual(zrc.location_href, "repodata/primary.xml.zck")
        self.assertEqual(zrc.location_base, None)
        self.assertEqual(zrc.checksum, None)
        self.assertEqual(zrc.checksum_type, None)
        self.assertEqual(zrc.checksum_open, None)
        self.assertEqual(zrc.checksum_open_type, None)
        self.assertEqual(zrc.checksum_header, None)
        self.assertEqual(zrc.checksum_header_type, None)
        self.assertEqual(zrc.timestamp, 0)

        self.assertEqual(zrc.size, 0)
        self.assertEqual(zrc.size_open, -1)
        self.assertEqual(zrc.size_header, -1)

        if cr.HAS_ZCK == 0:
            filelist = os.listdir(self.tmpdir)
            filelist.sort()
            self.assertEqual(filelist,
                ['1cb61ea996355add02b1426ed4c1780ea75ce0c04c5d1107c025c3fbd7d8bcae-primary.xml.gz',
                 'primary.xml.zck'])
            return

        zrc.fill(cr.SHA256)

        self.assertEqual(zrc.location_real, self.path02)
        self.assertEqual(zrc.location_href, "repodata/primary.xml.zck")
        self.assertEqual(zrc.location_base, None)
        self.assertEqual(zrc.checksum, "e0ac03cd77e95e724dbf90ded0dba664e233315a8940051dd8882c56b9878595")
        self.assertEqual(zrc.checksum_type, "sha256")
        self.assertEqual(zrc.checksum_open, "e1e2ffd2fb1ee76f87b70750d00ca5677a252b397ab6c2389137a0c33e7b359f")
        self.assertEqual(zrc.checksum_open_type, "sha256")
        self.assertEqual(zrc.checksum_header, "243baf7c02f5241d46f2e8c237ebc7ea7e257ca993d9cfe1304254c7ba7f6546")
        self.assertEqual(zrc.checksum_header_type, "sha256")
        self.assertTrue(zrc.timestamp > 0)
        self.assertEqual(zrc.size, 269)
        self.assertEqual(zrc.size_open, 167)
        self.assertEqual(zrc.size_header, 132)
        self.assertEqual(zrc.db_ver, 10)

        zrc.rename_file()

        # Filename should contain a (valid) checksum
        filelist = os.listdir(self.tmpdir)
        filelist.sort()
        self.assertEqual(filelist,
            ['1cb61ea996355add02b1426ed4c1780ea75ce0c04c5d1107c025c3fbd7d8bcae-primary.xml.gz',
             'e0ac03cd77e95e724dbf90ded0dba664e233315a8940051dd8882c56b9878595-primary.xml.zck'])
Example #18
def generate_repo_metadata(
    content,
    publication,
    checksum_types,
    extra_repomdrecords,
    sub_folder=None,
    metadata_signing_service=None,
):
    """
    Creates a repomd.xml file.

    Args:
        content(app.models.Content): content set
        publication(pulpcore.plugin.models.Publication): the publication
        checksum_types(dict): checksum types to use for the repo metadata and packages
        extra_repomdrecords(list): list with data relative to repo metadata files
        sub_folder(str): name of the folder for sub repos
        metadata_signing_service (pulpcore.app.models.AsciiArmoredDetachedSigningService):
            A reference to an associated signing service.

    """
    cwd = os.getcwd()
    repodata_path = REPODATA_PATH
    has_modules = False
    has_comps = False
    package_checksum_type = checksum_types.get("package")

    if sub_folder:
        cwd = os.path.join(cwd, sub_folder)
        repodata_path = os.path.join(sub_folder, repodata_path)

    if package_checksum_type and package_checksum_type not in settings.ALLOWED_CONTENT_CHECKSUMS:
        raise ValueError(
            "Repository contains disallowed package checksum type '{}', "
            "thus can't be published. {}".format(package_checksum_type,
                                                 ALLOWED_CHECKSUM_ERROR_MSG))

    # Prepare metadata files
    repomd_path = os.path.join(cwd, "repomd.xml")
    pri_xml_path = os.path.join(cwd, "primary.xml.gz")
    fil_xml_path = os.path.join(cwd, "filelists.xml.gz")
    oth_xml_path = os.path.join(cwd, "other.xml.gz")
    upd_xml_path = os.path.join(cwd, "updateinfo.xml.gz")
    mod_yml_path = os.path.join(cwd, "modules.yaml")
    comps_xml_path = os.path.join(cwd, "comps.xml")

    pri_xml = cr.PrimaryXmlFile(pri_xml_path)
    fil_xml = cr.FilelistsXmlFile(fil_xml_path)
    oth_xml = cr.OtherXmlFile(oth_xml_path)
    upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

    if publication.sqlite_metadata:
        pri_db_path = os.path.join(cwd, "primary.sqlite")
        fil_db_path = os.path.join(cwd, "filelists.sqlite")
        oth_db_path = os.path.join(cwd, "other.sqlite")
        pri_db = cr.PrimarySqlite(pri_db_path)
        fil_db = cr.FilelistsSqlite(fil_db_path)
        oth_db = cr.OtherSqlite(oth_db_path)

    packages = Package.objects.filter(pk__in=content)

    # We want to support publishing with a different checksum type than the one built into
    # the package itself, so we need to get the correct checksums somehow if there is an
    # override. We must also take into consideration that if the package has not been
    # downloaded, the only checksum available is the one built in.
    #
    # Since this lookup goes from Package->Content->ContentArtifact->Artifact, performance is a
    # challenge. We use ContentArtifact as our starting point because it enables us to work with
    # simple foreign keys and avoid messing with the many-to-many relationship, which doesn't
    # work with select_related() and performs poorly with prefetch_related(). This is fine
    # because we know that Packages should only ever have one artifact per content.
    contentartifact_qs = (
        ContentArtifact.objects.filter(content__in=packages.only("pk")).
        select_related(
            # content__rpm_package is a bit of a hack, exploiting the way django sets up model
            # inheritance, but it works and is unlikely to break. All content artifacts being
            # accessed here have an associated Package since they originally came from the
            # Package queryset.
            "artifact",
            "content__rpm_package",
        ).only("artifact", "content__rpm_package__checksum_type",
               "content__rpm_package__pkgId"))

    pkg_to_hash = {}
    for ca in contentartifact_qs.iterator():
        if package_checksum_type:
            package_checksum_type = package_checksum_type.lower()
            pkgid = getattr(ca.artifact, package_checksum_type, None)

        if not package_checksum_type or not pkgid:
            if ca.content.rpm_package.checksum_type not in settings.ALLOWED_CONTENT_CHECKSUMS:
                raise ValueError(
                    "Package {} as content unit {} contains forbidden checksum type '{}', "
                    "thus can't be published. {}".format(
                        ca.content.rpm_package.nevra,
                        ca.content.pk,
                        ca.content.rpm_package.checksum_type,
                        ALLOWED_CHECKSUM_ERROR_MSG,
                    ))
            package_checksum_type = ca.content.rpm_package.checksum_type
            pkgid = ca.content.rpm_package.pkgId

        pkg_to_hash[ca.content_id] = (package_checksum_type, pkgid)

    # TODO: this is meant to be a !! *temporary* !! fix for
    # https://github.com/pulp/pulp_rpm/issues/2407
    pkg_pks_to_ignore = set()
    latest_build_time_by_nevra = defaultdict(list)
    for pkg in packages.only("pk", "name", "epoch", "version", "release",
                             "arch", "time_build").iterator():
        latest_build_time_by_nevra[pkg.nevra].append((pkg.time_build, pkg.pk))
    for nevra, pkg_data in latest_build_time_by_nevra.items():
        # sort the packages by when they were built
        if len(pkg_data) > 1:
            pkg_data.sort(key=lambda p: p[0], reverse=True)
            pkg_pks_to_ignore |= set(entry[1] for entry in pkg_data[1:])
            log.warning(
                "Duplicate packages found competing for NEVRA {nevra}, selected the one with "
                "the most recent build time, excluding {others} others.".
                format(nevra=nevra, others=len(pkg_data[1:])))

    total_packages = packages.count() - len(pkg_pks_to_ignore)

    pri_xml.set_num_of_pkgs(total_packages)
    fil_xml.set_num_of_pkgs(total_packages)
    oth_xml.set_num_of_pkgs(total_packages)

    # Process all packages
    for package in packages.order_by("name", "evr").iterator():
        if package.pk in pkg_pks_to_ignore:  # Temporary!
            continue
        pkg = package.to_createrepo_c()

        # rewrite the checksum and checksum type with the desired ones
        (checksum, pkgId) = pkg_to_hash[package.pk]
        pkg.checksum_type = checksum
        pkg.pkgId = pkgId

        pkg_filename = os.path.basename(package.location_href)
        # this can cause an issue when two RPM packages have the same file
        # name, e.g. a/name1.rpm and b/name1.rpm
        pkg.location_href = os.path.join(PACKAGES_DIRECTORY,
                                         pkg_filename[0].lower(), pkg_filename)
        pri_xml.add_pkg(pkg)
        fil_xml.add_pkg(pkg)
        oth_xml.add_pkg(pkg)
        if publication.sqlite_metadata:
            pri_db.add_pkg(pkg)
            fil_db.add_pkg(pkg)
            oth_db.add_pkg(pkg)

    # Process update records
    for update_record in UpdateRecord.objects.filter(
            pk__in=content).iterator():
        upd_xml.add_chunk(
            cr.xml_dump_updaterecord(update_record.to_createrepo_c()))

    # Process modulemd, modulemd_defaults and obsoletes
    with open(mod_yml_path, "ab") as mod_yml:
        for modulemd in Modulemd.objects.filter(pk__in=content).iterator():
            mod_yml.write(modulemd.snippet.encode())
            has_modules = True
        for default in ModulemdDefaults.objects.filter(
                pk__in=content).iterator():
            mod_yml.write(default.snippet.encode())
            has_modules = True
        for obsolete in ModulemdObsolete.objects.filter(
                pk__in=content).iterator():
            mod_yml.write(obsolete.snippet.encode())
            has_modules = True

    # Process comps
    comps = libcomps.Comps()
    for pkg_grp in PackageGroup.objects.filter(pk__in=content).iterator():
        group = pkg_grp.pkg_grp_to_libcomps()
        comps.groups.append(group)
        has_comps = True
    for pkg_cat in PackageCategory.objects.filter(pk__in=content).iterator():
        cat = pkg_cat.pkg_cat_to_libcomps()
        comps.categories.append(cat)
        has_comps = True
    for pkg_env in PackageEnvironment.objects.filter(
            pk__in=content).iterator():
        env = pkg_env.pkg_env_to_libcomps()
        comps.environments.append(env)
        has_comps = True
    for pkg_lng in PackageLangpacks.objects.filter(pk__in=content).iterator():
        comps.langpacks = dict_to_strdict(pkg_lng.matches)
        has_comps = True

    comps.toxml_f(
        comps_xml_path,
        xml_options={
            "default_explicit": True,
            "empty_groups": True,
            "empty_packages": True,
            "uservisible_explicit": True,
        },
    )

    pri_xml.close()
    fil_xml.close()
    oth_xml.close()
    upd_xml.close()

    repomd = cr.Repomd()
    # If the repository is empty, use a revision of 0
    # See: https://pulp.plan.io/issues/9402
    if not content.exists():
        repomd.revision = "0"

    if publication.sqlite_metadata:
        repomdrecords = [
            ("primary", pri_xml_path, pri_db),
            ("filelists", fil_xml_path, fil_db),
            ("other", oth_xml_path, oth_db),
            ("primary_db", pri_db_path, None),
            ("filelists_db", fil_db_path, None),
            ("other_db", oth_db_path, None),
            ("updateinfo", upd_xml_path, None),
        ]
    else:
        repomdrecords = [
            ("primary", pri_xml_path, None),
            ("filelists", fil_xml_path, None),
            ("other", oth_xml_path, None),
            ("updateinfo", upd_xml_path, None),
        ]

    if has_modules:
        repomdrecords.append(("modules", mod_yml_path, None))

    if has_comps:
        repomdrecords.append(("group", comps_xml_path, None))

    repomdrecords.extend(extra_repomdrecords)

    sqlite_files = ("primary_db", "filelists_db", "other_db")

    for name, path, db_to_update in repomdrecords:
        record = cr.RepomdRecord(name, path)
        checksum_type = cr_checksum_type_from_string(
            get_checksum_type(name,
                              checksum_types,
                              default=publication.metadata_checksum_type))
        if name in sqlite_files:
            record_bz = record.compress_and_fill(checksum_type, cr.BZ2)
            record_bz.type = name
            record_bz.rename_file()
            path = record_bz.location_href.split("/")[-1]
            repomd.set_record(record_bz)
        else:
            record.fill(checksum_type)
            if db_to_update:
                db_to_update.dbinfo_update(record.checksum)
                db_to_update.close()
            record.rename_file()
            path = record.location_href.split("/")[-1]
            repomd.set_record(record)

        if sub_folder:
            path = os.path.join(sub_folder, path)

        with open(path, "rb") as repodata_fd:
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(repodata_path,
                                           os.path.basename(path)),
                publication=publication,
                file=File(repodata_fd),
            )

    with open(repomd_path, "w") as repomd_f:
        repomd_f.write(repomd.xml_dump())

    if metadata_signing_service:
        signing_service = AsciiArmoredDetachedSigningService.objects.get(
            pk=metadata_signing_service)
        sign_results = signing_service.sign(repomd_path)

        # publish a signed file
        with open(sign_results["file"], "rb") as signed_file_fd:
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(
                    repodata_path, os.path.basename(sign_results["file"])),
                publication=publication,
                file=File(signed_file_fd),
            )

        # publish a detached signature
        with open(sign_results["signature"], "rb") as signature_fd:
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(
                    repodata_path,
                    os.path.basename(sign_results["signature"])),
                publication=publication,
                file=File(signature_fd),
            )

        # publish a public key required for further verification
        pubkey_name = "repomd.xml.key"
        with open(pubkey_name, "wb+") as f:
            f.write(signing_service.public_key.encode("utf-8"))
            f.flush()
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(repodata_path, pubkey_name),
                publication=publication,
                file=File(f),
            )
    else:
        with open(repomd_path, "rb") as repomd_fd:
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(repodata_path,
                                           os.path.basename(repomd_path)),
                publication=publication,
                file=File(repomd_fd),
            )
Example #19
def create_repomd_xml(content,
                      publication,
                      checksum_types,
                      extra_repomdrecords,
                      sub_folder=None,
                      metadata_signing_service=None):
    """
    Creates a repomd.xml file.

    Args:
        content(app.models.Content): content set
        publication(pulpcore.plugin.models.Publication): the publication
        checksum_types(dict): checksum types to use for the repo metadata and packages
        extra_repomdrecords(list): list with data relative to repo metadata files
        sub_folder(str): name of the folder for sub repos
        metadata_signing_service (pulpcore.app.models.AsciiArmoredDetachedSigningService):
            A reference to an associated signing service.

    """
    cwd = os.getcwd()
    repodata_path = REPODATA_PATH
    has_modules = False
    has_comps = False
    package_checksum_type = checksum_types.get("package")

    if sub_folder:
        cwd = os.path.join(cwd, sub_folder)
        repodata_path = os.path.join(sub_folder, repodata_path)

    # Prepare metadata files
    repomd_path = os.path.join(cwd, "repomd.xml")
    pri_xml_path = os.path.join(cwd, "primary.xml.gz")
    fil_xml_path = os.path.join(cwd, "filelists.xml.gz")
    oth_xml_path = os.path.join(cwd, "other.xml.gz")
    pri_db_path = os.path.join(cwd, "primary.sqlite")
    fil_db_path = os.path.join(cwd, "filelists.sqlite")
    oth_db_path = os.path.join(cwd, "other.sqlite")
    upd_xml_path = os.path.join(cwd, "updateinfo.xml.gz")
    mod_yml_path = os.path.join(cwd, "modules.yaml")
    comps_xml_path = os.path.join(cwd, "comps.xml")

    pri_xml = cr.PrimaryXmlFile(pri_xml_path)
    fil_xml = cr.FilelistsXmlFile(fil_xml_path)
    oth_xml = cr.OtherXmlFile(oth_xml_path)
    pri_db = cr.PrimarySqlite(pri_db_path)
    fil_db = cr.FilelistsSqlite(fil_db_path)
    oth_db = cr.OtherSqlite(oth_db_path)
    upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

    packages = Package.objects.filter(pk__in=content)
    total_packages = packages.count()

    pri_xml.set_num_of_pkgs(total_packages)
    fil_xml.set_num_of_pkgs(total_packages)
    oth_xml.set_num_of_pkgs(total_packages)

    # Process all packages
    for package in packages.iterator():
        pkg = package.to_createrepo_c(package_checksum_type)
        pkg_filename = os.path.basename(package.location_href)
        # this can cause an issue when two RPM packages have the same file
        # name, e.g. a/name1.rpm and b/name1.rpm
        pkg.location_href = os.path.join(PACKAGES_DIRECTORY,
                                         pkg_filename[0].lower(), pkg_filename)
        pri_xml.add_pkg(pkg)
        fil_xml.add_pkg(pkg)
        oth_xml.add_pkg(pkg)
        pri_db.add_pkg(pkg)
        fil_db.add_pkg(pkg)
        oth_db.add_pkg(pkg)

    # Process update records
    for update_record in UpdateRecord.objects.filter(
            pk__in=content).iterator():
        upd_xml.add_chunk(
            cr.xml_dump_updaterecord(update_record.to_createrepo_c()))

    # Process modulemd and modulemd_defaults
    with open(mod_yml_path, 'ab') as mod_yml:
        for modulemd in Modulemd.objects.filter(pk__in=content).iterator():
            mod_yml.write(modulemd._artifacts.get().file.read())
            has_modules = True
        for default in ModulemdDefaults.objects.filter(
                pk__in=content).iterator():
            mod_yml.write(default._artifacts.get().file.read())
            has_modules = True

    # Process comps
    comps = libcomps.Comps()
    for pkg_grp in PackageGroup.objects.filter(pk__in=content).iterator():
        group = pkg_grp.pkg_grp_to_libcomps()
        comps.groups.append(group)
        has_comps = True
    for pkg_cat in PackageCategory.objects.filter(pk__in=content).iterator():
        cat = pkg_cat.pkg_cat_to_libcomps()
        comps.categories.append(cat)
        has_comps = True
    for pkg_env in PackageEnvironment.objects.filter(
            pk__in=content).iterator():
        env = pkg_env.pkg_env_to_libcomps()
        comps.environments.append(env)
        has_comps = True
    for pkg_lng in PackageLangpacks.objects.filter(pk__in=content).iterator():
        comps.langpacks = dict_to_strdict(pkg_lng.matches)
        has_comps = True

    comps.toxml_f(comps_xml_path,
                  xml_options={
                      "default_explicit": True,
                      "empty_groups": True,
                      "uservisible_explicit": True
                  })

    pri_xml.close()
    fil_xml.close()
    oth_xml.close()
    upd_xml.close()

    repomd = cr.Repomd()

    repomdrecords = [("primary", pri_xml_path, pri_db),
                     ("filelists", fil_xml_path, fil_db),
                     ("other", oth_xml_path, oth_db),
                     ("primary_db", pri_db_path, None),
                     ("filelists_db", fil_db_path, None),
                     ("other_db", oth_db_path, None),
                     ("updateinfo", upd_xml_path, None)]

    if has_modules:
        repomdrecords.append(("modules", mod_yml_path, None))

    if has_comps:
        repomdrecords.append(("group", comps_xml_path, None))

    repomdrecords.extend(extra_repomdrecords)

    sqlite_files = ("primary_db", "filelists_db", "other_db")
    for name, path, db_to_update in repomdrecords:
        record = cr.RepomdRecord(name, path)
        checksum_type = get_checksum_type(name, checksum_types)
        if name in sqlite_files:
            record_bz = record.compress_and_fill(checksum_type, cr.BZ2)
            record_bz.type = name
            record_bz.rename_file()
            path = record_bz.location_href.split('/')[-1]
            repomd.set_record(record_bz)
        else:
            record.fill(checksum_type)
            if db_to_update:
                db_to_update.dbinfo_update(record.checksum)
                db_to_update.close()
            record.rename_file()
            path = record.location_href.split('/')[-1]
            repomd.set_record(record)

        if sub_folder:
            path = os.path.join(sub_folder, path)

        PublishedMetadata.create_from_file(relative_path=os.path.join(
            repodata_path, os.path.basename(path)),
                                           publication=publication,
                                           file=File(open(path, 'rb')))

    with open(repomd_path, "w") as repomd_f:
        repomd_f.write(repomd.xml_dump())

    if metadata_signing_service:
        signing_service = AsciiArmoredDetachedSigningService.objects.get(
            pk=metadata_signing_service.pk)
        sign_results = signing_service.sign(repomd_path)

        # publish a signed file
        PublishedMetadata.create_from_file(
            relative_path=os.path.join(repodata_path,
                                       os.path.basename(sign_results['file'])),
            publication=publication,
            file=File(open(sign_results['file'], 'rb')))

        # publish a detached signature
        PublishedMetadata.create_from_file(relative_path=os.path.join(
            repodata_path, os.path.basename(sign_results['signature'])),
                                           publication=publication,
                                           file=File(
                                               open(sign_results['signature'],
                                                    'rb')))

        # publish a public key required for further verification
        PublishedMetadata.create_from_file(
            relative_path=os.path.join(repodata_path,
                                       os.path.basename(sign_results['key'])),
            publication=publication,
            file=File(open(sign_results['key'], 'rb')))
    else:
        PublishedMetadata.create_from_file(relative_path=os.path.join(
            repodata_path, os.path.basename(repomd_path)),
                                           publication=publication,
                                           file=File(open(repomd_path, 'rb')))


def do_repodata(path):
    # Prepare repodata/ directory
    repodata_path = os.path.join(path, "repodata")
    if os.path.exists(repodata_path):
        x = 0
        while True:
            new_repodata_path = "%s_%s" % (repodata_path, x)
            if not os.path.exists(new_repodata_path):
                shutil.move(repodata_path, new_repodata_path)
                break
            x += 1
    os.mkdir(repodata_path)

    # Prepare metadata files
    repomd_path  = os.path.join(repodata_path, "repomd.xml")
    pri_xml_path = os.path.join(repodata_path, "primary.xml.gz")
    fil_xml_path = os.path.join(repodata_path, "filelists.xml.gz")
    oth_xml_path = os.path.join(repodata_path, "other.xml.gz")
    pri_db_path  = os.path.join(repodata_path, "primary.sqlite")
    fil_db_path  = os.path.join(repodata_path, "filelists.sqlite")
    oth_db_path  = os.path.join(repodata_path, "other.sqlite")

    pri_xml = cr.PrimaryXmlFile(pri_xml_path)
    fil_xml = cr.FilelistsXmlFile(fil_xml_path)
    oth_xml = cr.OtherXmlFile(oth_xml_path)
    pri_db  = cr.PrimarySqlite(pri_db_path)
    fil_db  = cr.FilelistsSqlite(fil_db_path)
    oth_db  = cr.OtherSqlite(oth_db_path)

    # List directory and prepare list of files to process
    pkg_list = []
    for filename in os.listdir(path):
        filename = os.path.join(path, filename)
        if os.path.isfile(filename) and filename.endswith(".rpm"):
            pkg_list.append(filename)

    pri_xml.set_num_of_pkgs(len(pkg_list))
    fil_xml.set_num_of_pkgs(len(pkg_list))
    oth_xml.set_num_of_pkgs(len(pkg_list))

    # Process all packages
    for filename in pkg_list:
        pkg = cr.package_from_rpm(filename)
        pkg.location_href = os.path.basename(filename)
        print "Processing: %s" % pkg.nevra()
        pri_xml.add_pkg(pkg)
        fil_xml.add_pkg(pkg)
        oth_xml.add_pkg(pkg)
        pri_db.add_pkg(pkg)
        fil_db.add_pkg(pkg)
        oth_db.add_pkg(pkg)

    pri_xml.close()
    fil_xml.close()
    oth_xml.close()

    # Note: DBs are still open! We have to calculate checksums of xml files
    # and insert them to the databases first!

    # Prepare repomd.xml
    repomd = cr.Repomd()

    # Add records into the repomd.xml
    repomdrecords = (("primary",      pri_xml_path, pri_db),
                     ("filelists",    fil_xml_path, fil_db),
                     ("other",        oth_xml_path, oth_db),
                     ("primary_db",   pri_db_path,  None),
                     ("filelists_db", fil_db_path,  None),
                     ("other_db",     oth_db_path,  None))
    for name, path, db_to_update in repomdrecords:
        record = cr.RepomdRecord(name, path)
        record.fill(cr.SHA256)
        if db_to_update:
            db_to_update.dbinfo_update(record.checksum)
            db_to_update.close()
        repomd.set_record(record)

    # Write repomd.xml
    with open(repomd_path, "w") as repomd_f:
        repomd_f.write(repomd.xml_dump())
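
Hypothetical usage: build repodata/ for a directory of .rpm files (any existing repodata/ is moved aside first):

do_repodata("/srv/my-repo")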
Example #21
def create_repomd_xml(
    content,
    publication,
    checksum_types,
    extra_repomdrecords,
    sub_folder=None,
    metadata_signing_service=None,
):
    """
    Creates a repomd.xml file.

    Args:
        content(app.models.Content): content set
        publication(pulpcore.plugin.models.Publication): the publication
        checksum_types(dict): checksum types to use for the repo metadata and packages
        extra_repomdrecords(list): list with data relative to repo metadata files
        sub_folder(str): name of the folder for sub repos
        metadata_signing_service (pulpcore.app.models.AsciiArmoredDetachedSigningService):
            A reference to an associated signing service.

    """
    cwd = os.getcwd()
    repodata_path = REPODATA_PATH
    has_modules = False
    has_comps = False
    package_checksum_type = checksum_types.get("package")

    if sub_folder:
        cwd = os.path.join(cwd, sub_folder)
        repodata_path = os.path.join(sub_folder, repodata_path)

    # Prepare metadata files
    repomd_path = os.path.join(cwd, "repomd.xml")
    pri_xml_path = os.path.join(cwd, "primary.xml.gz")
    fil_xml_path = os.path.join(cwd, "filelists.xml.gz")
    oth_xml_path = os.path.join(cwd, "other.xml.gz")
    upd_xml_path = os.path.join(cwd, "updateinfo.xml.gz")
    mod_yml_path = os.path.join(cwd, "modules.yaml")
    comps_xml_path = os.path.join(cwd, "comps.xml")

    pri_xml = cr.PrimaryXmlFile(pri_xml_path)
    fil_xml = cr.FilelistsXmlFile(fil_xml_path)
    oth_xml = cr.OtherXmlFile(oth_xml_path)
    upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

    if publication.sqlite_metadata:
        pri_db_path = os.path.join(cwd, "primary.sqlite")
        fil_db_path = os.path.join(cwd, "filelists.sqlite")
        oth_db_path = os.path.join(cwd, "other.sqlite")
        pri_db = cr.PrimarySqlite(pri_db_path)
        fil_db = cr.FilelistsSqlite(fil_db_path)
        oth_db = cr.OtherSqlite(oth_db_path)

    packages = Package.objects.filter(pk__in=content)
    total_packages = packages.count()

    pri_xml.set_num_of_pkgs(total_packages)
    fil_xml.set_num_of_pkgs(total_packages)
    oth_xml.set_num_of_pkgs(total_packages)

    # We want to support publishing with a different checksum type than the one built into
    # the package itself, so we need to get the correct checksums somehow if there is an
    # override. We must also take into consideration that if the package has not been
    # downloaded, the only checksum available is the one built in.
    #
    # Since this lookup goes from Package->Content->ContentArtifact->Artifact, performance is a
    # challenge. We use ContentArtifact as our starting point because it enables us to work with
    # simple foreign keys and avoid messing with the many-to-many relationship, which doesn't
    # work with select_related() and performs poorly with prefetch_related(). This is fine
    # because we know that Packages should only ever have one artifact per content.
    contentartifact_qs = (
        ContentArtifact.objects.filter(content__in=packages.only("pk")).
        select_related(
            # content__rpm_package is a bit of a hack, exploiting the way django sets up model
            # inheritance, but it works and is unlikely to break. All content artifacts being
            # accessed here have an associated Package since they originally came from the
            # Package queryset.
            "artifact",
            "content__rpm_package",
        ).only("artifact", "content__rpm_package__checksum_type",
               "content__rpm_package__pkgId"))

    pkg_to_hash = {}
    for ca in contentartifact_qs.iterator():
        pkgid = None
        if package_checksum_type:
            package_checksum_type = package_checksum_type.lower()
            pkgid = getattr(ca.artifact, package_checksum_type, None)
        if pkgid:
            pkg_to_hash[ca.content_id] = (package_checksum_type, pkgid)
        else:
            pkg_to_hash[ca.content_id] = (
                ca.content.rpm_package.checksum_type,
                ca.content.rpm_package.pkgId,
            )

    # Process all packages
    for package in packages.iterator():
        pkg = package.to_createrepo_c()

        # rewrite the checksum and checksum type with the desired ones
        (checksum, pkgId) = pkg_to_hash[package.pk]
        pkg.checksum_type = checksum
        pkg.pkgId = pkgId

        pkg_filename = os.path.basename(package.location_href)
        # this can cause an issue when two RPM packages have the same file
        # name, e.g. a/name1.rpm and b/name1.rpm
        pkg.location_href = os.path.join(PACKAGES_DIRECTORY,
                                         pkg_filename[0].lower(), pkg_filename)
        pri_xml.add_pkg(pkg)
        fil_xml.add_pkg(pkg)
        oth_xml.add_pkg(pkg)
        if publication.sqlite_metadata:
            pri_db.add_pkg(pkg)
            fil_db.add_pkg(pkg)
            oth_db.add_pkg(pkg)

    # Process update records
    for update_record in UpdateRecord.objects.filter(
            pk__in=content).iterator():
        upd_xml.add_chunk(
            cr.xml_dump_updaterecord(update_record.to_createrepo_c()))

    # Process modulemd and modulemd_defaults
    with open(mod_yml_path, "ab") as mod_yml:
        for modulemd in Modulemd.objects.filter(pk__in=content).iterator():
            mod_yml.write(modulemd._artifacts.get().file.read())
            has_modules = True
        for default in ModulemdDefaults.objects.filter(
                pk__in=content).iterator():
            mod_yml.write(default._artifacts.get().file.read())
            has_modules = True

    # Process comps
    comps = libcomps.Comps()
    for pkg_grp in PackageGroup.objects.filter(pk__in=content).iterator():
        group = pkg_grp.pkg_grp_to_libcomps()
        comps.groups.append(group)
        has_comps = True
    for pkg_cat in PackageCategory.objects.filter(pk__in=content).iterator():
        cat = pkg_cat.pkg_cat_to_libcomps()
        comps.categories.append(cat)
        has_comps = True
    for pkg_env in PackageEnvironment.objects.filter(
            pk__in=content).iterator():
        env = pkg_env.pkg_env_to_libcomps()
        comps.environments.append(env)
        has_comps = True
    for pkg_lng in PackageLangpacks.objects.filter(pk__in=content).iterator():
        comps.langpacks = dict_to_strdict(pkg_lng.matches)
        has_comps = True

    comps.toxml_f(
        comps_xml_path,
        xml_options={
            "default_explicit": True,
            "empty_groups": True,
            "uservisible_explicit": True
        },
    )

    pri_xml.close()
    fil_xml.close()
    oth_xml.close()
    upd_xml.close()

    repomd = cr.Repomd()

    if publication.sqlite_metadata:
        repomdrecords = [
            ("primary", pri_xml_path, pri_db),
            ("filelists", fil_xml_path, fil_db),
            ("other", oth_xml_path, oth_db),
            ("primary_db", pri_db_path, None),
            ("filelists_db", fil_db_path, None),
            ("other_db", oth_db_path, None),
            ("updateinfo", upd_xml_path, None),
        ]
    else:
        repomdrecords = [
            ("primary", pri_xml_path, None),
            ("filelists", fil_xml_path, None),
            ("other", oth_xml_path, None),
            ("updateinfo", upd_xml_path, None),
        ]

    if has_modules:
        repomdrecords.append(("modules", mod_yml_path, None))

    if has_comps:
        repomdrecords.append(("group", comps_xml_path, None))

    repomdrecords.extend(extra_repomdrecords)

    sqlite_files = ("primary_db", "filelists_db", "other_db")
    for name, path, db_to_update in repomdrecords:
        record = cr.RepomdRecord(name, path)
        checksum_type = get_checksum_type(name, checksum_types)
        if name in sqlite_files:
            record_bz = record.compress_and_fill(checksum_type, cr.BZ2)
            record_bz.type = name
            record_bz.rename_file()
            path = record_bz.location_href.split("/")[-1]
            repomd.set_record(record_bz)
        else:
            record.fill(checksum_type)
            if db_to_update:
                db_to_update.dbinfo_update(record.checksum)
                db_to_update.close()
            record.rename_file()
            path = record.location_href.split("/")[-1]
            repomd.set_record(record)

        if sub_folder:
            path = os.path.join(sub_folder, path)

        PublishedMetadata.create_from_file(
            relative_path=os.path.join(repodata_path, os.path.basename(path)),
            publication=publication,
            file=File(open(path, "rb")),
        )

    with open(repomd_path, "w") as repomd_f:
        repomd_f.write(repomd.xml_dump())

    if metadata_signing_service:
        signing_service = AsciiArmoredDetachedSigningService.objects.get(
            pk=metadata_signing_service.pk)
        sign_results = signing_service.sign(repomd_path)

        # publish a signed file
        PublishedMetadata.create_from_file(
            relative_path=os.path.join(repodata_path,
                                       os.path.basename(sign_results["file"])),
            publication=publication,
            file=File(open(sign_results["file"], "rb")),
        )

        # publish a detached signature
        PublishedMetadata.create_from_file(
            relative_path=os.path.join(
                repodata_path, os.path.basename(sign_results["signature"])),
            publication=publication,
            file=File(open(sign_results["signature"], "rb")),
        )

        # publish a public key required for further verification
        PublishedMetadata.create_from_file(
            relative_path=os.path.join(repodata_path,
                                       os.path.basename(sign_results["key"])),
            publication=publication,
            file=File(open(sign_results["key"], "rb")),
        )
    else:
        PublishedMetadata.create_from_file(
            relative_path=os.path.join(repodata_path,
                                       os.path.basename(repomd_path)),
            publication=publication,
            file=File(open(repomd_path, "rb")),
        )
Exemple #22
0
def cr_create_md(repodata_path, pkglist=None, log=sys.stdout):
    if pkglist is None:
        # repo_base is presumably a module-level global in the original source
        pkglist = cr_get_pkg_list(repo_base, log)

    pri_xml_path = os.path.join(repodata_path, 'primary.xml.gz')
    fil_xml_path = os.path.join(repodata_path, 'filelists.xml.gz')
    oth_xml_path = os.path.join(repodata_path, 'other.xml.gz')
    pri_db_path = os.path.join(repodata_path, 'primary.sqlite')
    fil_db_path = os.path.join(repodata_path, 'filelists.sqlite')
    oth_db_path = os.path.join(repodata_path, 'other.sqlite')

    def __create_xml(queues, xml_path, xml_func, name):
        cs = cr.ContentStat(cr.SHA256)
        xml = xml_func(xml_path, contentstat=cs)

        xml.set_num_of_pkgs(len(pkglist))

        for pkg in pkglist:
            xml.add_pkg(pkg)

        xml.close()

        queues['master'].put(
            ((name, xml_path), (cs.checksum, cs.size, cs.checksum_type)), True)

    def __create_db(queues, db_path, db_func, name):
        db = db_func(db_path)

        for pkg in pkglist:
            db.add_pkg(pkg)

        db.dbinfo_update(queues[name].get(True))

        db.close()

        cs = cr.ContentStat(cr.SHA256)
        cr.compress_file_with_stat(
            db_path, db_path + cr.compression_suffix(cr.BZ2_COMPRESSION),
            cr.BZ2_COMPRESSION, cs)
        os.remove(db_path)
        queues['master'].put(
            ((name + '_db',
              db_path + cr.compression_suffix(cr.BZ2_COMPRESSION)),
             (cs.checksum, cs.size, cs.checksum_type)), True)

    queue_manager = multiprocessing.Manager()
    queues = {
        'master': queue_manager.Queue(),
        'primary': queue_manager.Queue(),
        'filelists': queue_manager.Queue(),
        'other': queue_manager.Queue(),
    }

    log.write('[%s] Generating metadata in %s\n' % (stamp(), repodata_path))

    th = [0] * 6
    th[0] = multiprocessing.Process(target=__create_xml,
                                    args=(queues, pri_xml_path,
                                          cr.PrimaryXmlFile, 'primary'))
    th[0].start()
    th[1] = multiprocessing.Process(target=__create_xml,
                                    args=(queues, fil_xml_path,
                                          cr.FilelistsXmlFile, 'filelists'))
    th[1].start()
    th[2] = multiprocessing.Process(target=__create_xml,
                                    args=(queues, oth_xml_path,
                                          cr.OtherXmlFile, 'other'))
    th[2].start()
    th[3] = multiprocessing.Process(target=__create_db,
                                    args=(queues, pri_db_path,
                                          cr.PrimarySqlite, 'primary'))
    th[3].start()
    th[4] = multiprocessing.Process(target=__create_db,
                                    args=(queues, fil_db_path,
                                          cr.FilelistsSqlite, 'filelists'))
    th[4].start()
    th[5] = multiprocessing.Process(target=__create_db,
                                    args=(queues, oth_db_path, cr.OtherSqlite,
                                          'other'))
    th[5].start()

    repomd = cr.Repomd()

    data_files = set()
    for _ in range(6):
        rf = queues['master'].get(True)
        r = cr.RepomdRecord(*rf[0])
        r.checksum_open_type = cr.checksum_name_str(rf[1][2])
        r.checksum_open = rf[1][0]
        r.size_open = rf[1][1]
        r.fill(cr.SHA256)
        if not rf[0][0].endswith('_db'):
            queues[rf[0][0]].put(r.checksum, True)
        r.rename_file()
        r.location_href = os.path.join('repodata',
                                       os.path.basename(r.location_href))
        data_files.add(r.location_real)
        repomd.set_record(r)

    for t in th:
        t.join()

    repomd.sort_records()
    return (repomd.xml_dump(), data_files)
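
cr_create_md() returns the serialized repomd.xml plus the set of checksum-renamed files it produced. A hypothetical caller (the repodata path is illustrative) would persist both:

import os

# Hypothetical caller of cr_create_md(); the path is illustrative.
repodata = '/srv/repo/repodata'
repomd_xml, data_files = cr_create_md(repodata)
with open(os.path.join(repodata, 'repomd.xml'), 'w') as f:
    f.write(repomd_xml)
# data_files names the generated metadata files that must be kept on disk.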
Exemple #23
0
    def get_record(self) -> createrepo_c.RepomdRecord:
        record = createrepo_c.RepomdRecord(self.name, f'{self.path}')
        record.fill(createrepo_c.SHA256)
        return record
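
Note that f'{self.path}' merely coerces a pathlib.Path to the plain string createrepo_c expects. A hypothetical call site, assuming a holder object with name and path attributes:

import createrepo_c

# Hypothetical usage of get_record(); "holder" is illustrative.
repomd = createrepo_c.Repomd()
repomd.set_record(holder.get_record())
print(repomd.xml_dump())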
Exemple #24
0
    def _apply_basic_delta(self, md, notes):
        """Basic delta handling shared by the delta plugins.

        Returns a (done, rec) tuple: done is True when this method fully
        handled the metadata (rec is then the new repomd record, or None),
        and False when a real delta plugin still has to process it.
        """

        if not md:
            # No metadata - Nothing to do
            return (True, None)

        # Init some attributes of md.
        # These variables should be set only if a new record was generated;
        # otherwise they should stay None/False.
        md.new_rec = None
        md.new_fn_exists = False

        if not notes:
            # No notes - Nothing to do
            return (True, None)

        if not md.old_rec and not md.delta_rec:
            # No metadata record exists in either repo
            self._debug("\"{0}\": Doesn't exist "
                        "in any repo".format(md.metadata_type))
            return (True, None)

        if not md.delta_rec:
            # This record is missing in delta repo
            if notes.get("unchanged") != "1":
                # This metadata was removed in the new version of the repo
                self._debug("\"{0}\": Removed in the new version of repodata"
                            "".format(md.metadata_type))
                return (True, None)

            # A copy from the old repo should be used
            if not md.old_fn_exists:
                # This is missing in the old repo
                self._warning("\"{0}\": From old repo should be used, but "
                              "it is missing".format(md.metadata_type))
                return (True, None)

            # Use copy from the old repo

            # Check if old file should have a new name
            basename = notes.get("new_name")
            if not basename:
                basename = os.path.basename(md.old_fn)

            md.new_fn = os.path.join(md.out_dir, basename)

            checksum_name = notes.get("checksum_name", DEFAULT_CHECKSUM_NAME)
            checksum_type = cr.checksum_type(checksum_name)

            # Copy the file and create repomd record
            shutil.copy2(md.old_fn, md.new_fn)
            rec = cr.RepomdRecord(md.metadata_type, md.new_fn)
            rec.fill(checksum_type)
            if self.globalbundle.unique_md_filenames:
                rec.rename_file()
                md.new_fn = rec.location_real

            md.new_rec = rec
            md.new_fn_exists = True

            return (True, rec)

        if not md.delta_fn_exists:
            # Delta is missing
            self._warning("\"{0}\": Delta file is missing"
                          "".format(md.metadata_type))
            return (True, None)

        # At this point we are sure we have a delta file

        if notes.get("original") == "1":
            # Delta file is the target file

            # Check if file should be uncompressed
            decompress = False
            if notes.get("compressed") == "1":
                decompress = True

            rec = self.apply_use_original(md, decompress)
            self._debug("\"{0}\": Used delta is just a copy")

            md.new_rec = rec
            md.new_fn_exists = True

            return (True, rec)

        if not md.old_fn_exists:
            # Old file is missing
            self._warning("\"{0}\": Old file is missing"
                          "".format(md.metadata_type))
            return (True, None)

        # A delta file exists, it is not a plain copy, and the metadata
        # file from the old repo cannot simply be reused.
        # This is a job for a real delta plugin :)
        return (False, None)
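
A concrete delta plugin would typically call _apply_basic_delta() first and fall back to its own logic only when the basic handling reports it could not finish. A hedged sketch of that flow (every name except _apply_basic_delta is illustrative):

    def apply(self, md, notes):
        # Hypothetical plugin entry point built on the (done, rec) contract.
        done, rec = self._apply_basic_delta(md, notes)
        if done:
            return rec                       # copied/skipped - nothing more to do
        return self._apply_real_delta(md)    # illustrative real delta step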
Exemple #25
0
def create_repomd_xml(packages,
                       publication,
                       extra_repomdrecords,
                       sub_folder=None):
    """
    Creates a repomd.xml file.

    Args:
        packages(app.models.Package): set of packages
        publication(pulpcore.plugin.models.Publication): the publication
        extra_repomdrecords(list): extra (name, path, db) tuples appended to
            the generated repomd records
        sub_folder(str): name of the folder for sub repos

    """
    cwd = os.getcwd()
    repodata_path = REPODATA_PATH
    has_modules = False

    if sub_folder:
        cwd = os.path.join(cwd, sub_folder)
        repodata_path = os.path.join(sub_folder, repodata_path)

    # Prepare metadata files
    repomd_path = os.path.join(cwd, "repomd.xml")
    pri_xml_path = os.path.join(cwd, "primary.xml.gz")
    fil_xml_path = os.path.join(cwd, "filelists.xml.gz")
    oth_xml_path = os.path.join(cwd, "other.xml.gz")
    pri_db_path = os.path.join(cwd, "primary.sqlite")
    fil_db_path = os.path.join(cwd, "filelists.sqlite")
    oth_db_path = os.path.join(cwd, "other.sqlite")
    upd_xml_path = os.path.join(cwd, "updateinfo.xml.gz")
    mod_yml_path = os.path.join(cwd, "modules.yaml")

    pri_xml = cr.PrimaryXmlFile(pri_xml_path)
    fil_xml = cr.FilelistsXmlFile(fil_xml_path)
    oth_xml = cr.OtherXmlFile(oth_xml_path)
    pri_db = cr.PrimarySqlite(pri_db_path)
    fil_db = cr.FilelistsSqlite(fil_db_path)
    oth_db = cr.OtherSqlite(oth_db_path)
    upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

    pri_xml.set_num_of_pkgs(len(packages))
    fil_xml.set_num_of_pkgs(len(packages))
    oth_xml.set_num_of_pkgs(len(packages))

    # Process all packages
    for package in packages:
        pkg = package.to_createrepo_c()
        pkg.location_href = package.contentartifact_set.first().relative_path
        pri_xml.add_pkg(pkg)
        fil_xml.add_pkg(pkg)
        oth_xml.add_pkg(pkg)
        pri_db.add_pkg(pkg)
        fil_db.add_pkg(pkg)
        oth_db.add_pkg(pkg)

    # Process update records
    for update_record in UpdateRecord.objects.filter(
            pk__in=publication.repository_version.content):
        upd_xml.add_chunk(update_record_xml(update_record))

    # Process modulemd and modulemd_defaults
    with open(mod_yml_path, 'ab') as mod_yml:
        for modulemd in Modulemd.objects.filter(
                pk__in=publication.repository_version.content):
            mod_yml.write(modulemd._artifacts.get().file.read())
            has_modules = True
        for default in ModulemdDefaults.objects.filter(
                pk__in=publication.repository_version.content):
            mod_yml.write(default._artifacts.get().file.read())
            has_modules = True

    pri_xml.close()
    fil_xml.close()
    oth_xml.close()
    upd_xml.close()

    repomd = cr.Repomd()

    repomdrecords = [("primary", pri_xml_path, pri_db),
                     ("filelists", fil_xml_path, fil_db),
                     ("other", oth_xml_path, oth_db),
                     ("primary_db", pri_db_path, None),
                     ("filelists_db", fil_db_path, None),
                     ("other_db", oth_db_path, None),
                     ("updateinfo", upd_xml_path, None)]

    if has_modules:
        repomdrecords.append(("modules", mod_yml_path, None))

    repomdrecords.extend(extra_repomdrecords)

    sqlite_files = ("primary_db", "filelists_db", "other_db")
    for name, path, db_to_update in repomdrecords:
        record = cr.RepomdRecord(name, path)
        if name in sqlite_files:
            record_bz = record.compress_and_fill(cr.SHA256, cr.BZ2)
            record_bz.type = name
            record_bz.rename_file()
            path = record_bz.location_href.split('/')[-1]
            repomd.set_record(record_bz)
        elif name == "modules":
            record_md = record.compress_and_fill(cr.SHA256, cr.GZ)
            record_md.type = name
            record_md.rename_file()
            path = record_md.location_href.split('/')[-1]
            repomd.set_record(record_md)
        else:
            record.fill(cr.SHA256)
            if db_to_update:
                db_to_update.dbinfo_update(record.checksum)
                db_to_update.close()
            record.rename_file()
            path = record.location_href.split('/')[-1]
            repomd.set_record(record)

        if sub_folder:
            path = os.path.join(sub_folder, path)

        PublishedMetadata.create_from_file(relative_path=os.path.join(
            repodata_path, os.path.basename(path)),
                                           publication=publication,
                                           file=File(open(path, 'rb')))

    with open(repomd_path, "w") as repomd_f:
        repomd_f.write(repomd.xml_dump())

    PublishedMetadata.create_from_file(relative_path=os.path.join(
        repodata_path, os.path.basename(repomd_path)),
                                       publication=publication,
                                       file=File(open(repomd_path, 'rb')))
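
The only difference between the sqlite and modules branches above is the compression constant handed to compress_and_fill(). Isolated, the call looks like this (the input file is illustrative):

import createrepo_c as cr

# compress_and_fill() writes a compressed copy next to the input file and
# returns a new record describing that copy; the input path is illustrative.
rec = cr.RepomdRecord("modules", "modules.yaml")
rec_gz = rec.compress_and_fill(cr.SHA256, cr.GZ)   # produces modules.yaml.gz
rec_gz.type = "modules"
rec_gz.rename_file()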
Exemple #26
0
    def gen(self):

        # Prepare output path
        os.mkdir(self.delta_repodata_path)

        # Set of types of processed metadata records ("primary", "primary_db"...)
        processed_metadata = set()

        for plugin in PLUGINS:

            # Prepare metadata for the plugin
            metadata_objects = {}
            for metadata_name in plugin.METADATA:
                metadata_object = self._new_metadata(metadata_name)
                if metadata_object is not None:
                    metadata_objects[metadata_name] = metadata_object

            # Skip plugin if no supported metadata available
            if not metadata_objects:
                self._debug("Plugin {0}: Skipped - None of supported " \
                            "metadata {1} available".format(
                            plugin.NAME, plugin.METADATA))
                continue

            # Prepare plugin bundle
            pluginbundle = PluginBundle(plugin.NAME, plugin.VERSION)
            self.deltametadata.add_pluginbundle(pluginbundle)

            # Use the plugin
            self._debug("Plugin {0}: Active".format(plugin.NAME))
            plugin_instance = plugin(pluginbundle,
                                     self.globalbundle,
                                     logger=self._get_logger())
            repomd_records = plugin_instance.gen(metadata_objects)

            # Put repomd records from the processed metadata into repomd
            self._debug("Plugin {0}: Processed {1} record(s) "
                        "and produced:".format(plugin.NAME, metadata_objects.keys()))
            for rec in repomd_records:
                self._debug(" - {0}".format(rec.type))
                self.delta_repomd.set_record(rec)

            # Organization stuff
            for md in metadata_objects.keys():
                processed_metadata.add(md)

        # Process the rest of the metadata files
        metadata_objects = {}
        for rectype, rec in self.new_records.items():
            if rectype not in processed_metadata:
                metadata_object = self._new_metadata(rectype)
                if metadata_object is not None:
                    self._debug("To be processed by general delta plugin: " \
                                "{0}".format(rectype))
                    metadata_objects[rectype] = metadata_object
                else:
                    self._debug("Not processed - even by general delta " \
                                "plugin: {0}".format(rectype))

        if metadata_objects:
            # Use the plugin
            pluginbundle = PluginBundle(GENERAL_PLUGIN.NAME,
                                        GENERAL_PLUGIN.VERSION)
            self.deltametadata.add_pluginbundle(pluginbundle)
            self._debug("Plugin {0}: Active".format(GENERAL_PLUGIN.NAME))
            plugin_instance = GENERAL_PLUGIN(pluginbundle,
                                             self.globalbundle,
                                             logger=self._get_logger())
            repomd_records = plugin_instance.gen(metadata_objects)

            # Put repomd records from the processed metadata into repomd
            self._debug("Plugin {0}: Processed {1} record(s) "
                        "and produced:".format(GENERAL_PLUGIN.NAME, metadata_objects.keys()))
            for rec in repomd_records:
                self._debug(" - {0}".format(rec.type))
                self.delta_repomd.set_record(rec)

        # Check if calculated contenthashes match
        # and calculate them if they don't exist
        self.check_content_hashes()

        # Write out deltametadata.xml
        self.fill_deltametadata()
        deltametadata_path = os.path.join(self.delta_repodata_path,
                                          "deltametadata.xml")
        stat = cr.ContentStat(self.checksum_type)
        deltametadata_path = self.deltametadata.dump(
            deltametadata_path,
            compression_type=self.compression_type,
            stat=stat)

        deltametadata_rec = cr.RepomdRecord("deltametadata",
                                            deltametadata_path)
        deltametadata_rec.load_contentstat(stat)
        deltametadata_rec.fill(self.checksum_type)
        if self.unique_md_filenames:
            deltametadata_rec.rename_file()
        self.delta_repomd.set_record(deltametadata_rec)

        # Prepare and write out the new repomd.xml
        self._debug("Preparing repomd.xml ...")
        deltacontenthash = "{0}-{1}".format(
            self.globalbundle.calculated_old_contenthash,
            self.globalbundle.calculated_new_contenthash)
        self.delta_repomd.set_contenthash(deltacontenthash,
                                          self.contenthash_type_str)
        self.delta_repomd.sort_records()
        delta_repomd_xml = self.delta_repomd.xml_dump()

        self._debug("Writing repomd.xml ...")
        open(self.delta_repomd_path, "w").write(delta_repomd_xml)

        # Final move
        if os.path.exists(self.final_path):
            self._warning("Destination dir already exists! Removing %s" % \
                          self.final_path)
            shutil.rmtree(self.final_path)
        self._debug("Moving %s -> %s" %
                    (self.delta_repodata_path, self.final_path))
        os.rename(self.delta_repodata_path, self.final_path)
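
Stripped of the plugin machinery, the final repomd assembly above reduces to a handful of calls. A reduced sketch (the content hash string is a placeholder):

import createrepo_c as cr

# Reduced sketch of the final assembly; the content hash is a placeholder.
repomd = cr.Repomd()
repomd.set_contenthash("oldhash-newhash", "sha256")
repomd.sort_records()        # deterministic record order in the XML
xml = repomd.xml_dump()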
Exemple #27
0
def create_repomd_xml(content, publication, extra_repomdrecords, sub_folder=None):
    """
    Creates a repomd.xml file.

    Args:
        content(app.models.Content): content set
        publication(pulpcore.plugin.models.Publication): the publication
        extra_repomdrecords(list): extra (name, path, db) tuples appended to
            the generated repomd records
        sub_folder(str): name of the folder for sub repos

    """
    cwd = os.getcwd()
    repodata_path = REPODATA_PATH
    has_modules = False
    has_comps = False

    if sub_folder:
        cwd = os.path.join(cwd, sub_folder)
        repodata_path = os.path.join(sub_folder, repodata_path)

    # Prepare metadata files
    repomd_path = os.path.join(cwd, "repomd.xml")
    pri_xml_path = os.path.join(cwd, "primary.xml.gz")
    fil_xml_path = os.path.join(cwd, "filelists.xml.gz")
    oth_xml_path = os.path.join(cwd, "other.xml.gz")
    pri_db_path = os.path.join(cwd, "primary.sqlite")
    fil_db_path = os.path.join(cwd, "filelists.sqlite")
    oth_db_path = os.path.join(cwd, "other.sqlite")
    upd_xml_path = os.path.join(cwd, "updateinfo.xml.gz")
    mod_yml_path = os.path.join(cwd, "modules.yaml")
    comps_xml_path = os.path.join(cwd, "comps.xml")

    pri_xml = cr.PrimaryXmlFile(pri_xml_path)
    fil_xml = cr.FilelistsXmlFile(fil_xml_path)
    oth_xml = cr.OtherXmlFile(oth_xml_path)
    pri_db = cr.PrimarySqlite(pri_db_path)
    fil_db = cr.FilelistsSqlite(fil_db_path)
    oth_db = cr.OtherSqlite(oth_db_path)
    upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

    packages = Package.objects.filter(pk__in=content)
    total_packages = packages.count()

    pri_xml.set_num_of_pkgs(total_packages)
    fil_xml.set_num_of_pkgs(total_packages)
    oth_xml.set_num_of_pkgs(total_packages)

    # Process all packages
    for package in packages.iterator():
        pkg = package.to_createrepo_c()
        pkg.location_href = package.contentartifact_set.only('relative_path').first().relative_path
        pri_xml.add_pkg(pkg)
        fil_xml.add_pkg(pkg)
        oth_xml.add_pkg(pkg)
        pri_db.add_pkg(pkg)
        fil_db.add_pkg(pkg)
        oth_db.add_pkg(pkg)

    # Process update records
    for update_record in UpdateRecord.objects.filter(pk__in=content).iterator():
        upd_xml.add_chunk(cr.xml_dump_updaterecord(update_record.to_createrepo_c()))

    # Process modulemd and modulemd_defaults
    with open(mod_yml_path, 'ab') as mod_yml:
        for modulemd in Modulemd.objects.filter(pk__in=content).iterator():
            mod_yml.write(modulemd._artifacts.get().file.read())
            has_modules = True
        for default in ModulemdDefaults.objects.filter(pk__in=content).iterator():
            mod_yml.write(default._artifacts.get().file.read())
            has_modules = True

    # Process comps
    comps = libcomps.Comps()
    for pkg_grp in PackageGroup.objects.filter(pk__in=content).iterator():
        group = pkg_grp.pkg_grp_to_libcomps()
        comps.groups.append(group)
        has_comps = True
    for pkg_cat in PackageCategory.objects.filter(pk__in=content).iterator():
        cat = pkg_cat.pkg_cat_to_libcomps()
        comps.categories.append(cat)
        has_comps = True
    for pkg_env in PackageEnvironment.objects.filter(pk__in=content).iterator():
        env = pkg_env.pkg_env_to_libcomps()
        comps.environments.append(env)
        has_comps = True
    for pkg_lng in PackageLangpacks.objects.filter(pk__in=content).iterator():
        comps.langpacks = dict_to_strdict(pkg_lng.matches)
        has_comps = True

    comps.toxml_f(comps_xml_path, xml_options={"default_explicit": True,
                                               "empty_groups": True,
                                               "uservisible_explicit": True})

    pri_xml.close()
    fil_xml.close()
    oth_xml.close()
    upd_xml.close()

    repomd = cr.Repomd()

    repomdrecords = [("primary", pri_xml_path, pri_db),
                     ("filelists", fil_xml_path, fil_db),
                     ("other", oth_xml_path, oth_db),
                     ("primary_db", pri_db_path, None),
                     ("filelists_db", fil_db_path, None),
                     ("other_db", oth_db_path, None),
                     ("updateinfo", upd_xml_path, None)]

    if has_modules:
        repomdrecords.append(("modules", mod_yml_path, None))

    if has_comps:
        repomdrecords.append(("group", comps_xml_path, None))

    repomdrecords.extend(extra_repomdrecords)

    sqlite_files = ("primary_db", "filelists_db", "other_db")
    for name, path, db_to_update in repomdrecords:
        record = cr.RepomdRecord(name, path)
        if name in sqlite_files:
            record_bz = record.compress_and_fill(cr.SHA256, cr.BZ2)
            record_bz.type = name
            record_bz.rename_file()
            path = record_bz.location_href.split('/')[-1]
            repomd.set_record(record_bz)
        else:
            record.fill(cr.SHA256)
            if db_to_update:
                db_to_update.dbinfo_update(record.checksum)
                db_to_update.close()
            record.rename_file()
            path = record.location_href.split('/')[-1]
            repomd.set_record(record)

        if sub_folder:
            path = os.path.join(sub_folder, path)

        PublishedMetadata.create_from_file(
            relative_path=os.path.join(repodata_path, os.path.basename(path)),
            publication=publication,
            file=File(open(path, 'rb'))
        )

    with open(repomd_path, "w") as repomd_f:
        repomd_f.write(repomd.xml_dump())

    PublishedMetadata.create_from_file(
        relative_path=os.path.join(repodata_path, os.path.basename(repomd_path)),
        publication=publication,
        file=File(open(repomd_path, 'rb'))
    )
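
Unlike the first example in this section, this variant hardcodes cr.SHA256 for every record. A hypothetical per-record helper in the spirit of the get_checksum_type(name, checksum_types) call used there might read:

import createrepo_c as cr

# Hypothetical helper; the mapping convention ("metadata" as the fallback
# key) is an assumption, not documented behaviour.
def get_checksum_type(name, checksum_types):
    return checksum_types.get(name, checksum_types.get("metadata", cr.SHA256))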
Exemple #28
0
    def build_metadata(self):
        staging = tempfile.mkdtemp(prefix='yumsync-', suffix='-metadata')

        if self.checksum in ('sha', 'sha1'):
            sumtype = 'sha'
        else:
            sumtype = 'sha256'

        repodata_path = os.path.join(staging, 'repodata')
        os.mkdir(repodata_path)

        # Prepare metadata files
        repomd_path  = os.path.join(repodata_path, "repomd.xml")
        pri_xml_path = os.path.join(repodata_path, "primary.xml.gz")
        fil_xml_path = os.path.join(repodata_path, "filelists.xml.gz")
        oth_xml_path = os.path.join(repodata_path, "other.xml.gz")
        pri_db_path  = os.path.join(repodata_path, "primary.sqlite")
        fil_db_path  = os.path.join(repodata_path, "filelists.sqlite")
        oth_db_path  = os.path.join(repodata_path, "other.sqlite")

        # Related python objects
        pri_xml = createrepo.PrimaryXmlFile(pri_xml_path)
        fil_xml = createrepo.FilelistsXmlFile(fil_xml_path)
        oth_xml = createrepo.OtherXmlFile(oth_xml_path)
        pri_db  = createrepo.PrimarySqlite(pri_db_path)
        fil_db  = createrepo.FilelistsSqlite(fil_db_path)
        oth_db  = createrepo.OtherSqlite(oth_db_path)

        # Set package list
        if self.local_dir and self.link_type == "individual_symlink" and self.version_dir:
            pkg_base = self.version_package_dir
        else:
            pkg_base = self.package_dir
        pkg_list = [(os.path.join(pkg_base, str(pkg)),
                     os.path.join("packages", str(pkg))) for pkg in self._packages]

        pri_xml.set_num_of_pkgs(len(pkg_list))
        fil_xml.set_num_of_pkgs(len(pkg_list))
        oth_xml.set_num_of_pkgs(len(pkg_list))

        # Process all packages in parallel if possible
        self.metadata_progress = 0
        self.total_pkgs = len(pkg_list)
        metadata_mutex = Lock()

        def collect_result(future):
            with metadata_mutex:
                self.metadata_progress += 1
                self._callback('repo_metadata',
                               int(self.metadata_progress * 100 // self.total_pkgs))

        def process_pkg(filename, href):
            pkg = createrepo.package_from_rpm(filename)
            pkg.location_href = href
            return pkg

        try:
            from concurrent.futures import ThreadPoolExecutor
            parallelize = True
        except ImportError:
            parallelize = False

        if parallelize:
            with ThreadPoolExecutor(max_workers=self._workers) as executor:
                futures = []
                for filename in pkg_list:
                    future = executor.submit(process_pkg, filename[0], filename[1])
                    future.add_done_callback(collect_result)
                    futures.append(future)
                for future in futures:
                    try:
                        pkg = future.result(10)
                    except Exception:
                        logging.exception("Thread generated an exception")
                    else:
                        pri_xml.add_pkg(pkg)
                        fil_xml.add_pkg(pkg)
                        oth_xml.add_pkg(pkg)
                        pri_db.add_pkg(pkg)
                        fil_db.add_pkg(pkg)
                        oth_db.add_pkg(pkg)
        else:
            for filename in pkg_list:
                pkg = process_pkg(filename[0], filename[1])
                for md_file in (pri_xml, fil_xml, oth_xml, pri_db, fil_db, oth_db):
                    md_file.add_pkg(pkg)
                collect_result(None)

        pri_xml.close()
        fil_xml.close()
        oth_xml.close()

        # Note: The DBs are still open! We have to calculate checksums of the
        # xml files and insert them into the databases first!

        self._callback('repo_metadata', 'building')
        # Prepare repomd.xml
        repomd = createrepo.Repomd()

        # Order is important!
        repomdrecords = (("primary",      pri_xml_path, pri_db, False),
                         ("filelists",    fil_xml_path, fil_db, False),
                         ("other",        oth_xml_path, oth_db, False),
                         ("primary_db",   pri_db_path,  None,   True),
                         ("filelists_db", fil_db_path,  None,   True),
                         ("other_db",     oth_db_path,  None,   True))

        for name, path, db_to_update, compress in repomdrecords:
            record = createrepo.RepomdRecord(name, path)
            if compress:
                # compress_and_fill() returns a new record pointing at the
                # compressed copy; keep that one instead of the original
                record = record.compress_and_fill(createrepo.SHA256, createrepo.XZ_COMPRESSION)
                record.type = name
            else:
                record.fill(createrepo.SHA256)

            if db_to_update:
                db_to_update.dbinfo_update(record.checksum)
                db_to_update.close()

            repomd.set_record(record)

        if self._repomd:
            for md_type, md_content in six.iteritems(self._repomd):
                md_file = os.path.join(repodata_path, md_type[1])
                with open(md_file, 'w') as f:
                    f.write(md_content)
                record = createrepo.RepomdRecord(md_type[0], md_file)
                record.fill(createrepo.SHA256)
                repomd.set_record(record)

        open(repomd_path, "w").write(repomd.xml_dump())

        return staging
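
build_metadata() only fills a staging directory; a hypothetical caller would swap the result into the live repo once the dump succeeds (repo and repo.local_dir are assumptions):

import os
import shutil

# Hypothetical caller; repo and repo.local_dir are assumed names.
staging = repo.build_metadata()
final = os.path.join(repo.local_dir, 'repodata')
if os.path.exists(final):
    shutil.rmtree(final)
shutil.move(os.path.join(staging, 'repodata'), final)
shutil.rmtree(staging, ignore_errors=True)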
Exemple #29
0
def publish(repository_version_pk):
    """
    Create a Publication based on a RepositoryVersion.

    Args:
        repository_version_pk (str): Create a publication from this repository version.
    """
    repository_version = RepositoryVersion.objects.get(
        pk=repository_version_pk)

    log.info(
        _('Publishing: repository={repo}, version={version}').format(
            repo=repository_version.repository.name,
            version=repository_version.number,
        ))

    with WorkingDirectory():
        with RpmPublication.create(repository_version) as publication:
            packages = populate(publication)

            # Prepare metadata files
            repomd_path = os.path.join(os.getcwd(), "repomd.xml")
            pri_xml_path = os.path.join(os.getcwd(), "primary.xml.gz")
            fil_xml_path = os.path.join(os.getcwd(), "filelists.xml.gz")
            oth_xml_path = os.path.join(os.getcwd(), "other.xml.gz")
            pri_db_path = os.path.join(os.getcwd(), "primary.sqlite")
            fil_db_path = os.path.join(os.getcwd(), "filelists.sqlite")
            oth_db_path = os.path.join(os.getcwd(), "other.sqlite")
            upd_xml_path = os.path.join(os.getcwd(), "updateinfo.xml.gz")

            pri_xml = cr.PrimaryXmlFile(pri_xml_path)
            fil_xml = cr.FilelistsXmlFile(fil_xml_path)
            oth_xml = cr.OtherXmlFile(oth_xml_path)
            pri_db = cr.PrimarySqlite(pri_db_path)
            fil_db = cr.FilelistsSqlite(fil_db_path)
            oth_db = cr.OtherSqlite(oth_db_path)
            upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

            pri_xml.set_num_of_pkgs(len(packages))
            fil_xml.set_num_of_pkgs(len(packages))
            oth_xml.set_num_of_pkgs(len(packages))

            # Process all packages
            for package in packages:
                pkg = package.to_createrepo_c()
                pkg.location_href = package.contentartifact_set.first().relative_path
                pri_xml.add_pkg(pkg)
                fil_xml.add_pkg(pkg)
                oth_xml.add_pkg(pkg)
                pri_db.add_pkg(pkg)
                fil_db.add_pkg(pkg)
                oth_db.add_pkg(pkg)

            # Process update records
            for update_record in UpdateRecord.objects.filter(
                    pk__in=publication.repository_version.content):
                upd_xml.add_chunk(update_record_xml(update_record))

            pri_xml.close()
            fil_xml.close()
            oth_xml.close()
            upd_xml.close()

            repomd = cr.Repomd()

            repomdrecords = (("primary", pri_xml_path,
                              pri_db), ("filelists", fil_xml_path, fil_db),
                             ("other", oth_xml_path,
                              oth_db), ("primary_db", pri_db_path, None),
                             ("filelists_db", fil_db_path,
                              None), ("other_db", oth_db_path, None),
                             ("updateinfo", upd_xml_path, None))

            sqlite_files = ("primary_db", "filelists_db", "other_db")
            for name, path, db_to_update in repomdrecords:
                record = cr.RepomdRecord(name, path)
                if name in sqlite_files:
                    record_bz = record.compress_and_fill(cr.SHA256, cr.BZ2)
                    record_bz.type = name
                    record_bz.rename_file()
                    path = record_bz.location_href.split('/')[-1]
                    repomd.set_record(record_bz)
                else:
                    record.fill(cr.SHA256)
                    if db_to_update:
                        db_to_update.dbinfo_update(record.checksum)
                        db_to_update.close()
                    record.rename_file()
                    path = record.location_href.split('/')[-1]
                    repomd.set_record(record)
                metadata = PublishedMetadata(
                    relative_path=os.path.join(REPODATA_PATH,
                                               os.path.basename(path)),
                    publication=publication,
                    file=File(open(os.path.basename(path), 'rb')))
                metadata.save()

            with open(repomd_path, "w") as repomd_f:
                repomd_f.write(repomd.xml_dump())

            metadata = PublishedMetadata(
                relative_path=os.path.join(REPODATA_PATH,
                                           os.path.basename(repomd_path)),
                publication=publication,
                file=File(open(os.path.basename(repomd_path), 'rb')))
            metadata.save()
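
The record loop here is shared, nearly verbatim, with Exemples #25 and #27 above. Extracted as a helper, under the same assumptions those examples make, it reads:

import createrepo_c as cr

# Hedged extraction of the shared record loop; names mirror the examples.
SQLITE_FILES = ("primary_db", "filelists_db", "other_db")

def add_records(repomd, repomdrecords):
    for name, path, db_to_update in repomdrecords:
        record = cr.RepomdRecord(name, path)
        if name in SQLITE_FILES:
            record = record.compress_and_fill(cr.SHA256, cr.BZ2)
            record.type = name
        else:
            record.fill(cr.SHA256)
            if db_to_update:
                db_to_update.dbinfo_update(record.checksum)
                db_to_update.close()
        record.rename_file()
        repomd.set_record(record)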
Exemple #30
0
    def test_repomd(self):
        shutil.copyfile(REPO_00_PRIXML, self.path00)
        self.assertTrue(os.path.exists(self.path00))

        md = cr.Repomd()
        self.assertTrue(md)

        xml = md.xml_dump()
        # Revision should be the current Unix time
        self.assertTrue(re.search(r"<revision>[0-9]+</revision>", xml))

        self.assertEqual(md.revision, None)
        md.set_revision("foobar")
        self.assertEqual(md.revision, "foobar")

        self.assertEqual(md.repoid, None)
        md.set_repoid("barid", "sha256")
        self.assertEqual(md.repoid, "barid")

        self.assertEqual(md.contenthash, None)
        md.set_contenthash("fooid", "sha256")
        self.assertEqual(md.contenthash, "fooid")

        self.assertEqual(md.distro_tags, [])
        md.add_distro_tag("tag1")
        md.add_distro_tag("tag2", "cpeid1")
        md.add_distro_tag("tag3", cpeid="cpeid2")
        self.assertEqual(md.distro_tags, [(None, 'tag1'), ('cpeid1', 'tag2'),
                                          ('cpeid2', 'tag3')])

        self.assertEqual(md.repo_tags, [])
        md.add_repo_tag("repotag")
        self.assertEqual(md.repo_tags, ['repotag'])

        self.assertEqual(md.content_tags, [])
        md.add_content_tag("contenttag")
        self.assertEqual(md.content_tags, ['contenttag'])

        self.assertEqual(md.records, [])

        xml = md.xml_dump()
        self.assertEqual(
            xml, """<?xml version="1.0" encoding="UTF-8"?>
<repomd xmlns="http://linux.duke.edu/metadata/repo" xmlns:rpm="http://linux.duke.edu/metadata/rpm">
  <revision>foobar</revision>
  <repoid type="sha256">barid</repoid>
  <contenthash type="sha256">fooid</contenthash>
  <tags>
    <content>contenttag</content>
    <repo>repotag</repo>
    <distro>tag1</distro>
    <distro cpeid="cpeid1">tag2</distro>
    <distro cpeid="cpeid2">tag3</distro>
  </tags>
</repomd>
""")

        rec = cr.RepomdRecord("primary", self.path00)
        rec.fill(cr.SHA256)
        rec.timestamp = 1
        rec.location_base = "http://foo/"
        md.set_record(rec)

        self.assertEqual(len(md.records), 1)

        md.set_record(rec)

        self.assertEqual(len(md.records), 1)

        md.repoid = None
        md.contenthash = None

        xml = md.xml_dump()
        self.assertEqual(
            xml, """<?xml version="1.0" encoding="UTF-8"?>
<repomd xmlns="http://linux.duke.edu/metadata/repo" xmlns:rpm="http://linux.duke.edu/metadata/rpm">
  <revision>foobar</revision>
  <tags>
    <content>contenttag</content>
    <repo>repotag</repo>
    <distro>tag1</distro>
    <distro cpeid="cpeid1">tag2</distro>
    <distro cpeid="cpeid2">tag3</distro>
  </tags>
  <data type="primary">
    <checksum type="sha256">1cb61ea996355add02b1426ed4c1780ea75ce0c04c5d1107c025c3fbd7d8bcae</checksum>
    <open-checksum type="sha256">e1e2ffd2fb1ee76f87b70750d00ca5677a252b397ab6c2389137a0c33e7b359f</open-checksum>
    <location href="repodata/primary.xml.gz" xml:base="http://foo/"/>
    <timestamp>1</timestamp>
    <size>134</size>
    <open-size>167</open-size>
  </data>
</repomd>
""")