示例#1
0
def publish(publisher_pk, repository_version_pk):
    """
    Use provided publisher to create a Publication based on a RepositoryVersion.

    Args:
        publisher_pk (str): Use the publish settings provided by this publisher.
        repository_version_pk (str): Create a publication from this repository version.
    """
    publisher = FilePublisher.objects.get(pk=publisher_pk)
    repository_version = RepositoryVersion.objects.get(pk=repository_version_pk)

    log.info(
        _('Publishing: repository=%(repository)s, version=%(version)d, publisher=%(publisher)s'),
        {
            'repository': repository_version.repository.name,
            'version': repository_version.number,
            'publisher': publisher.name,
        })

    with WorkingDirectory():
        with Publication.create(repository_version, publisher) as publication:
            manifest = Manifest('PULP_MANIFEST')
            manifest.write(populate(publication))
            # Keep the manifest open only while the metadata is saved, so the
            # handle is always released (the original leaked it).
            with open(manifest.relative_path, 'rb') as manifest_fd:
                metadata = PublishedMetadata(
                    relative_path=os.path.basename(manifest.relative_path),
                    publication=publication,
                    file=File(manifest_fd))
                metadata.save()

    log.info(
        _('Publication: %(publication)s created'),
        {
            'publication': publication.pk
        })
示例#2
0
def publish(repository_version_pk):
    """
    Create a Publication based on a RepositoryVersion.

    Args:
        repository_version_pk (str): Create a publication from this repository version.
    """
    repository_version = RepositoryVersion.objects.get(
        pk=repository_version_pk)

    log.info(
        _("Publishing: repository=%(repository)s, version=%(version)d"),
        {
            "repository": repository_version.repository.name,
            "version": repository_version.number
        },
    )

    with WorkingDirectory():
        with CookbookPublication.create(repository_version) as publication:
            check_repo_version_constraint(publication)
            universe = Universe("__universe__")
            universe.write(populate(publication))
            # Close the universe file deterministically instead of leaking the
            # handle returned by open() (the original never closed it).
            with open(universe.relative_path, "rb") as universe_fd:
                metadata = PublishedMetadata(
                    relative_path=os.path.basename(universe.relative_path),
                    publication=publication,
                    file=File(universe_fd),
                )
                metadata.save()

    log.info(_("Publication: %(publication)s created"),
             {"publication": publication.pk})
示例#3
0
def publish(repository_version_pk):
    """
    Create a Publication based on a RepositoryVersion.

    Args:
        repository_version_pk (str): Create a publication from this repository version.

    """
    repository_version = RepositoryVersion.objects.get(
        pk=repository_version_pk)

    log.info(
        _("Publishing: repository=%(repository)s, version=%(version)d"),
        {
            "repository": repository_version.repository.name,
            "version": repository_version.number
        },
    )

    # NOTE(review): the positional "." is TemporaryDirectory's *suffix*
    # argument, not dir= — confirm this is intended.
    with tempfile.TemporaryDirectory("."):
        with ProgressReport(message="Publishing Content",
                            code="publishing.content") as progress_report:
            with CookbookPublication.create(repository_version) as publication:
                universe = Universe("__universe__")
                universe.write(
                    populate(publication, progress_report=progress_report))
                # Close the universe file deterministically (the original
                # leaked the handle returned by open()).
                with open(universe.relative_path, "rb") as universe_fd:
                    PublishedMetadata.create_from_file(
                        relative_path=os.path.basename(universe.relative_path),
                        publication=publication,
                        file=File(universe_fd),
                    )

    log.info(_("Publication: %(publication)s created"),
             {"publication": publication.pk})
示例#4
0
def publish(publisher_pk, repository_version_pk):
    """
    Use provided publisher to create a Publication based on a RepositoryVersion.

    Args:
        publisher_pk (str): Use the publish settings provided by this publisher.
        repository_version_pk (str): Create a publication from this repository version.

    """
    publisher = FilePublisher.objects.get(pk=publisher_pk)
    repository_version = RepositoryVersion.objects.get(pk=repository_version_pk)

    log.info(_('Publishing: repository={repo}, version={ver}, publisher={pub}').format(
        repo=repository_version.repository.name,
        ver=repository_version.number,
        pub=publisher.name,
    ))

    with WorkingDirectory():
        with Publication.create(repository_version, publisher, pass_through=True) as publication:
            manifest = Manifest(publisher.manifest)
            manifest.write(populate(publication))
            # Keep the manifest open only while the metadata is saved, so the
            # handle is always released (the original leaked it).
            with open(manifest.relative_path, 'rb') as manifest_fd:
                metadata = PublishedMetadata(
                    relative_path=os.path.basename(manifest.relative_path),
                    publication=publication,
                    file=File(manifest_fd))
                metadata.save()

    log.info(_('Publication: {publication} created').format(publication=publication.pk))
示例#5
0
    async def migrate_to_pulp3(cls, pulp2distributor, repo_version):
        """
        Migrate distributor to Pulp 3.

        Args:
            pulp2distributor(Pulp2Distributor): Pre-migrated pulp2 distributor to migrate
            repo_version: Pulp 3 repository version to publish; when falsy, falls
                back to the distributor's migrated repository version.

        Return:
            publication and distribution: FilePublication and FileDistribution in Pulp3
            created(bool): True if Distribution has just been created; False if Distribution
                           is an existing one
        """
        if not repo_version:
            repo_version = pulp2distributor.pulp2_repository.pulp3_repository_version
        # Reuse an existing publication for this version when one exists.
        publication = repo_version.publication_set.first()
        if not publication:
            # create publication
            with FilePublication.create(repo_version,
                                        pass_through=True) as publication:
                manifest = Manifest('PULP_MANIFEST')
                manifest.write(populate(publication))
                # Close the manifest handle deterministically (the original
                # leaked the handle returned by open()).
                with open(manifest.relative_path, "rb") as manifest_fd:
                    PublishedMetadata.create_from_file(
                        file=File(manifest_fd),
                        publication=publication)
        # create distribution
        pulp2_config = pulp2distributor.pulp2_config
        base_config = cls.parse_base_config(pulp2distributor, pulp2_config)
        # Prefer the pulp2 relative_url; fall back to the pulp2 repo id.
        base_config['base_path'] = pulp2_config.get(
            'relative_url', pulp2distributor.pulp2_repository.pulp2_repo_id)
        base_config['publication'] = publication
        distribution, created = FileDistribution.objects.update_or_create(
            **base_config)

        return publication, distribution, created
示例#6
0
 def finish(self):
     """Finalize the per-component indices, then write and publish the Release file."""
     # Publish Packages files
     for component in self.components.values():
         component.finish()
     # Publish Release file
     self.release["Components"] = " ".join(self.components.keys())
     release_dir = os.path.join("dists", self.distribution.strip("/"))
     release_path = os.path.join(release_dir, "Release")
     os.makedirs(os.path.dirname(release_path), exist_ok=True)
     with open(release_path, "wb") as release_file:
         self.release.dump(release_file)
     # Reopen for reading inside a context manager so the handle is always
     # closed (the original leaked it).
     with open(release_path, "rb") as release_fd:
         release_metadata = PublishedMetadata.create_from_file(
             publication=self.publication,
             file=File(release_fd),
         )
         release_metadata.save()
     if self.signing_service:
         signed = self.signing_service.sign(release_path)
         for signature_file in signed["signatures"].values():
             file_name = os.path.basename(signature_file)
             relative_path = os.path.join(release_dir, file_name)
             # Same leak fix for each detached signature file.
             with open(signature_file, "rb") as signature_fd:
                 metadata = PublishedMetadata.create_from_file(
                     publication=self.publication,
                     file=File(signature_fd),
                     relative_path=relative_path,
                 )
                 metadata.save()
示例#7
0
    def populate(self):
        """
        Populate a publication.

        Create published artifacts for a publication.

        """
        content = self.publication.repository_version.content
        self.repomdrecords = self.prepare_metadata_files(content)
        self.publish_artifacts(content)

        # Prefetch related objects to avoid per-tree queries in the loop below.
        distribution_trees = DistributionTree.objects.filter(
            pk__in=content
        ).prefetch_related(
            "addons",
            "variants",
            "addons__repository",
            "variants__repository",
            "contentartifact_set"
        )

        for distribution_tree in distribution_trees:
            self.handle_sub_repos(distribution_tree)

            treeinfo_file = create_treeinfo(distribution_tree)
            # Close the treeinfo handle deterministically (the original leaked
            # the handle returned by open()).
            with open(treeinfo_file.name, 'rb') as treeinfo_fd:
                PublishedMetadata.create_from_file(
                    publication=self.publication,
                    file=File(treeinfo_fd)
                )

        # Materialize per-sub-repo content and metadata records as attributes.
        for name, content in self.sub_repos:
            os.mkdir(name)
            setattr(self, f"{name}_content", content)
            setattr(self, f"{name}_repomdrecords", self.prepare_metadata_files(content, name))
示例#8
0
    def populate(self):
        """
        Populate a publication.

        Create published artifacts for a publication.

        """
        publication = self.publication
        main_content = publication.repository_version.content

        # Prefetch related objects so the loops below don't issue per-tree
        # queries.
        distribution_trees = DistributionTree.objects.filter(
            pk__in=publication.repository_version.content).prefetch_related(
                "addons", "variants", "addons__repository",
                "variants__repository", "contentartifact_set")

        for distribution_tree in distribution_trees:
            # Publish every artifact belonging to the tree itself.
            for content_artifact in distribution_tree.contentartifact_set.all(
            ):
                self.published_artifacts.append(
                    PublishedArtifact(
                        relative_path=content_artifact.relative_path,
                        publication=publication,
                        content_artifact=content_artifact))
            # Collect addon/variant sub-repos whose latest content set differs
            # from the main repository version's.
            for addon in distribution_tree.addons.all():
                repository_version = addon.repository.latest_version()
                if repository_version and repository_version.content != main_content:
                    self.sub_repos.append(
                        (addon.addon_id, repository_version.content))
            for variant in distribution_tree.variants.all():
                repository_version = variant.repository.latest_version()
                if repository_version and repository_version.content != main_content:
                    self.sub_repos.append(
                        (variant.variant_id, repository_version.content))

            treeinfo_file = create_treeinfo(distribution_tree)
            # NOTE(review): the handle returned by open() is never explicitly
            # closed here — it is only released when the wrapper is collected.
            PublishedMetadata.create_from_file(
                publication=publication,
                file=File(open(treeinfo_file.name, 'rb')))

        self.packages = self.get_packages(main_content)
        self.repomdrecords = self.prepare_metadata_files(main_content)

        # Build per-sub-repo package sets and metadata records, accumulating
        # the union of all packages for artifact publication below.
        all_packages = self.packages
        for name, content in self.sub_repos:
            os.mkdir(name)
            sub_repo_packages = self.get_packages(content)
            all_packages = all_packages | sub_repo_packages
            setattr(self, f"{name}_packages", sub_repo_packages)
            setattr(self, f"{name}_repomdrecords",
                    self.prepare_metadata_files(content, name))

        # distinct() guards against duplicates introduced by the set union.
        for package in all_packages.distinct():
            for content_artifact in package.contentartifact_set.all():
                self.published_artifacts.append(
                    PublishedArtifact(
                        relative_path=content_artifact.relative_path,
                        publication=self.publication,
                        content_artifact=content_artifact))

        # Persist all collected artifacts in a single bulk query.
        PublishedArtifact.objects.bulk_create(self.published_artifacts)
示例#9
0
def publish(manifest, repository_version_pk):
    """
    Create a Publication based on a RepositoryVersion.

    Args:
        manifest (str): Filename to use for manifest file.
        repository_version_pk (str): Create a publication from this repository version.

    """
    repo_version = RepositoryVersion.objects.get(pk=repository_version_pk)

    log.info(
        _("Publishing: repository={repo}, version={ver}, manifest={manifest}").
        format(repo=repo_version.repository.name,
               ver=repo_version.number,
               manifest=manifest))

    with WorkingDirectory():
        with FilePublication.create(repo_version,
                                    pass_through=True) as publication:
            # Use a distinct name instead of shadowing the `manifest` argument
            # with the Manifest object (the original rebound the parameter).
            manifest_file = Manifest(manifest)
            manifest_file.write(populate(publication))
            # Close the manifest handle deterministically (the original leaked
            # the handle returned by open()).
            with open(manifest_file.relative_path, "rb") as manifest_fd:
                PublishedMetadata.create_from_file(
                    file=File(manifest_fd),
                    publication=publication)

    log.info(
        _("Publication: {publication} created").format(
            publication=publication.pk))
示例#10
0
    def handle_sub_repos(self, distribution_tree):
        """
        Get sub-repo content and publish them.

        Args:
            distribution_tree (pulp_rpm.models.DistributionTree): A distribution_tree object.

        """
        original_treeinfo_content_artifact = distribution_tree.contentartifact_set.get(
            relative_path__in=[".treeinfo", "treeinfo"]
        )
        artifact_file = storage.open(original_treeinfo_content_artifact.artifact.file.name)
        # Copy the stored treeinfo into a local temp file so PulpTreeInfo can
        # load it from a filesystem path.
        with tempfile.NamedTemporaryFile("wb", dir=".") as temp_file:
            shutil.copyfileobj(artifact_file, temp_file)
            temp_file.flush()
            treeinfo = PulpTreeInfo()
            treeinfo.load(f=temp_file.name)
            treeinfo_data = TreeinfoData(treeinfo.parsed_sections())

            # rewrite the treeinfo file such that the variant repository and package location
            # is a relative subtree
            treeinfo.rewrite_subrepo_paths(treeinfo_data)

            # TODO: better way to do this?
            main_variant = treeinfo.original_parser._sections.get("general", {}).get(
                "variant", None
            )
            # NOTE(review): this temp file is never explicitly closed here; it
            # is deleted only when the object is garbage collected.
            treeinfo_file = tempfile.NamedTemporaryFile(dir=".")
            treeinfo.dump(treeinfo_file.name, main_variant=main_variant)
            with open(treeinfo_file.name, "rb") as treeinfo_fd:
                PublishedMetadata.create_from_file(
                    relative_path=original_treeinfo_content_artifact.relative_path,
                    publication=self.publication,
                    file=File(treeinfo_fd),
                )
        artifact_file.close()
        # Collect content from addon/variant sub-repositories for later
        # publication.
        relations = ["addon", "variant"]
        for relation in relations:
            addons_or_variants = getattr(distribution_tree, f"{relation}s").all()
            for addon_or_variant in addons_or_variants:
                if not addon_or_variant.repository:
                    # a variant of the main repo
                    continue
                repository = addon_or_variant.repository.cast()
                repository_version = repository.latest_version()

                # Only user-hidden repositories are treated as sub-repos here.
                if repository_version and repository.user_hidden:
                    addon_or_variant_id = getattr(addon_or_variant, f"{relation}_id")
                    self.sub_repos.append(
                        (
                            addon_or_variant_id,
                            repository_version.content,
                            repository.original_checksum_types,
                        )
                    )
示例#11
0
 def publish(self):
     """
     Publish the repository.

     Writes the PULP_MANIFEST file and records it as publication metadata.
     """
     manifest = Manifest('PULP_MANIFEST')
     manifest.write(self._publish())
     # Open/close the manifest within a context manager so the handle is
     # always released (the original leaked it).
     with open(manifest.path, 'rb') as manifest_fd:
         metadata = PublishedMetadata(
             relative_path=os.path.basename(manifest.path),
             publication=self.publication,
             file=File(manifest_fd))
         metadata.save()
示例#12
0
def _publish_specs(specs, relative_path, publication):
    """
    Publish a specs file and its gzipped companion as publication metadata.

    Args:
        specs: Spec entries serialized via write_specs().
        relative_path (str): Path of the specs file within the publication.
        publication: The publication the metadata belongs to.
    """
    write_specs(specs, relative_path)
    # Write the gzipped companion next to the plain file.
    with open(relative_path, "rb") as f_in:
        with gzip.open(relative_path + ".gz", "wb") as f_out:
            shutil.copyfileobj(f_in, f_out)
    # Use context managers so both read handles are always closed (the
    # original leaked both).
    with open(relative_path, "rb") as specs_fd:
        specs_metadata = PublishedMetadata.create_from_file(
            publication=publication, file=File(specs_fd))
        specs_metadata.save()
    with open(relative_path + ".gz", "rb") as specs_gz_fd:
        specs_metadata_gz = PublishedMetadata.create_from_file(
            publication=publication, file=File(specs_gz_fd))
        specs_metadata_gz.save()
示例#13
0
    def handle_sub_repos(self, distribution_tree):
        """
        Get sub-repo content and publish them.

        Args:
            distribution_tree (pulp_rpm.models.DistributionTree): A distribution_tree object.

        """
        original_treeinfo_content_artifact = distribution_tree.contentartifact_set.get(
            relative_path__in=[".treeinfo", "treeinfo"])
        artifact_file = storage.open(
            original_treeinfo_content_artifact.artifact.file.name)
        # Copy the stored treeinfo into a local temp file so PulpTreeInfo can
        # load it from a filesystem path.
        with NamedTemporaryFile("wb") as temp_file:
            shutil.copyfileobj(artifact_file, temp_file)
            temp_file.flush()
            treeinfo = PulpTreeInfo()
            treeinfo.load(f=temp_file.name)
            treeinfo_parsed = treeinfo.parsed_sections()
            treeinfodata = TreeinfoData(treeinfo_parsed)
            # Point each variant at its sub-repo relative repository/package
            # paths before re-dumping the treeinfo.
            for variant in treeinfo.variants.get_variants():
                variant.paths.repository = treeinfodata.variants[
                    variant.id]["repository"]
                variant.paths.packages = treeinfodata.variants[
                    variant.id]["packages"]
            # NOTE(review): neither this temp file nor the open() handle below
            # is explicitly closed; both are released only at collection.
            treeinfo_file = NamedTemporaryFile()
            treeinfo.dump(treeinfo_file.name)
            PublishedMetadata.create_from_file(
                relative_path=original_treeinfo_content_artifact.relative_path,
                publication=self.publication,
                file=File(open(treeinfo_file.name, "rb")),
            )
        # NOTE(review): artifact_file is never closed here — compare the
        # sibling implementation that calls artifact_file.close().
        relations = ["addon", "variant"]
        for relation in relations:
            addons_or_variants = getattr(distribution_tree,
                                         f"{relation}s").all()
            for addon_or_variant in addons_or_variants:
                if not addon_or_variant.repository:
                    # a variant of the main repo
                    continue
                repository = addon_or_variant.repository.cast()
                repository_version = repository.latest_version()

                # Only sub_repo-flagged repositories are collected here.
                if repository_version and repository.sub_repo:
                    addon_or_variant_id = getattr(addon_or_variant,
                                                  f"{relation}_id")
                    self.sub_repos.append((
                        addon_or_variant_id,
                        repository_version.content,
                        repository.original_checksum_types,
                    ))
示例#14
0
 def finish(self):
     """Close each package index, gzip it, and publish both variants as metadata."""
     # Publish Packages files
     for (package_index_file, package_index_path) in self.package_index_files.values():
         package_index_file.close()
         gz_package_index_path = _zip_file(package_index_path)
         # Context managers ensure both read handles are always closed
         # (the original leaked them).
         with open(package_index_path, "rb") as index_fd:
             package_index = PublishedMetadata.create_from_file(
                 publication=self.parent.publication, file=File(index_fd)
             )
             package_index.save()
         with open(gz_package_index_path, "rb") as gz_index_fd:
             gz_package_index = PublishedMetadata.create_from_file(
                 publication=self.parent.publication, file=File(gz_index_fd)
             )
             gz_package_index.save()
         self.parent.add_metadata(package_index)
         self.parent.add_metadata(gz_package_index)
示例#15
0
    def publish(self):
        """
        Publish the repository.

        Writes a PULP_MANIFEST file (one entry per line) and records it as
        publication metadata, reporting progress via a one-step ProgressBar.
        """
        with ProgressBar(message=_("Publishing repository metadata"), total=1) as bar:
            manifest_name = 'PULP_MANIFEST'
            with open(manifest_name, 'w+') as fp:
                for entry in self._publish():
                    fp.write(entry)
                    fp.write('\n')

            # Reopen for reading inside a context manager so the handle is
            # always closed (the original leaked it).
            with open(manifest_name, 'rb') as manifest_fd:
                metadata = PublishedMetadata(
                    relative_path=os.path.basename(manifest_name),
                    publication=self.publication,
                    file=File(manifest_fd))
                metadata.save()
            bar.increment()
示例#16
0
def synchronize(remote_pk, repository_pk, mirror):
    """
    Sync content from the remote repository.

    Create a new version of the repository that is synchronized with the remote.

    Args:
        remote_pk (str): The remote PK.
        repository_pk (str): The repository PK.
        mirror (bool): True for mirror mode, False for additive.

    Raises:
        ValueError: If the remote does not specify a URL to sync.

    """
    remote = FileRemote.objects.get(pk=remote_pk)
    repository = FileRepository.objects.get(pk=repository_pk)

    if not remote.url:
        raise ValueError(
            _("A remote must have a url specified to synchronize."))

    first_stage = FileFirstStage(remote)
    dv = DeclarativeVersion(first_stage, repository, mirror=mirror)
    rv = dv.create()
    if rv and mirror:
        # TODO: this is awful, we really should rewrite the DeclarativeVersion API to
        # accomodate this use case
        global metadata_files
        with FilePublication.create(rv, pass_through=True) as publication:
            (mdfile_path, relative_path) = metadata_files.pop()
            # Close the metadata file deterministically (the original leaked
            # the handle returned by open()).
            with open(mdfile_path, "rb") as mdfile_fd:
                PublishedMetadata.create_from_file(
                    file=File(mdfile_fd),
                    relative_path=relative_path,
                    publication=publication,
                )
            publication.manifest = relative_path
            publication.save()

        log.info(
            _("Publication: {publication} created").format(
                publication=publication.pk))

    return rv
示例#17
0
def _publish_specs(specs, relative_path, publication):
    """
    Publish a specs file and its gzipped companion as publication metadata.

    Args:
        specs: Spec entries serialized via write_specs().
        relative_path (str): Path of the specs file within the publication.
        publication: The publication the metadata belongs to.
    """
    write_specs(specs, relative_path)
    # Write the gzipped companion next to the plain file.
    with open(relative_path, 'rb') as f_in:
        with gzip.open(relative_path + '.gz', 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)
    # Use context managers so both read handles are always closed (the
    # original leaked both).
    with open(relative_path, 'rb') as specs_fd:
        specs_metadata = PublishedMetadata(relative_path=relative_path,
                                           publication=publication,
                                           file=File(specs_fd))
        specs_metadata.save()
    with open(relative_path + '.gz', 'rb') as specs_gz_fd:
        specs_metadata_gz = PublishedMetadata(relative_path=relative_path + '.gz',
                                              publication=publication,
                                              file=File(specs_gz_fd))
        specs_metadata_gz.save()
示例#18
0
def publish_chart_content(publication):
    """
    Create published artifacts and metadata for a publication

    Args:
        publication (ChartPublication): The publication to store
    """
    entries = {}
    for content in ChartContent.objects.filter(
            pk__in=publication.repository_version.content).order_by(
                'name', '-created'):
        artifacts = content.contentartifact_set.all()
        for artifact in artifacts:
            published = PublishedArtifact(relative_path=artifact.relative_path,
                                          publication=publication,
                                          content_artifact=artifact)
            published.save()

        entry = {
            'apiVersion': 'v1',
            'created': content.created.isoformat(),
            'description': content.description,
            'digest': content.digest,
            'icon': content.icon,
            'keywords': content.keywords,
            'name': content.name,
            'urls': [artifact.relative_path for artifact in artifacts],
            'version': content.version
        }

        if content.name not in entries:
            entries[content.name] = []

        # Strip away empty keys when building metadata
        entries[content.name].append(
            {k: v
             for k, v in entry.items() if (v is not None and v != [])})

    doc = {
        'apiVersion': 'v1',
        'entries': entries,
        'generated': timezone.now().isoformat()
    }

    # Use a distinct name for the write handle so `index` is only the
    # resulting metadata object (the original reused the name).
    with open('index.yaml', 'w') as index_out:
        index_out.write(yaml.dump(doc))

    # Reopen for reading inside a context manager so the handle is always
    # closed (the original leaked it).
    with open('index.yaml', 'rb') as index_fd:
        index = PublishedMetadata.create_from_file(publication=publication,
                                                   file=File(index_fd))
        index.save()
示例#19
0
def publish(repository_version_pk):
    """
    Create a Publication based on a RepositoryVersion.

    Args:
        repository_version_pk (str): Create a publication from this repository version.
    """
    repository_version = RepositoryVersion.objects.get(
        pk=repository_version_pk)

    log.info(
        _('Publishing: repository={repo}, version={version}').format(
            repo=repository_version.repository.name,
            version=repository_version.number,
        ))

    with WorkingDirectory():
        with RpmPublication.create(repository_version) as publication:
            packages = populate(publication)

            # Prepare metadata files
            repomd_path = os.path.join(os.getcwd(), "repomd.xml")
            pri_xml_path = os.path.join(os.getcwd(), "primary.xml.gz")
            fil_xml_path = os.path.join(os.getcwd(), "filelists.xml.gz")
            oth_xml_path = os.path.join(os.getcwd(), "other.xml.gz")
            pri_db_path = os.path.join(os.getcwd(), "primary.sqlite")
            fil_db_path = os.path.join(os.getcwd(), "filelists.sqlite")
            oth_db_path = os.path.join(os.getcwd(), "other.sqlite")
            upd_xml_path = os.path.join(os.getcwd(), "updateinfo.xml.gz")

            # createrepo_c writers for the XML and sqlite metadata variants.
            pri_xml = cr.PrimaryXmlFile(pri_xml_path)
            fil_xml = cr.FilelistsXmlFile(fil_xml_path)
            oth_xml = cr.OtherXmlFile(oth_xml_path)
            pri_db = cr.PrimarySqlite(pri_db_path)
            fil_db = cr.FilelistsSqlite(fil_db_path)
            oth_db = cr.OtherSqlite(oth_db_path)
            upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

            pri_xml.set_num_of_pkgs(len(packages))
            fil_xml.set_num_of_pkgs(len(packages))
            oth_xml.set_num_of_pkgs(len(packages))

            # Process all packages
            for package in packages:
                pkg = package.to_createrepo_c()
                # location_href must match the published artifact path.
                pkg.location_href = package.contentartifact_set.first(
                ).relative_path
                pri_xml.add_pkg(pkg)
                fil_xml.add_pkg(pkg)
                oth_xml.add_pkg(pkg)
                pri_db.add_pkg(pkg)
                fil_db.add_pkg(pkg)
                oth_db.add_pkg(pkg)

            # Process update records
            for update_record in UpdateRecord.objects.filter(
                    pk__in=publication.repository_version.content):
                upd_xml.add_chunk(update_record_xml(update_record))

            pri_xml.close()
            fil_xml.close()
            oth_xml.close()
            upd_xml.close()

            repomd = cr.Repomd()

            # (record name, file path, sqlite db to stamp with the checksum)
            repomdrecords = (("primary", pri_xml_path,
                              pri_db), ("filelists", fil_xml_path, fil_db),
                             ("other", oth_xml_path,
                              oth_db), ("primary_db", pri_db_path, None),
                             ("filelists_db", fil_db_path,
                              None), ("other_db", oth_db_path, None),
                             ("updateinfo", upd_xml_path, None))

            sqlite_files = ("primary_db", "filelists_db", "other_db")
            for name, path, db_to_update in repomdrecords:
                record = cr.RepomdRecord(name, path)
                if name in sqlite_files:
                    # Sqlite records are bz2-compressed and renamed to include
                    # their checksum before being added to repomd.
                    record_bz = record.compress_and_fill(cr.SHA256, cr.BZ2)
                    record_bz.type = name
                    record_bz.rename_file()
                    path = record_bz.location_href.split('/')[-1]
                    repomd.set_record(record_bz)
                else:
                    record.fill(cr.SHA256)
                    if (db_to_update):
                        db_to_update.dbinfo_update(record.checksum)
                        db_to_update.close()
                    record.rename_file()
                    path = record.location_href.split('/')[-1]
                    repomd.set_record(record)
                # NOTE(review): the handle passed to File(open(...)) is never
                # explicitly closed.
                metadata = PublishedMetadata(
                    relative_path=os.path.join(REPODATA_PATH,
                                               os.path.basename(path)),
                    publication=publication,
                    file=File(open(os.path.basename(path), 'rb')))
                metadata.save()

            with open(repomd_path, "w") as repomd_f:
                repomd_f.write(repomd.xml_dump())

            # NOTE(review): same unclosed-handle pattern as above.
            metadata = PublishedMetadata(
                relative_path=os.path.join(REPODATA_PATH,
                                           os.path.basename(repomd_path)),
                publication=publication,
                file=File(open(os.path.basename(repomd_path), 'rb')))
            metadata.save()
示例#20
0
def publish(publisher_pk, repository_version_pk):
    """
    Use provided publisher to create a Publication based on a RepositoryVersion.

    Args:
        publisher_pk (str): Use the publish settings provided by this publisher.
        repository_version_pk (str): Create a publication from this repository version.
    """
    publisher = RpmPublisher.objects.get(pk=publisher_pk)
    repository_version = RepositoryVersion.objects.get(
        pk=repository_version_pk)

    log.info(
        _('Publishing: repository={repo}, version={version}, publisher={publisher}'
          ).format(
              repo=repository_version.repository.name,
              version=repository_version.number,
              publisher=publisher.name,
          ))

    with WorkingDirectory():
        with Publication.create(repository_version, publisher) as publication:
            populate(publication)

            # Prepare metadata files
            repomd_path = os.path.join(os.getcwd(), "repomd.xml")
            pri_xml_path = os.path.join(os.getcwd(), "primary.xml.gz")
            fil_xml_path = os.path.join(os.getcwd(), "filelists.xml.gz")
            oth_xml_path = os.path.join(os.getcwd(), "other.xml.gz")
            pri_db_path = os.path.join(os.getcwd(), "primary.sqlite")
            fil_db_path = os.path.join(os.getcwd(), "filelists.sqlite")
            oth_db_path = os.path.join(os.getcwd(), "other.sqlite")
            upd_xml_path = os.path.join(os.getcwd(), "updateinfo.xml.gz")

            # createrepo_c writers for the XML and sqlite metadata variants.
            pri_xml = cr.PrimaryXmlFile(pri_xml_path)
            fil_xml = cr.FilelistsXmlFile(fil_xml_path)
            oth_xml = cr.OtherXmlFile(oth_xml_path)
            pri_db = cr.PrimarySqlite(pri_db_path)
            fil_db = cr.FilelistsSqlite(fil_db_path)
            oth_db = cr.OtherSqlite(oth_db_path)
            upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

            artifacts = publication.published_artifact.all()
            pri_xml.set_num_of_pkgs(len(artifacts))
            fil_xml.set_num_of_pkgs(len(artifacts))
            oth_xml.set_num_of_pkgs(len(artifacts))

            # Process all packages
            for artifact in artifacts:
                # TODO: pass attributes from db rather than use the filesystem
                pkg = cr.package_from_rpm(
                    artifact.content_artifact.artifact.file.path)
                pkg.location_href = artifact.content_artifact.relative_path
                pri_xml.add_pkg(pkg)
                fil_xml.add_pkg(pkg)
                oth_xml.add_pkg(pkg)
                pri_db.add_pkg(pkg)
                fil_db.add_pkg(pkg)
                oth_db.add_pkg(pkg)

            # Process update records
            for update_record in UpdateRecord.objects.filter(
                    pk__in=publication.repository_version.content):
                upd_xml.add_chunk(update_record_xml(update_record))

            pri_xml.close()
            fil_xml.close()
            oth_xml.close()
            upd_xml.close()

            repomd = cr.Repomd()

            # (record name, file path, sqlite db to stamp with the checksum)
            repomdrecords = (("primary", pri_xml_path,
                              pri_db), ("filelists", fil_xml_path, fil_db),
                             ("other", oth_xml_path,
                              oth_db), ("primary_db", pri_db_path, None),
                             ("filelists_db", fil_db_path,
                              None), ("other_db", oth_db_path, None),
                             ("updateinfo", upd_xml_path, None))

            for name, path, db_to_update in repomdrecords:
                record = cr.RepomdRecord(name, path)
                record.fill(cr.SHA256)
                if (db_to_update):
                    db_to_update.dbinfo_update(record.checksum)
                    db_to_update.close()
                repomd.set_record(record)
                # NOTE(review): the handle passed to File(open(...)) is never
                # explicitly closed.
                metadata = PublishedMetadata(
                    relative_path=os.path.join(REPODATA_PATH,
                                               os.path.basename(path)),
                    publication=publication,
                    file=File(open(os.path.basename(path), 'rb')))
                metadata.save()

            # NOTE(review): this relies on CPython refcounting to flush/close
            # the write handle before the file is reopened below — fragile.
            open(repomd_path, "w").write(repomd.xml_dump())

            metadata = PublishedMetadata(
                relative_path=os.path.join(REPODATA_PATH,
                                           os.path.basename(repomd_path)),
                publication=publication,
                file=File(open(os.path.basename(repomd_path), 'rb')))
            metadata.save()
示例#21
0
def create_repomd_xml(
    content,
    publication,
    checksum_types,
    extra_repomdrecords,
    sub_folder=None,
    metadata_signing_service=None,
):
    """
    Creates a repomd.xml file.

    Args:
        content(app.models.Content): content set
        publication(pulpcore.plugin.models.Publication): the publication
        checksum_types(dict): checksum type overrides; the "package" key, when
            present, selects the checksum type published for package records
        extra_repomdrecords(list): list with data relative to repo metadata files
        sub_folder(str): name of the folder for sub repos
        metadata_signing_service (pulpcore.app.models.AsciiArmoredDetachedSigningService):
            A reference to an associated signing service.

    """
    cwd = os.getcwd()
    repodata_path = REPODATA_PATH
    has_modules = False
    has_comps = False
    package_checksum_type = checksum_types.get("package")

    # Sub repos generate and publish their metadata under their own folder.
    if sub_folder:
        cwd = os.path.join(cwd, sub_folder)
        repodata_path = os.path.join(sub_folder, repodata_path)

    # Prepare metadata files
    repomd_path = os.path.join(cwd, "repomd.xml")
    pri_xml_path = os.path.join(cwd, "primary.xml.gz")
    fil_xml_path = os.path.join(cwd, "filelists.xml.gz")
    oth_xml_path = os.path.join(cwd, "other.xml.gz")
    upd_xml_path = os.path.join(cwd, "updateinfo.xml.gz")
    mod_yml_path = os.path.join(cwd, "modules.yaml")
    comps_xml_path = os.path.join(cwd, "comps.xml")

    pri_xml = cr.PrimaryXmlFile(pri_xml_path)
    fil_xml = cr.FilelistsXmlFile(fil_xml_path)
    oth_xml = cr.OtherXmlFile(oth_xml_path)
    upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

    # Sqlite metadata is optional; only create the databases when requested.
    if publication.sqlite_metadata:
        pri_db_path = os.path.join(cwd, "primary.sqlite")
        fil_db_path = os.path.join(cwd, "filelists.sqlite")
        oth_db_path = os.path.join(cwd, "other.sqlite")
        pri_db = cr.PrimarySqlite(pri_db_path)
        fil_db = cr.FilelistsSqlite(fil_db_path)
        oth_db = cr.OtherSqlite(oth_db_path)

    packages = Package.objects.filter(pk__in=content)
    total_packages = packages.count()

    # The package count must be declared before packages are added.
    pri_xml.set_num_of_pkgs(total_packages)
    fil_xml.set_num_of_pkgs(total_packages)
    oth_xml.set_num_of_pkgs(total_packages)

    # We want to support publishing with a different checksum type than the one built-in to the
    # package itself, so we need to get the correct checksums somehow if there is an override.
    # We must also take into consideration that if the package has not been downloaded the only
    # checksum that is available is the one built-in.
    #
    # Since this lookup goes from Package->Content->ContentArtifact->Artifact, performance is a
    # challenge. We use ContentArtifact as our starting point because it enables us to work with
    # simple foreign keys and avoid messing with the many-to-many relationship, which doesn't
    # work with select_related() and performs poorly with prefetch_related(). This is fine
    # because we know that Packages should only ever have one artifact per content.
    contentartifact_qs = (
        ContentArtifact.objects.filter(content__in=packages.only("pk")).
        select_related(
            # content__rpm_package is a bit of a hack, exploiting the way django sets up model
            # inheritance, but it works and is unlikely to break. All content artifacts being
            # accessed here have an associated Package since they originally came from the
            # Package queryset.
            "artifact",
            "content__rpm_package",
        ).only("artifact", "content__rpm_package__checksum_type",
               "content__rpm_package__pkgId"))

    # Map each package pk to the (checksum_type, checksum) pair to publish.
    pkg_to_hash = {}
    for ca in contentartifact_qs.iterator():
        pkgid = None
        if package_checksum_type:
            package_checksum_type = package_checksum_type.lower()
            # The override digest is only present on downloaded artifacts;
            # when missing we fall back to the package's built-in checksum.
            pkgid = getattr(ca.artifact, package_checksum_type, None)
        if pkgid:
            pkg_to_hash[ca.content_id] = (package_checksum_type, pkgid)
        else:
            pkg_to_hash[ca.content_id] = (
                ca.content.rpm_package.checksum_type,
                ca.content.rpm_package.pkgId,
            )

    # Process all packages
    for package in packages.iterator():
        pkg = package.to_createrepo_c()

        # rewrite the checksum and checksum type with the desired ones
        (checksum, pkgId) = pkg_to_hash[package.pk]
        pkg.checksum_type = checksum
        pkg.pkgId = pkgId

        pkg_filename = os.path.basename(package.location_href)
        # this can cause an issue when two same RPM package names appears
        # a/name1.rpm b/name1.rpm
        pkg.location_href = os.path.join(PACKAGES_DIRECTORY,
                                         pkg_filename[0].lower(), pkg_filename)
        pri_xml.add_pkg(pkg)
        fil_xml.add_pkg(pkg)
        oth_xml.add_pkg(pkg)
        if publication.sqlite_metadata:
            pri_db.add_pkg(pkg)
            fil_db.add_pkg(pkg)
            oth_db.add_pkg(pkg)

    # Process update records
    for update_record in UpdateRecord.objects.filter(
            pk__in=content).iterator():
        upd_xml.add_chunk(
            cr.xml_dump_updaterecord(update_record.to_createrepo_c()))

    # Process modulemd and modulemd_defaults
    # All module documents are concatenated into a single modules.yaml file.
    with open(mod_yml_path, "ab") as mod_yml:
        for modulemd in Modulemd.objects.filter(pk__in=content).iterator():
            mod_yml.write(modulemd._artifacts.get().file.read())
            has_modules = True
        for default in ModulemdDefaults.objects.filter(
                pk__in=content).iterator():
            mod_yml.write(default._artifacts.get().file.read())
            has_modules = True

    # Process comps
    comps = libcomps.Comps()
    for pkg_grp in PackageGroup.objects.filter(pk__in=content).iterator():
        group = pkg_grp.pkg_grp_to_libcomps()
        comps.groups.append(group)
        has_comps = True
    for pkg_cat in PackageCategory.objects.filter(pk__in=content).iterator():
        cat = pkg_cat.pkg_cat_to_libcomps()
        comps.categories.append(cat)
        has_comps = True
    for pkg_env in PackageEnvironment.objects.filter(
            pk__in=content).iterator():
        env = pkg_env.pkg_env_to_libcomps()
        comps.environments.append(env)
        has_comps = True
    for pkg_lng in PackageLangpacks.objects.filter(pk__in=content).iterator():
        comps.langpacks = dict_to_strdict(pkg_lng.matches)
        has_comps = True

    comps.toxml_f(
        comps_xml_path,
        xml_options={
            "default_explicit": True,
            "empty_groups": True,
            "uservisible_explicit": True
        },
    )

    pri_xml.close()
    fil_xml.close()
    oth_xml.close()
    upd_xml.close()

    repomd = cr.Repomd()

    # The *_db records only exist when sqlite metadata was generated.
    if publication.sqlite_metadata:
        repomdrecords = [
            ("primary", pri_xml_path, pri_db),
            ("filelists", fil_xml_path, fil_db),
            ("other", oth_xml_path, oth_db),
            ("primary_db", pri_db_path, None),
            ("filelists_db", fil_db_path, None),
            ("other_db", oth_db_path, None),
            ("updateinfo", upd_xml_path, None),
        ]
    else:
        repomdrecords = [
            ("primary", pri_xml_path, None),
            ("filelists", fil_xml_path, None),
            ("other", oth_xml_path, None),
            ("updateinfo", upd_xml_path, None),
        ]

    if has_modules:
        repomdrecords.append(("modules", mod_yml_path, None))

    if has_comps:
        repomdrecords.append(("group", comps_xml_path, None))

    repomdrecords.extend(extra_repomdrecords)

    sqlite_files = ("primary_db", "filelists_db", "other_db")
    for name, path, db_to_update in repomdrecords:
        record = cr.RepomdRecord(name, path)
        # Each record type may use its own checksum type override.
        checksum_type = get_checksum_type(name, checksum_types)
        if name in sqlite_files:
            # Sqlite databases are published bz2-compressed.
            record_bz = record.compress_and_fill(checksum_type, cr.BZ2)
            record_bz.type = name
            record_bz.rename_file()
            path = record_bz.location_href.split("/")[-1]
            repomd.set_record(record_bz)
        else:
            record.fill(checksum_type)
            if db_to_update:
                db_to_update.dbinfo_update(record.checksum)
                db_to_update.close()
            record.rename_file()
            path = record.location_href.split("/")[-1]
            repomd.set_record(record)

        if sub_folder:
            path = os.path.join(sub_folder, path)

        PublishedMetadata.create_from_file(
            relative_path=os.path.join(repodata_path, os.path.basename(path)),
            publication=publication,
            file=File(open(path, "rb")),
        )

    with open(repomd_path, "w") as repomd_f:
        repomd_f.write(repomd.xml_dump())

    if metadata_signing_service:
        signing_service = AsciiArmoredDetachedSigningService.objects.get(
            pk=metadata_signing_service.pk)
        sign_results = signing_service.sign(repomd_path)

        # publish a signed file
        PublishedMetadata.create_from_file(
            relative_path=os.path.join(repodata_path,
                                       os.path.basename(sign_results["file"])),
            publication=publication,
            file=File(open(sign_results["file"], "rb")),
        )

        # publish a detached signature
        PublishedMetadata.create_from_file(
            relative_path=os.path.join(
                repodata_path, os.path.basename(sign_results["signature"])),
            publication=publication,
            file=File(open(sign_results["signature"], "rb")),
        )

        # publish a public key required for further verification
        PublishedMetadata.create_from_file(
            relative_path=os.path.join(repodata_path,
                                       os.path.basename(sign_results["key"])),
            publication=publication,
            file=File(open(sign_results["key"], "rb")),
        )
    else:
        PublishedMetadata.create_from_file(
            relative_path=os.path.join(repodata_path,
                                       os.path.basename(repomd_path)),
            publication=publication,
            file=File(open(repomd_path, "rb")),
        )
示例#22
0
def publish(repository_version_pk):
    """
    Create a Publication based on a RepositoryVersion.

    Args:
        repository_version_pk (str): Create a publication from this repository version.
    """
    version = RepositoryVersion.objects.get(pk=repository_version_pk)

    log.info(_('Publishing: repository={repo}, version={version}').format(
        repo=version.repository.name,
        version=version.number,
    ))

    with WorkingDirectory():
        with RpmPublication.create(version) as publication:
            populate(publication)

            # All metadata files are written into the task's working directory.
            cwd = os.getcwd()
            repomd_path = os.path.join(cwd, "repomd.xml")
            primary_xml_path = os.path.join(cwd, "primary.xml.gz")
            filelists_xml_path = os.path.join(cwd, "filelists.xml.gz")
            other_xml_path = os.path.join(cwd, "other.xml.gz")
            primary_db_path = os.path.join(cwd, "primary.sqlite")
            filelists_db_path = os.path.join(cwd, "filelists.sqlite")
            other_db_path = os.path.join(cwd, "other.sqlite")
            updateinfo_xml_path = os.path.join(cwd, "updateinfo.xml.gz")

            primary_xml = cr.PrimaryXmlFile(primary_xml_path)
            filelists_xml = cr.FilelistsXmlFile(filelists_xml_path)
            other_xml = cr.OtherXmlFile(other_xml_path)
            primary_db = cr.PrimarySqlite(primary_db_path)
            filelists_db = cr.FilelistsSqlite(filelists_db_path)
            other_db = cr.OtherSqlite(other_db_path)
            updateinfo_xml = cr.UpdateInfoXmlFile(updateinfo_xml_path)

            published = publication.published_artifact.all()
            # The package count must be declared before packages are added.
            package_count = len(published)
            primary_xml.set_num_of_pkgs(package_count)
            filelists_xml.set_num_of_pkgs(package_count)
            other_xml.set_num_of_pkgs(package_count)

            # Add every published package to the xml and sqlite metadata.
            for published_artifact in published:
                content_artifact = published_artifact.content_artifact
                pkg = content_artifact.content.cast().to_createrepo_c()
                pkg.location_href = content_artifact.relative_path
                for target in (primary_xml, filelists_xml, other_xml,
                               primary_db, filelists_db, other_db):
                    target.add_pkg(pkg)

            # Serialize the advisories contained in this repository version.
            advisories = UpdateRecord.objects.filter(
                pk__in=publication.repository_version.content)
            for advisory in advisories:
                updateinfo_xml.add_chunk(update_record_xml(advisory))

            for xml_file in (primary_xml, filelists_xml, other_xml,
                             updateinfo_xml):
                xml_file.close()

            repomd = cr.Repomd()

            # (name, path, sqlite db whose dbinfo must carry this record's checksum)
            repomdrecords = [
                ("primary", primary_xml_path, primary_db),
                ("filelists", filelists_xml_path, filelists_db),
                ("other", other_xml_path, other_db),
                ("primary_db", primary_db_path, None),
                ("filelists_db", filelists_db_path, None),
                ("other_db", other_db_path, None),
                ("updateinfo", updateinfo_xml_path, None),
            ]

            sqlite_files = ("primary_db", "filelists_db", "other_db")
            for name, path, db_to_update in repomdrecords:
                record = cr.RepomdRecord(name, path)
                if name in sqlite_files:
                    # Sqlite databases are published bz2-compressed.
                    record_bz = record.compress_and_fill(cr.SHA256, cr.BZ2)
                    record_bz.type = name
                    record_bz.rename_file()
                    path = record_bz.location_href.split('/')[-1]
                    repomd.set_record(record_bz)
                else:
                    record.fill(cr.SHA256)
                    if db_to_update:
                        db_to_update.dbinfo_update(record.checksum)
                        db_to_update.close()
                    record.rename_file()
                    path = record.location_href.split('/')[-1]
                    repomd.set_record(record)
                PublishedMetadata(
                    relative_path=os.path.join(REPODATA_PATH, os.path.basename(path)),
                    publication=publication,
                    file=File(open(os.path.basename(path), 'rb'))
                ).save()

            with open(repomd_path, "w") as repomd_f:
                repomd_f.write(repomd.xml_dump())

            PublishedMetadata(
                relative_path=os.path.join(REPODATA_PATH, os.path.basename(repomd_path)),
                publication=publication,
                file=File(open(os.path.basename(repomd_path), 'rb'))
            ).save()
示例#23
0
def publish(publisher_pk, repository_version_pk):
    """
    Use provided publisher to create a Publication based on a RepositoryVersion.

    Args:
        publisher_pk (str): Use the publish settings provided by this publisher.
        repository_version_pk (str): Create a publication from this repository version.
    """
    publisher = RpmPublisher.objects.get(pk=publisher_pk)
    repository_version = RepositoryVersion.objects.get(pk=repository_version_pk)

    log.info(_('Publishing: repository={repo}, version={version}, publisher={publisher}').format(
        repo=repository_version.repository.name,
        version=repository_version.number,
        publisher=publisher.name,
    ))

    with WorkingDirectory():
        with Publication.create(repository_version, publisher) as publication:
            populate(publication)

            # Prepare metadata files in the task working directory
            repomd_path = os.path.join(os.getcwd(), "repomd.xml")
            pri_xml_path = os.path.join(os.getcwd(), "primary.xml.gz")
            fil_xml_path = os.path.join(os.getcwd(), "filelists.xml.gz")
            oth_xml_path = os.path.join(os.getcwd(), "other.xml.gz")
            pri_db_path = os.path.join(os.getcwd(), "primary.sqlite")
            fil_db_path = os.path.join(os.getcwd(), "filelists.sqlite")
            oth_db_path = os.path.join(os.getcwd(), "other.sqlite")
            upd_xml_path = os.path.join(os.getcwd(), "updateinfo.xml.gz")

            pri_xml = cr.PrimaryXmlFile(pri_xml_path)
            fil_xml = cr.FilelistsXmlFile(fil_xml_path)
            oth_xml = cr.OtherXmlFile(oth_xml_path)
            pri_db = cr.PrimarySqlite(pri_db_path)
            fil_db = cr.FilelistsSqlite(fil_db_path)
            oth_db = cr.OtherSqlite(oth_db_path)
            upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

            artifacts = publication.published_artifact.all()
            # The package count must be declared before packages are added.
            pri_xml.set_num_of_pkgs(len(artifacts))
            fil_xml.set_num_of_pkgs(len(artifacts))
            oth_xml.set_num_of_pkgs(len(artifacts))

            # Process all packages
            for artifact in artifacts:
                # TODO: pass attributes from db rather than use the filesystem
                pkg = cr.package_from_rpm(artifact.content_artifact.artifact.file.path)
                pkg.location_href = artifact.content_artifact.relative_path
                pri_xml.add_pkg(pkg)
                fil_xml.add_pkg(pkg)
                oth_xml.add_pkg(pkg)
                pri_db.add_pkg(pkg)
                fil_db.add_pkg(pkg)
                oth_db.add_pkg(pkg)

            # Process update records
            for update_record in UpdateRecord.objects.filter(
                    pk__in=publication.repository_version.content):
                upd_xml.add_chunk(update_record_xml(update_record))

            pri_xml.close()
            fil_xml.close()
            oth_xml.close()
            upd_xml.close()

            repomd = cr.Repomd()

            # (name, path, sqlite db whose dbinfo must carry this record's checksum)
            repomdrecords = (("primary", pri_xml_path, pri_db),
                             ("filelists", fil_xml_path, fil_db),
                             ("other", oth_xml_path, oth_db),
                             ("primary_db", pri_db_path, None),
                             ("filelists_db", fil_db_path, None),
                             ("other_db", oth_db_path, None),
                             ("updateinfo", upd_xml_path, None))

            for name, path, db_to_update in repomdrecords:
                record = cr.RepomdRecord(name, path)
                record.fill(cr.SHA256)
                if db_to_update:
                    db_to_update.dbinfo_update(record.checksum)
                    db_to_update.close()
                # Bug fix: set_record() must run for EVERY record. It was
                # previously nested inside the db_to_update branch, so the
                # primary_db/filelists_db/other_db/updateinfo records were
                # silently omitted from repomd.xml.
                repomd.set_record(record)
                metadata = PublishedMetadata(
                    relative_path=os.path.join(REPODATA_PATH, os.path.basename(path)),
                    publication=publication,
                    file=File(open(os.path.basename(path), 'rb'))
                )
                metadata.save()

            # Bug fix: close the file handle deterministically instead of
            # leaking it via open(...).write(...).
            with open(repomd_path, "w") as repomd_f:
                repomd_f.write(repomd.xml_dump())

            metadata = PublishedMetadata(
                relative_path=os.path.join(REPODATA_PATH, os.path.basename(repomd_path)),
                publication=publication,
                file=File(open(os.path.basename(repomd_path), 'rb'))
            )
            metadata.save()
示例#24
0
def publish(publisher_pk, repository_version_pk):
    """
    Use provided publisher to create a Publication based on a RepositoryVersion.

    Args:
        publisher_pk (str): Use the publish settings provided by this publisher.
        repository_version_pk (str): Create a publication from this repository version.
    """
    publisher = DebPublisher.objects.get(pk=publisher_pk)
    repository_version = RepositoryVersion.objects.get(pk=repository_version_pk)

    log.info(_('Publishing: repository={repo}, version={ver}, publisher={pub}').format(
        repo=repository_version.repository.name,
        ver=repository_version.number,
        pub=publisher.name
    ))
    with WorkingDirectory():
        with Publication.create(repository_version, publisher, pass_through=False) as publication:
            if publisher.simple:
                # Simple mode: every package is published under a single
                # 'default' distribution with the 'all' component.
                repository = repository_version.repository
                release = deb822.Release()
                # TODO: release['Label']
                release['Codename'] = 'default'
                release['Components'] = 'all'
                # Filled in below, once the set of architectures is known.
                release['Architectures'] = ''
                if repository.description:
                    release['Description'] = repository.description
                # Checksum lists are populated via _add_to_release() below.
                release['MD5sum'] = []
                release['SHA1'] = []
                release['SHA256'] = []
                release['SHA512'] = []
                # Maps architecture -> (open Packages file handle, index path).
                package_index_files = {}
                for package in Package.objects.filter(
                    pk__in=repository_version.content.order_by('-_created')
                ):
                    published_artifact = PublishedArtifact(
                        relative_path=package.filename(),
                        publication=publication,
                        content_artifact=package.contentartifact_set.get(),
                    )
                    published_artifact.save()
                    # Lazily open one Packages index per architecture.
                    if package.architecture not in package_index_files:
                        package_index_path = os.path.join(
                            'dists',
                            'default',
                            'all',
                            'binary-{}'.format(package.architecture),
                            'Packages',
                        )
                        os.makedirs(os.path.dirname(
                            package_index_path), exist_ok=True)
                        package_index_files[package.architecture] = (
                            open(package_index_path, 'wb'), package_index_path)
                    # Append the package's 822 stanza, blank-line separated.
                    package.to822('all').dump(
                        package_index_files[package.architecture][0])
                    package_index_files[package.architecture][0].write(b'\n')
                for package_index_file, package_index_path in package_index_files.values():
                    package_index_file.close()
                    # Publish both the plain and gzipped index, recording the
                    # checksums of each in the Release file.
                    gz_package_index_path = _zip_file(package_index_path)
                    _add_to_release(release, package_index_path)
                    _add_to_release(release, gz_package_index_path)

                    package_index = PublishedMetadata(
                        relative_path=package_index_path,
                        publication=publication,
                        file=File(open(package_index_path, 'rb')),
                    )
                    package_index.save()
                    gz_package_index = PublishedMetadata(
                        relative_path=gz_package_index_path,
                        publication=publication,
                        file=File(open(gz_package_index_path, 'rb')),
                    )
                    gz_package_index.save()
                release['Architectures'] = ', '.join(package_index_files.keys())
                release_path = os.path.join('dists', 'default', 'Release')
                os.makedirs(os.path.dirname(release_path), exist_ok=True)
                with open(release_path, 'wb') as release_file:
                    release.dump(release_file)
                release_metadata = PublishedMetadata(
                    relative_path=release_path,
                    publication=publication,
                    file=File(open(release_path, 'rb')),
                )
                release_metadata.save()

            if publisher.structured:
                raise NotImplementedError(
                    "Structured publishing is not yet implemented.")

    log.info(_('Publication: {publication} created').format(publication=publication.pk))
示例#25
0
def publish(publisher_pk, repository_version_pk):
    """
    Use provided publisher to create a Publication based on a RepositoryVersion.

    Args:
        publisher_pk (str): Use the publish settings provided by this publisher.
        repository_version_pk (str): Create a publication from this repository version.
    """
    publisher = DebPublisher.objects.get(pk=publisher_pk)
    repo_version = RepositoryVersion.objects.get(pk=repository_version_pk)

    log.info(_('Publishing: repository={repo}, version={ver}, publisher={pub}').format(
        repo=repo_version.repository.name,
        ver=repo_version.number,
        pub=publisher.name))
    with WorkingDirectory():
        with Publication.create(repo_version, publisher,
                                pass_through=False) as publication:
            if publisher.simple:
                # Simple mode: a single 'default' distribution, component 'all'.
                repo = repo_version.repository
                release = deb822.Release()
                # TODO: release['Label']
                release['Codename'] = 'default'
                release['Components'] = 'all'
                # Replaced once the architectures are known.
                release['Architectures'] = ''
                if repo.description:
                    release['Description'] = repo.description
                # Checksum lists, filled by _add_to_release() per index file.
                for digest_field in ('MD5sum', 'SHA1', 'SHA256', 'SHA512'):
                    release[digest_field] = []
                # architecture -> (open Packages file handle, index path)
                index_files = {}
                package_qs = Package.objects.filter(
                    pk__in=repo_version.content.order_by('-_created'))
                for package in package_qs:
                    PublishedArtifact(
                        relative_path=package.filename(),
                        publication=publication,
                        content_artifact=package.contentartifact_set.get(),
                    ).save()
                    arch = package.architecture
                    # Lazily open one Packages index per architecture.
                    if arch not in index_files:
                        index_path = os.path.join(
                            'dists', 'default', 'all',
                            'binary-{}'.format(arch), 'Packages')
                        os.makedirs(os.path.dirname(index_path), exist_ok=True)
                        index_files[arch] = (open(index_path, 'wb'), index_path)
                    handle = index_files[arch][0]
                    package.to822('all').dump(handle)
                    handle.write(b'\n')
                for handle, index_path in index_files.values():
                    handle.close()
                    # Publish plain + gzipped index; record both in Release.
                    gz_index_path = _zip_file(index_path)
                    _add_to_release(release, index_path)
                    _add_to_release(release, gz_index_path)

                    for meta_path in (index_path, gz_index_path):
                        PublishedMetadata(
                            relative_path=meta_path,
                            publication=publication,
                            file=File(open(meta_path, 'rb')),
                        ).save()
                release['Architectures'] = ', '.join(index_files.keys())
                release_path = os.path.join('dists', 'default', 'Release')
                os.makedirs(os.path.dirname(release_path), exist_ok=True)
                with open(release_path, 'wb') as release_file:
                    release.dump(release_file)
                PublishedMetadata(
                    relative_path=release_path,
                    publication=publication,
                    file=File(open(release_path, 'rb')),
                ).save()

            if publisher.structured:
                raise NotImplementedError(
                    "Structured publishing is not yet implemented.")

    log.info(_('Publication: {publication} created').format(
        publication=publication.pk))
示例#26
0
def create_repomd_xml(content, publication, extra_repomdrecords, sub_folder=None):
    """
    Creates a repomd.xml file.

    Args:
        content(app.models.Content): content set
        publication(pulpcore.plugin.models.Publication): the publication
        extra_repomdrecords(list): list with data relative to repo metadata files
        sub_folder(str): name of the folder for sub repos

    """
    cwd = os.getcwd()
    repodata_path = REPODATA_PATH
    has_modules = False
    has_comps = False

    # Sub repos generate and publish their metadata under their own folder.
    if sub_folder:
        cwd = os.path.join(cwd, sub_folder)
        repodata_path = os.path.join(sub_folder, repodata_path)

    # Prepare metadata files
    repomd_path = os.path.join(cwd, "repomd.xml")
    pri_xml_path = os.path.join(cwd, "primary.xml.gz")
    fil_xml_path = os.path.join(cwd, "filelists.xml.gz")
    oth_xml_path = os.path.join(cwd, "other.xml.gz")
    pri_db_path = os.path.join(cwd, "primary.sqlite")
    fil_db_path = os.path.join(cwd, "filelists.sqlite")
    oth_db_path = os.path.join(cwd, "other.sqlite")
    upd_xml_path = os.path.join(cwd, "updateinfo.xml.gz")
    mod_yml_path = os.path.join(cwd, "modules.yaml")
    comps_xml_path = os.path.join(cwd, "comps.xml")

    pri_xml = cr.PrimaryXmlFile(pri_xml_path)
    fil_xml = cr.FilelistsXmlFile(fil_xml_path)
    oth_xml = cr.OtherXmlFile(oth_xml_path)
    pri_db = cr.PrimarySqlite(pri_db_path)
    fil_db = cr.FilelistsSqlite(fil_db_path)
    oth_db = cr.OtherSqlite(oth_db_path)
    upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

    packages = Package.objects.filter(pk__in=content)
    total_packages = packages.count()

    # The package count must be declared before packages are added.
    pri_xml.set_num_of_pkgs(total_packages)
    fil_xml.set_num_of_pkgs(total_packages)
    oth_xml.set_num_of_pkgs(total_packages)

    # Process all packages
    for package in packages.iterator():
        pkg = package.to_createrepo_c()
        # Point location_href at the artifact's published relative path.
        pkg.location_href = package.contentartifact_set.only('relative_path').first().relative_path
        pri_xml.add_pkg(pkg)
        fil_xml.add_pkg(pkg)
        oth_xml.add_pkg(pkg)
        pri_db.add_pkg(pkg)
        fil_db.add_pkg(pkg)
        oth_db.add_pkg(pkg)

    # Process update records
    for update_record in UpdateRecord.objects.filter(pk__in=content).iterator():
        upd_xml.add_chunk(cr.xml_dump_updaterecord(update_record.to_createrepo_c()))

    # Process modulemd and modulemd_defaults
    # All module documents are concatenated into a single modules.yaml file.
    with open(mod_yml_path, 'ab') as mod_yml:
        for modulemd in Modulemd.objects.filter(pk__in=content).iterator():
            mod_yml.write(modulemd._artifacts.get().file.read())
            has_modules = True
        for default in ModulemdDefaults.objects.filter(pk__in=content).iterator():
            mod_yml.write(default._artifacts.get().file.read())
            has_modules = True

    # Process comps
    comps = libcomps.Comps()
    for pkg_grp in PackageGroup.objects.filter(pk__in=content).iterator():
        group = pkg_grp.pkg_grp_to_libcomps()
        comps.groups.append(group)
        has_comps = True
    for pkg_cat in PackageCategory.objects.filter(pk__in=content).iterator():
        cat = pkg_cat.pkg_cat_to_libcomps()
        comps.categories.append(cat)
        has_comps = True
    for pkg_env in PackageEnvironment.objects.filter(pk__in=content).iterator():
        env = pkg_env.pkg_env_to_libcomps()
        comps.environments.append(env)
        has_comps = True
    for pkg_lng in PackageLangpacks.objects.filter(pk__in=content).iterator():
        comps.langpacks = dict_to_strdict(pkg_lng.matches)
        has_comps = True

    comps.toxml_f(comps_xml_path, xml_options={"default_explicit": True,
                                               "empty_groups": True,
                                               "uservisible_explicit": True})

    pri_xml.close()
    fil_xml.close()
    oth_xml.close()
    upd_xml.close()

    repomd = cr.Repomd()

    # (name, path, sqlite db whose dbinfo must carry this record's checksum)
    repomdrecords = [("primary", pri_xml_path, pri_db),
                     ("filelists", fil_xml_path, fil_db),
                     ("other", oth_xml_path, oth_db),
                     ("primary_db", pri_db_path, None),
                     ("filelists_db", fil_db_path, None),
                     ("other_db", oth_db_path, None),
                     ("updateinfo", upd_xml_path, None)]

    if has_modules:
        repomdrecords.append(("modules", mod_yml_path, None))

    if has_comps:
        repomdrecords.append(("group", comps_xml_path, None))

    repomdrecords.extend(extra_repomdrecords)

    sqlite_files = ("primary_db", "filelists_db", "other_db")
    for name, path, db_to_update in repomdrecords:
        record = cr.RepomdRecord(name, path)
        if name in sqlite_files:
            # Sqlite databases are published bz2-compressed.
            record_bz = record.compress_and_fill(cr.SHA256, cr.BZ2)
            record_bz.type = name
            record_bz.rename_file()
            path = record_bz.location_href.split('/')[-1]
            repomd.set_record(record_bz)
        else:
            record.fill(cr.SHA256)
            if (db_to_update):
                db_to_update.dbinfo_update(record.checksum)
                db_to_update.close()
            record.rename_file()
            path = record.location_href.split('/')[-1]
            repomd.set_record(record)

        if sub_folder:
            path = os.path.join(sub_folder, path)

        PublishedMetadata.create_from_file(
            relative_path=os.path.join(repodata_path, os.path.basename(path)),
            publication=publication,
            file=File(open(path, 'rb'))
        )

    with open(repomd_path, "w") as repomd_f:
        repomd_f.write(repomd.xml_dump())

    PublishedMetadata.create_from_file(
        relative_path=os.path.join(repodata_path, os.path.basename(repomd_path)),
        publication=publication,
        file=File(open(repomd_path, 'rb'))
    )
示例#27
0
def publish(repository_version_pk, simple=False, structured=False):
    """
    Use provided publisher to create a Publication based on a RepositoryVersion.

    Args:
        repository_version_pk (str): Create a publication from this repository version.
        simple (bool): Create a simple publication with all packages contained in default/all.
        structured (bool): Create a structured publication with releases and components.
            (Not yet implemented)

    Raises:
        NotImplementedError: If ``structured`` is requested.

    """
    repo_version = RepositoryVersion.objects.get(pk=repository_version_pk)

    log.info(
        _("Publishing: repository={repo}, version={ver}, simple={simple}, structured={structured}"
          ).format(  # noqa
              repo=repo_version.repository.name,
              ver=repo_version.number,
              simple=simple,
              structured=structured,
          ))
    with WorkingDirectory():
        with DebPublication.create(repo_version,
                                   pass_through=False) as publication:
            publication.simple = simple
            publication.structured = structured
            if simple:
                repository = repo_version.repository
                release = deb822.Release()
                # TODO: release['Label']
                release["Codename"] = "default"
                release["Components"] = "all"
                release["Architectures"] = ""
                if repository.description:
                    release["Description"] = repository.description
                # Checksum lists are populated by _add_to_release() for each
                # published index file.
                release["MD5sum"] = []
                release["SHA1"] = []
                release["SHA256"] = []
                release["SHA512"] = []
                # Maps architecture -> (open file handle, path) for the
                # per-architecture Packages index being built.
                package_index_files = {}
                for package in Package.objects.filter(
                        pk__in=repo_version.content.order_by("-pulp_created")):
                    published_artifact = PublishedArtifact(
                        relative_path=package.filename(),
                        publication=publication,
                        content_artifact=package.contentartifact_set.get(),
                    )
                    published_artifact.save()
                    if package.architecture not in package_index_files:
                        package_index_path = os.path.join(
                            "dists",
                            "default",
                            "all",
                            "binary-{}".format(package.architecture),
                            "Packages",
                        )
                        os.makedirs(os.path.dirname(package_index_path),
                                    exist_ok=True)
                        package_index_files[package.architecture] = (
                            open(package_index_path, "wb"),
                            package_index_path,
                        )
                    package_serializer = Package822Serializer(
                        package, context={"request": None})
                    package_serializer.to822("all").dump(
                        package_index_files[package.architecture][0])
                    package_index_files[package.architecture][0].write(b"\n")
                for (package_index_file,
                     package_index_path) in package_index_files.values():
                    package_index_file.close()
                    gz_package_index_path = _zip_file(package_index_path)
                    _add_to_release(release, package_index_path)
                    _add_to_release(release, gz_package_index_path)

                    # create_from_file() already persists the metadata record,
                    # so no extra save() is needed; context managers ensure the
                    # file descriptors are closed instead of leaking.
                    with open(package_index_path, "rb") as index_fd:
                        PublishedMetadata.create_from_file(
                            publication=publication,
                            file=File(index_fd))
                    with open(gz_package_index_path, "rb") as gz_index_fd:
                        PublishedMetadata.create_from_file(
                            publication=publication,
                            file=File(gz_index_fd))
                release["Architectures"] = ", ".join(
                    package_index_files.keys())
                release_path = os.path.join("dists", "default", "Release")
                os.makedirs(os.path.dirname(release_path), exist_ok=True)
                with open(release_path, "wb") as release_file:
                    release.dump(release_file)
                with open(release_path, "rb") as release_fd:
                    PublishedMetadata.create_from_file(
                        publication=publication,
                        file=File(release_fd))

            if structured:
                raise NotImplementedError(
                    "Structured publishing is not yet implemented.")

    log.info(
        _("Publication: {publication} created").format(
            publication=publication.pk))
示例#28
0
def create_repomd_xml(content,
                      publication,
                      checksum_types,
                      extra_repomdrecords,
                      sub_folder=None,
                      metadata_signing_service=None):
    """
    Creates a repomd.xml file.

    Args:
        content(app.models.Content): content set
        publication(pulpcore.plugin.models.Publication): the publication
        checksum_types(dict): checksum types to use per metadata file / package
        extra_repomdrecords(list): list with data relative to repo metadata files
        sub_folder(str): name of the folder for sub repos
        metadata_signing_service (pulpcore.app.models.AsciiArmoredDetachedSigningService):
            A reference to an associated signing service.

    """
    cwd = os.getcwd()
    repodata_path = REPODATA_PATH
    has_modules = False
    has_comps = False
    package_checksum_type = checksum_types.get("package")

    if sub_folder:
        cwd = os.path.join(cwd, sub_folder)
        repodata_path = os.path.join(sub_folder, repodata_path)

    # Prepare metadata files
    repomd_path = os.path.join(cwd, "repomd.xml")
    pri_xml_path = os.path.join(cwd, "primary.xml.gz")
    fil_xml_path = os.path.join(cwd, "filelists.xml.gz")
    oth_xml_path = os.path.join(cwd, "other.xml.gz")
    pri_db_path = os.path.join(cwd, "primary.sqlite")
    fil_db_path = os.path.join(cwd, "filelists.sqlite")
    oth_db_path = os.path.join(cwd, "other.sqlite")
    upd_xml_path = os.path.join(cwd, "updateinfo.xml.gz")
    mod_yml_path = os.path.join(cwd, "modules.yaml")
    comps_xml_path = os.path.join(cwd, "comps.xml")

    pri_xml = cr.PrimaryXmlFile(pri_xml_path)
    fil_xml = cr.FilelistsXmlFile(fil_xml_path)
    oth_xml = cr.OtherXmlFile(oth_xml_path)
    pri_db = cr.PrimarySqlite(pri_db_path)
    fil_db = cr.FilelistsSqlite(fil_db_path)
    oth_db = cr.OtherSqlite(oth_db_path)
    upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

    packages = Package.objects.filter(pk__in=content)
    total_packages = packages.count()

    pri_xml.set_num_of_pkgs(total_packages)
    fil_xml.set_num_of_pkgs(total_packages)
    oth_xml.set_num_of_pkgs(total_packages)

    # Process all packages
    for package in packages.iterator():
        pkg = package.to_createrepo_c(package_checksum_type)
        pkg_filename = os.path.basename(package.location_href)
        # this can cause an issue when two same RPM package names appears
        # a/name1.rpm b/name1.rpm
        pkg.location_href = os.path.join(PACKAGES_DIRECTORY,
                                         pkg_filename[0].lower(), pkg_filename)
        pri_xml.add_pkg(pkg)
        fil_xml.add_pkg(pkg)
        oth_xml.add_pkg(pkg)
        pri_db.add_pkg(pkg)
        fil_db.add_pkg(pkg)
        oth_db.add_pkg(pkg)

    # Process update records
    for update_record in UpdateRecord.objects.filter(
            pk__in=content).iterator():
        upd_xml.add_chunk(
            cr.xml_dump_updaterecord(update_record.to_createrepo_c()))

    # Process modulemd and modulemd_defaults
    with open(mod_yml_path, 'ab') as mod_yml:
        for modulemd in Modulemd.objects.filter(pk__in=content).iterator():
            mod_yml.write(modulemd._artifacts.get().file.read())
            has_modules = True
        for default in ModulemdDefaults.objects.filter(
                pk__in=content).iterator():
            mod_yml.write(default._artifacts.get().file.read())
            has_modules = True

    # Process comps
    comps = libcomps.Comps()
    for pkg_grp in PackageGroup.objects.filter(pk__in=content).iterator():
        group = pkg_grp.pkg_grp_to_libcomps()
        comps.groups.append(group)
        has_comps = True
    for pkg_cat in PackageCategory.objects.filter(pk__in=content).iterator():
        cat = pkg_cat.pkg_cat_to_libcomps()
        comps.categories.append(cat)
        has_comps = True
    for pkg_env in PackageEnvironment.objects.filter(
            pk__in=content).iterator():
        env = pkg_env.pkg_env_to_libcomps()
        comps.environments.append(env)
        has_comps = True
    for pkg_lng in PackageLangpacks.objects.filter(pk__in=content).iterator():
        comps.langpacks = dict_to_strdict(pkg_lng.matches)
        has_comps = True

    comps.toxml_f(comps_xml_path,
                  xml_options={
                      "default_explicit": True,
                      "empty_groups": True,
                      "uservisible_explicit": True
                  })

    pri_xml.close()
    fil_xml.close()
    oth_xml.close()
    upd_xml.close()

    repomd = cr.Repomd()

    repomdrecords = [("primary", pri_xml_path, pri_db),
                     ("filelists", fil_xml_path, fil_db),
                     ("other", oth_xml_path, oth_db),
                     ("primary_db", pri_db_path, None),
                     ("filelists_db", fil_db_path, None),
                     ("other_db", oth_db_path, None),
                     ("updateinfo", upd_xml_path, None)]

    if has_modules:
        repomdrecords.append(("modules", mod_yml_path, None))

    if has_comps:
        repomdrecords.append(("group", comps_xml_path, None))

    repomdrecords.extend(extra_repomdrecords)

    sqlite_files = ("primary_db", "filelists_db", "other_db")
    for name, path, db_to_update in repomdrecords:
        record = cr.RepomdRecord(name, path)
        checksum_type = get_checksum_type(name, checksum_types)
        if name in sqlite_files:
            # sqlite databases are re-compressed with bz2 before publication
            record_bz = record.compress_and_fill(checksum_type, cr.BZ2)
            record_bz.type = name
            record_bz.rename_file()
            path = record_bz.location_href.split('/')[-1]
            repomd.set_record(record_bz)
        else:
            record.fill(checksum_type)
            if (db_to_update):
                db_to_update.dbinfo_update(record.checksum)
                db_to_update.close()
            record.rename_file()
            path = record.location_href.split('/')[-1]
            repomd.set_record(record)

        if sub_folder:
            path = os.path.join(sub_folder, path)

        # Open via a context manager so the descriptor is closed instead of
        # leaked (create_from_file reads the file while it is open).
        with open(path, 'rb') as repodata_fd:
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(repodata_path,
                                           os.path.basename(path)),
                publication=publication,
                file=File(repodata_fd))

    with open(repomd_path, "w") as repomd_f:
        repomd_f.write(repomd.xml_dump())

    if metadata_signing_service:
        signing_service = AsciiArmoredDetachedSigningService.objects.get(
            pk=metadata_signing_service.pk)
        sign_results = signing_service.sign(repomd_path)

        # publish a signed file
        with open(sign_results['file'], 'rb') as signed_fd:
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(
                    repodata_path, os.path.basename(sign_results['file'])),
                publication=publication,
                file=File(signed_fd))

        # publish a detached signature
        with open(sign_results['signature'], 'rb') as signature_fd:
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(
                    repodata_path,
                    os.path.basename(sign_results['signature'])),
                publication=publication,
                file=File(signature_fd))

        # publish a public key required for further verification
        with open(sign_results['key'], 'rb') as key_fd:
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(
                    repodata_path, os.path.basename(sign_results['key'])),
                publication=publication,
                file=File(key_fd))
    else:
        with open(repomd_path, 'rb') as repomd_fd:
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(repodata_path,
                                           os.path.basename(repomd_path)),
                publication=publication,
                file=File(repomd_fd))
示例#29
0
def generate_repo_metadata(
    content,
    publication,
    checksum_types,
    extra_repomdrecords,
    sub_folder=None,
    metadata_signing_service=None,
):
    """
    Creates a repomd.xml file.

    Args:
        content(app.models.Content): content set
        publication(pulpcore.plugin.models.Publication): the publication
        checksum_types(dict): checksum types to use per metadata file / package
        extra_repomdrecords(list): list with data relative to repo metadata files
        sub_folder(str): name of the folder for sub repos
        metadata_signing_service (pulpcore.app.models.AsciiArmoredDetachedSigningService):
            A reference to an associated signing service.

    Raises:
        ValueError: If a requested or built-in package checksum type is not
            in ``settings.ALLOWED_CONTENT_CHECKSUMS``.

    """
    cwd = os.getcwd()
    repodata_path = REPODATA_PATH
    has_modules = False
    has_comps = False
    package_checksum_type = checksum_types.get("package")

    if sub_folder:
        cwd = os.path.join(cwd, sub_folder)
        repodata_path = os.path.join(sub_folder, repodata_path)

    if package_checksum_type and package_checksum_type not in settings.ALLOWED_CONTENT_CHECKSUMS:
        raise ValueError(
            "Repository contains disallowed package checksum type '{}', "
            "thus can't be published. {}".format(package_checksum_type,
                                                 ALLOWED_CHECKSUM_ERROR_MSG))

    # Prepare metadata files
    repomd_path = os.path.join(cwd, "repomd.xml")
    pri_xml_path = os.path.join(cwd, "primary.xml.gz")
    fil_xml_path = os.path.join(cwd, "filelists.xml.gz")
    oth_xml_path = os.path.join(cwd, "other.xml.gz")
    upd_xml_path = os.path.join(cwd, "updateinfo.xml.gz")
    mod_yml_path = os.path.join(cwd, "modules.yaml")
    comps_xml_path = os.path.join(cwd, "comps.xml")

    pri_xml = cr.PrimaryXmlFile(pri_xml_path)
    fil_xml = cr.FilelistsXmlFile(fil_xml_path)
    oth_xml = cr.OtherXmlFile(oth_xml_path)
    upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

    if publication.sqlite_metadata:
        pri_db_path = os.path.join(cwd, "primary.sqlite")
        fil_db_path = os.path.join(cwd, "filelists.sqlite")
        oth_db_path = os.path.join(cwd, "other.sqlite")
        pri_db = cr.PrimarySqlite(pri_db_path)
        fil_db = cr.FilelistsSqlite(fil_db_path)
        oth_db = cr.OtherSqlite(oth_db_path)

    packages = Package.objects.filter(pk__in=content)

    # We want to support publishing with a different checksum type than the one built-in to the
    # package itself, so we need to get the correct checksums somehow if there is an override.
    # We must also take into consideration that if the package has not been downloaded the only
    # checksum that is available is the one built-in.
    #
    # Since this lookup goes from Package->Content->ContentArtifact->Artifact, performance is a
    # challenge. We use ContentArtifact as our starting point because it enables us to work with
    # simple foreign keys and avoid messing with the many-to-many relationship, which doesn't
    # work with select_related() and performs poorly with prefetch_related(). This is fine
    # because we know that Packages should only ever have one artifact per content.
    contentartifact_qs = (
        ContentArtifact.objects.filter(content__in=packages.only("pk")).
        select_related(
            # content__rpm_package is a bit of a hack, exploiting the way django sets up model
            # inheritance, but it works and is unlikely to break. All content artifacts being
            # accessed here have an associated Package since they originally came from the
            # Package queryset.
            "artifact",
            "content__rpm_package",
        ).only("artifact", "content__rpm_package__checksum_type",
               "content__rpm_package__pkgId"))

    pkg_to_hash = {}
    for ca in contentartifact_qs.iterator():
        # Use per-iteration locals: previously the function-wide
        # `package_checksum_type` was reassigned inside this loop, so one
        # package's fallback checksum type (and a stale `pkgid`) leaked into
        # the lookups for all subsequent packages.
        pkg_checksum_type = package_checksum_type
        pkgid = None
        if pkg_checksum_type:
            pkg_checksum_type = pkg_checksum_type.lower()
            pkgid = getattr(ca.artifact, pkg_checksum_type, None)

        if not pkg_checksum_type or not pkgid:
            # Fall back to the checksum built into the package itself.
            if ca.content.rpm_package.checksum_type not in settings.ALLOWED_CONTENT_CHECKSUMS:
                raise ValueError(
                    "Package {} as content unit {} contains forbidden checksum type '{}', "
                    "thus can't be published. {}".format(
                        ca.content.rpm_package.nevra,
                        ca.content.pk,
                        ca.content.rpm_package.checksum_type,
                        ALLOWED_CHECKSUM_ERROR_MSG,
                    ))
            pkg_checksum_type = ca.content.rpm_package.checksum_type
            pkgid = ca.content.rpm_package.pkgId

        pkg_to_hash[ca.content_id] = (pkg_checksum_type, pkgid)

    # TODO: this is meant to be a !! *temporary* !! fix for
    # https://github.com/pulp/pulp_rpm/issues/2407
    pkg_pks_to_ignore = set()
    latest_build_time_by_nevra = defaultdict(list)
    for pkg in packages.only("pk", "name", "epoch", "version", "release",
                             "arch", "time_build").iterator():
        latest_build_time_by_nevra[pkg.nevra].append((pkg.time_build, pkg.pk))
    for nevra, pkg_data in latest_build_time_by_nevra.items():
        # sort the packages by when they were built
        if len(pkg_data) > 1:
            pkg_data.sort(key=lambda p: p[0], reverse=True)
            pkg_pks_to_ignore |= set(entry[1] for entry in pkg_data[1:])
            log.warning(
                "Duplicate packages found competing for NEVRA {nevra}, selected the one with "
                "the most recent build time, excluding {others} others.".
                format(nevra=nevra, others=len(pkg_data[1:])))

    total_packages = packages.count() - len(pkg_pks_to_ignore)

    pri_xml.set_num_of_pkgs(total_packages)
    fil_xml.set_num_of_pkgs(total_packages)
    oth_xml.set_num_of_pkgs(total_packages)

    # Process all packages
    for package in packages.order_by("name", "evr").iterator():
        if package.pk in pkg_pks_to_ignore:  # Temporary!
            continue
        pkg = package.to_createrepo_c()

        # rewrite the checksum and checksum type with the desired ones
        (checksum, pkgId) = pkg_to_hash[package.pk]
        pkg.checksum_type = checksum
        pkg.pkgId = pkgId

        pkg_filename = os.path.basename(package.location_href)
        # this can cause an issue when two same RPM package names appears
        # a/name1.rpm b/name1.rpm
        pkg.location_href = os.path.join(PACKAGES_DIRECTORY,
                                         pkg_filename[0].lower(), pkg_filename)
        pri_xml.add_pkg(pkg)
        fil_xml.add_pkg(pkg)
        oth_xml.add_pkg(pkg)
        if publication.sqlite_metadata:
            pri_db.add_pkg(pkg)
            fil_db.add_pkg(pkg)
            oth_db.add_pkg(pkg)

    # Process update records
    for update_record in UpdateRecord.objects.filter(
            pk__in=content).iterator():
        upd_xml.add_chunk(
            cr.xml_dump_updaterecord(update_record.to_createrepo_c()))

    # Process modulemd, modulemd_defaults and obsoletes
    with open(mod_yml_path, "ab") as mod_yml:
        for modulemd in Modulemd.objects.filter(pk__in=content).iterator():
            mod_yml.write(modulemd.snippet.encode())
            has_modules = True
        for default in ModulemdDefaults.objects.filter(
                pk__in=content).iterator():
            mod_yml.write(default.snippet.encode())
            has_modules = True
        for obsolete in ModulemdObsolete.objects.filter(
                pk__in=content).iterator():
            mod_yml.write(obsolete.snippet.encode())
            has_modules = True

    # Process comps
    comps = libcomps.Comps()
    for pkg_grp in PackageGroup.objects.filter(pk__in=content).iterator():
        group = pkg_grp.pkg_grp_to_libcomps()
        comps.groups.append(group)
        has_comps = True
    for pkg_cat in PackageCategory.objects.filter(pk__in=content).iterator():
        cat = pkg_cat.pkg_cat_to_libcomps()
        comps.categories.append(cat)
        has_comps = True
    for pkg_env in PackageEnvironment.objects.filter(
            pk__in=content).iterator():
        env = pkg_env.pkg_env_to_libcomps()
        comps.environments.append(env)
        has_comps = True
    for pkg_lng in PackageLangpacks.objects.filter(pk__in=content).iterator():
        comps.langpacks = dict_to_strdict(pkg_lng.matches)
        has_comps = True

    comps.toxml_f(
        comps_xml_path,
        xml_options={
            "default_explicit": True,
            "empty_groups": True,
            "empty_packages": True,
            "uservisible_explicit": True,
        },
    )

    pri_xml.close()
    fil_xml.close()
    oth_xml.close()
    upd_xml.close()

    repomd = cr.Repomd()
    # If the repository is empty, use a revision of 0
    # See: https://pulp.plan.io/issues/9402
    if not content.exists():
        repomd.revision = "0"

    if publication.sqlite_metadata:
        repomdrecords = [
            ("primary", pri_xml_path, pri_db),
            ("filelists", fil_xml_path, fil_db),
            ("other", oth_xml_path, oth_db),
            ("primary_db", pri_db_path, None),
            ("filelists_db", fil_db_path, None),
            ("other_db", oth_db_path, None),
            ("updateinfo", upd_xml_path, None),
        ]
    else:
        repomdrecords = [
            ("primary", pri_xml_path, None),
            ("filelists", fil_xml_path, None),
            ("other", oth_xml_path, None),
            ("updateinfo", upd_xml_path, None),
        ]

    if has_modules:
        repomdrecords.append(("modules", mod_yml_path, None))

    if has_comps:
        repomdrecords.append(("group", comps_xml_path, None))

    repomdrecords.extend(extra_repomdrecords)

    sqlite_files = ("primary_db", "filelists_db", "other_db")

    for name, path, db_to_update in repomdrecords:
        record = cr.RepomdRecord(name, path)
        checksum_type = cr_checksum_type_from_string(
            get_checksum_type(name,
                              checksum_types,
                              default=publication.metadata_checksum_type))
        if name in sqlite_files:
            record_bz = record.compress_and_fill(checksum_type, cr.BZ2)
            record_bz.type = name
            record_bz.rename_file()
            path = record_bz.location_href.split("/")[-1]
            repomd.set_record(record_bz)
        else:
            record.fill(checksum_type)
            if db_to_update:
                db_to_update.dbinfo_update(record.checksum)
                db_to_update.close()
            record.rename_file()
            path = record.location_href.split("/")[-1]
            repomd.set_record(record)

        if sub_folder:
            path = os.path.join(sub_folder, path)

        with open(path, "rb") as repodata_fd:
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(repodata_path,
                                           os.path.basename(path)),
                publication=publication,
                file=File(repodata_fd),
            )

    with open(repomd_path, "w") as repomd_f:
        repomd_f.write(repomd.xml_dump())

    if metadata_signing_service:
        signing_service = AsciiArmoredDetachedSigningService.objects.get(
            pk=metadata_signing_service)
        sign_results = signing_service.sign(repomd_path)

        # publish a signed file
        with open(sign_results["file"], "rb") as signed_file_fd:
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(
                    repodata_path, os.path.basename(sign_results["file"])),
                publication=publication,
                file=File(signed_file_fd),
            )

        # publish a detached signature
        with open(sign_results["signature"], "rb") as signature_fd:
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(
                    repodata_path,
                    os.path.basename(sign_results["signature"])),
                publication=publication,
                file=File(signature_fd),
            )

        # publish a public key required for further verification
        pubkey_name = "repomd.xml.key"
        with open(pubkey_name, "wb+") as f:
            f.write(signing_service.public_key.encode("utf-8"))
            f.flush()
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(repodata_path, pubkey_name),
                publication=publication,
                file=File(f),
            )
    else:
        with open(repomd_path, "rb") as repomd_fd:
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(repodata_path,
                                           os.path.basename(repomd_path)),
                publication=publication,
                file=File(repomd_fd),
            )
示例#30
0
def create_rempomd_xml(packages,
                       publication,
                       extra_repomdrecords,
                       sub_folder=None):
    """
    Creates a repomd.xml file.

    Args:
        packages(app.models.Package): set of packages
        publication(pulpcore.plugin.models.Publication): the publication
        extra_repomdrecords(list): list with data relative to repo metadata files
        sub_folder(str): name of the folder for sub repos

    """
    cwd = os.getcwd()
    repodata_path = REPODATA_PATH
    has_modules = False

    if sub_folder:
        cwd = os.path.join(cwd, sub_folder)
        repodata_path = os.path.join(sub_folder, repodata_path)

    # Prepare metadata files
    repomd_path = os.path.join(cwd, "repomd.xml")
    pri_xml_path = os.path.join(cwd, "primary.xml.gz")
    fil_xml_path = os.path.join(cwd, "filelists.xml.gz")
    oth_xml_path = os.path.join(cwd, "other.xml.gz")
    pri_db_path = os.path.join(cwd, "primary.sqlite")
    fil_db_path = os.path.join(cwd, "filelists.sqlite")
    oth_db_path = os.path.join(cwd, "other.sqlite")
    upd_xml_path = os.path.join(cwd, "updateinfo.xml.gz")
    mod_yml_path = os.path.join(cwd, "modules.yaml")

    pri_xml = cr.PrimaryXmlFile(pri_xml_path)
    fil_xml = cr.FilelistsXmlFile(fil_xml_path)
    oth_xml = cr.OtherXmlFile(oth_xml_path)
    pri_db = cr.PrimarySqlite(pri_db_path)
    fil_db = cr.FilelistsSqlite(fil_db_path)
    oth_db = cr.OtherSqlite(oth_db_path)
    upd_xml = cr.UpdateInfoXmlFile(upd_xml_path)

    pri_xml.set_num_of_pkgs(len(packages))
    fil_xml.set_num_of_pkgs(len(packages))
    oth_xml.set_num_of_pkgs(len(packages))

    # Process all packages
    for package in packages:
        pkg = package.to_createrepo_c()
        pkg.location_href = package.contentartifact_set.first().relative_path
        pri_xml.add_pkg(pkg)
        fil_xml.add_pkg(pkg)
        oth_xml.add_pkg(pkg)
        pri_db.add_pkg(pkg)
        fil_db.add_pkg(pkg)
        oth_db.add_pkg(pkg)

    # Process update records
    for update_record in UpdateRecord.objects.filter(
            pk__in=publication.repository_version.content):
        upd_xml.add_chunk(update_record_xml(update_record))

    # Process modulemd and modulemd_defaults
    with open(mod_yml_path, 'ab') as mod_yml:
        for modulemd in Modulemd.objects.filter(
                pk__in=publication.repository_version.content):
            mod_yml.write(modulemd._artifacts.get().file.read())
            has_modules = True
        for default in ModulemdDefaults.objects.filter(
                pk__in=publication.repository_version.content):
            mod_yml.write(default._artifacts.get().file.read())
            has_modules = True

    pri_xml.close()
    fil_xml.close()
    oth_xml.close()
    upd_xml.close()

    repomd = cr.Repomd()

    repomdrecords = [("primary", pri_xml_path, pri_db),
                     ("filelists", fil_xml_path, fil_db),
                     ("other", oth_xml_path, oth_db),
                     ("primary_db", pri_db_path, None),
                     ("filelists_db", fil_db_path, None),
                     ("other_db", oth_db_path, None),
                     ("updateinfo", upd_xml_path, None)]

    if has_modules:
        repomdrecords.append(("modules", mod_yml_path, None))

    repomdrecords.extend(extra_repomdrecords)

    sqlite_files = ("primary_db", "filelists_db", "other_db")
    for name, path, db_to_update in repomdrecords:
        record = cr.RepomdRecord(name, path)
        if name in sqlite_files:
            # sqlite databases are re-compressed with bz2 before publication
            record_bz = record.compress_and_fill(cr.SHA256, cr.BZ2)
            record_bz.type = name
            record_bz.rename_file()
            path = record_bz.location_href.split('/')[-1]
            repomd.set_record(record_bz)
        elif name == "modules":
            # modules metadata is gzip-compressed
            record_md = record.compress_and_fill(cr.SHA256, cr.GZ)
            record_md.type = name
            record_md.rename_file()
            path = record_md.location_href.split('/')[-1]
            repomd.set_record(record_md)
        else:
            record.fill(cr.SHA256)
            if (db_to_update):
                db_to_update.dbinfo_update(record.checksum)
                db_to_update.close()
            record.rename_file()
            path = record.location_href.split('/')[-1]
            repomd.set_record(record)

        if sub_folder:
            path = os.path.join(sub_folder, path)

        # Open via a context manager so the descriptor is closed instead of
        # leaked (create_from_file reads the file while it is open).
        with open(path, 'rb') as repodata_fd:
            PublishedMetadata.create_from_file(
                relative_path=os.path.join(repodata_path,
                                           os.path.basename(path)),
                publication=publication,
                file=File(repodata_fd))

    with open(repomd_path, "w") as repomd_f:
        repomd_f.write(repomd.xml_dump())

    with open(repomd_path, 'rb') as repomd_fd:
        PublishedMetadata.create_from_file(
            relative_path=os.path.join(repodata_path,
                                       os.path.basename(repomd_path)),
            publication=publication,
            file=File(repomd_fd))