def populate(publication):
    """
    Populate a publication.

    Create a PublishedArtifact for each content unit in the publication's
    repository version.

    Note: despite what the original docstring claimed, this function does not
    yield anything; it only persists PublishedArtifact rows.

    Args:
        publication (pulpcore.plugin.models.Publication): A Publication to populate.
    """
    # The previously-defined `find_artifact` helper was never called and has
    # been removed as dead code.
    for content in publication.repository_version.content:
        # Assumes exactly one ContentArtifact per content unit; .get() raises
        # otherwise, which matches the original behavior.
        content_artifact = content.contentartifact_set.get()
        published_artifact = PublishedArtifact(
            relative_path=content_artifact.relative_path,
            publication=publication,
            content_artifact=content_artifact)
        published_artifact.save()
def _publish(publication):
    """
    Create published artifacts and yield a Manifest Entry for each.

    Args:
        publication (pulpcore.plugin.models.Publication): The Publication being created.

    Yields:
        Entry: The manifest entry.
    """
    def _resolve(ca):
        # Prefer the locally stored Artifact; otherwise look up the
        # RemoteArtifact associated with this publication's repository.
        on_disk = ca.artifact
        if on_disk:
            return on_disk
        return RemoteArtifact.objects.get(
            content_artifact=ca,
            importer__repository=publication.repository_version.repository)

    for unit in publication.repository_version.content:
        for ca in unit.contentartifact_set.all():
            backing = _resolve(ca)
            PublishedArtifact(
                relative_path=ca.relative_path,
                publication=publication,
                content_artifact=ca,
            ).save()
            yield Entry(
                path=ca.relative_path,
                digest=backing.sha256,
                size=backing.size,
            )
def populate(self):
    """
    Populate a publication.

    Create published artifacts for a publication: distribution-tree artifacts,
    treeinfo metadata, and one PublishedArtifact per package artifact (main
    repo plus any addon/variant sub-repos).
    """
    publication = self.publication
    main_content = publication.repository_version.content
    distribution_trees = DistributionTree.objects.filter(
        pk__in=publication.repository_version.content).prefetch_related(
            "addons",
            "variants",
            "addons__repository",
            "variants__repository",
            "contentartifact_set")
    for distribution_tree in distribution_trees:
        # Publish the distribution tree's own files (images, etc.).
        for content_artifact in distribution_tree.contentartifact_set.all():
            self.published_artifacts.append(
                PublishedArtifact(
                    relative_path=content_artifact.relative_path,
                    publication=publication,
                    content_artifact=content_artifact))
        # Collect sub-repositories from addons/variants whose content differs
        # from the main repository content.
        for addon in distribution_tree.addons.all():
            repository_version = addon.repository.latest_version()
            if repository_version and repository_version.content != main_content:
                self.sub_repos.append(
                    (addon.addon_id, repository_version.content))
        for variant in distribution_tree.variants.all():
            repository_version = variant.repository.latest_version()
            if repository_version and repository_version.content != main_content:
                self.sub_repos.append(
                    (variant.variant_id, repository_version.content))
        # Generate and attach the treeinfo metadata file.
        treeinfo_file = create_treeinfo(distribution_tree)
        PublishedMetadata.create_from_file(
            publication=publication,
            file=File(open(treeinfo_file.name, 'rb')))
    self.packages = self.get_packages(main_content)
    self.repomdrecords = self.prepare_metadata_files(main_content)
    all_packages = self.packages
    for name, content in self.sub_repos:
        # Each sub-repo gets its own directory in the working dir and its own
        # package/repomd attributes on self (e.g. "<name>_packages").
        os.mkdir(name)
        sub_repo_packages = self.get_packages(content)
        all_packages = all_packages | sub_repo_packages
        setattr(self, f"{name}_packages", sub_repo_packages)
        setattr(self, f"{name}_repomdrecords",
                self.prepare_metadata_files(content, name))
    # distinct() because main and sub-repo querysets may overlap after the
    # union above.
    for package in all_packages.distinct():
        for content_artifact in package.contentartifact_set.all():
            self.published_artifacts.append(
                PublishedArtifact(
                    relative_path=content_artifact.relative_path,
                    publication=self.publication,
                    content_artifact=content_artifact))
    PublishedArtifact.objects.bulk_create(self.published_artifacts)
def populate(publication):
    """
    Populate a publication.

    Create a PublishedArtifact for every content artifact of every Package in
    the publication's repository version.

    Args:
        publication (pulpcore.plugin.models.Publication): A Publication to populate.
    """
    # The previously-defined `find_artifact` helper was never called and has
    # been removed as dead code.
    for package in Package.objects.filter(
            pk__in=publication.repository_version.content):
        for content_artifact in package.contentartifact_set.all():
            published_artifact = PublishedArtifact(
                relative_path=content_artifact.relative_path,
                publication=publication,
                content_artifact=content_artifact)
            published_artifact.save()
def populate(publication):
    """
    Populate a publication.

    Create published artifacts and yield a Universe Entry for each.

    Args:
        publication (pulpcore.plugin.models.Publication): A Publication to populate.

    Yields:
        Entry: Universe entry for each cookbook
    """
    cookbooks = CookbookPackageContent.objects.filter(
        pk__in=publication.repository_version.content).order_by('-created')
    for cookbook in cookbooks:
        # Dots in the version are replaced to build the directory name.
        base_path = 'cookbook_files/{}/{}/'.format(
            cookbook.name, cookbook.version.replace('.', '_'))
        for ca in cookbook.contentartifact_set.all():
            full_path = os.path.join(base_path, ca.relative_path)
            PublishedArtifact(
                relative_path=full_path,
                publication=publication,
                content_artifact=ca,
            ).save()
            yield Entry(
                name=cookbook.name,
                version=cookbook.version,
                download_url=path_template(full_path),
                dependencies=cookbook.dependencies,
            )
def populate(publication):
    """
    Populate a publication.

    Create published artifacts and yield a Manifest Entry for each.
    When multiple units share a relative path, only the newest (by creation
    time) is published.

    Args:
        publication (pulpcore.plugin.models.Publication): A Publication to populate.

    Yields:
        Entry: Each manifest entry.
    """
    def _backing_artifact(ca):
        # Prefer the locally stored Artifact; otherwise pick any RemoteArtifact.
        stored = ca.artifact
        if stored:
            return stored
        return RemoteArtifact.objects.filter(content_artifact=ca).first()

    seen_paths = set()
    newest_first = FileContent.objects.filter(
        pk__in=publication.repository_version.content).order_by('-created')
    for unit in newest_first:
        if unit.relative_path in seen_paths:
            continue
        seen_paths.add(unit.relative_path)
        for ca in unit.contentartifact_set.all():
            backing = _backing_artifact(ca)
            PublishedArtifact(
                relative_path=ca.relative_path,
                publication=publication,
                content_artifact=ca,
            ).save()
            yield Entry(
                relative_path=ca.relative_path,
                digest=backing.sha256,
                size=backing.size)
def publish_artifacts(self, content, prefix=""):
    """
    Publish artifacts.

    Args:
        content (pulpcore.plugin.models.Content): content set.
        prefix (str): a relative path prefix for the published artifact
    """
    to_create = []

    # Packages are laid out as <prefix>/<PACKAGES_DIRECTORY>/<first letter>/<filename>.
    package_cas = ContentArtifact.objects.filter(
        content__in=content).filter(
            content__pulp_type=Package.get_pulp_type())
    for ca in package_cas.values("pk", "relative_path").iterator():
        filename = ca["relative_path"]
        full_path = os.path.join(
            prefix, PACKAGES_DIRECTORY, filename.lower()[0], filename)
        to_create.append(
            PublishedArtifact(
                relative_path=full_path,
                publication=self.publication,
                content_artifact_id=ca["pk"],
            ))

    # Everything else keeps its own relative path, minus treeinfo files and
    # the excluded content types below.
    treeinfo = Q(relative_path__in=["treeinfo", ".treeinfo"])
    excluded = Q(content__pulp_type__in=[
        RepoMetadataFile.get_pulp_type(),
        Modulemd.get_pulp_type(),
        ModulemdDefaults.get_pulp_type(),
        Package.get_pulp_type(),  # already dealt with above
    ])
    other_cas = (ContentArtifact.objects.filter(content__in=content)
                 .exclude(excluded)
                 .exclude(treeinfo))
    for ca in other_cas.values("pk", "relative_path").iterator():
        to_create.append(
            PublishedArtifact(
                relative_path=ca["relative_path"],
                publication=self.publication,
                content_artifact_id=ca["pk"],
            ))

    PublishedArtifact.objects.bulk_create(to_create, batch_size=2000)
def publish(publisher_pk, repository_version_pk):
    """
    Use provided publisher to create a Publication based on a RepositoryVersion.

    Args:
        publisher_pk (str): Use the publish settings provided by this publisher.
        repository_version_pk (str): Create a publication from this repository version.
    """
    publisher = GemPublisher.objects.get(pk=publisher_pk)
    repository_version = RepositoryVersion.objects.get(
        pk=repository_version_pk)
    log.info(
        _('Publishing: repository=%(repository)s, version=%(version)d, publisher=%(publisher)s'
          ), {
              'repository': repository_version.repository.name,
              'version': repository_version.number,
              'publisher': publisher.name,
          })
    with WorkingDirectory():
        with Publication.create(repository_version, publisher) as publication:
            specs = []              # (name, version) keys of all release gems
            latest_versions = {}    # gem name -> highest release version seen
            prerelease_specs = []   # (name, version) keys of prerelease gems
            for content in GemContent.objects.filter(
                    pk__in=publication.repository_version.content).order_by(
                        '-created'):
                for content_artifact in content.contentartifact_set.all():
                    published_artifact = PublishedArtifact(
                        relative_path=content_artifact.relative_path,
                        publication=publication,
                        content_artifact=content_artifact)
                    published_artifact.save()
                # Purely numeric dotted versions are releases; anything else
                # (e.g. "1.0.0.rc1") is treated as a prerelease.
                if re.fullmatch(r"[0-9.]*", content.version):
                    specs.append(Key(content.name, content.version))
                    old_ver = latest_versions.get(content.name)
                    if old_ver is None or version.parse(
                            old_ver) < version.parse(content.version):
                        latest_versions[content.name] = content.version
                else:
                    prerelease_specs.append(Key(content.name, content.version))
            # NOTE(review): the loop target `version` shadows the imported
            # `version` module, but only within this comprehension's scope.
            latest_specs = [
                Key(name, version) for name, version in latest_versions.items()
            ]
            _publish_specs(specs, 'specs.4.8', publication)
            _publish_specs(latest_specs, 'latest_specs.4.8', publication)
            _publish_specs(prerelease_specs, 'prerelease_specs.4.8',
                           publication)
    log.info(_('Publication: %(publication)s created'),
             {'publication': publication.pk})
def add_package(self, package):
    """Publish a package and append its 822 stanza to its architecture's index."""
    PublishedArtifact(
        relative_path=package.filename(self.component),
        publication=self.parent.publication,
        content_artifact=package.contentartifact_set.get(),
    ).save()
    index_file = self.package_index_files[package.architecture][0]
    serializer = Package822Serializer(package, context={"request": None})
    serializer.to822(self.component).dump(index_file)
    index_file.write(b"\n")
def publish_chart_content(publication):
    """
    Create published artifacts and metadata for a publication

    Args:
        publication (ChartPublication): The publication to store
    """
    entries = {}
    charts = ChartContent.objects.filter(
        pk__in=publication.repository_version.content).order_by(
            'name', '-created')
    for chart in charts:
        chart_artifacts = chart.contentartifact_set.all()
        for ca in chart_artifacts:
            PublishedArtifact(relative_path=ca.relative_path,
                              publication=publication,
                              content_artifact=ca).save()
        record = {
            'apiVersion': 'v1',
            'created': chart.created.isoformat(),
            'description': chart.description,
            'digest': chart.digest,
            'icon': chart.icon,
            'keywords': chart.keywords,
            'name': chart.name,
            'urls': [ca.relative_path for ca in chart_artifacts],
            'version': chart.version
        }
        # Strip away empty keys when building metadata
        cleaned = {
            k: v for k, v in record.items() if (v is not None and v != [])
        }
        entries.setdefault(chart.name, []).append(cleaned)
    doc = {
        'apiVersion': 'v1',
        'entries': entries,
        'generated': timezone.now().isoformat()
    }
    with open('index.yaml', 'w') as index:
        index.write(yaml.dump(doc))
    index = PublishedMetadata.create_from_file(publication=publication,
                                               file=File(
                                                   open('index.yaml', 'rb')))
    index.save()
def publish(repository_version_pk):
    """
    Create a Publication based on a RepositoryVersion.

    Args:
        repository_version_pk (str): Create a publication from this repository version.
    """
    repository_version = RepositoryVersion.objects.get(
        pk=repository_version_pk)
    log.info(
        _("Publishing: repository={repo}, version={ver}").format(
            repo=repository_version.repository.name,
            ver=repository_version.number))
    with WorkingDirectory():
        with GemPublication.create(repository_version) as publication:
            specs = []              # (name, version) keys of all release gems
            latest_versions = {}    # gem name -> highest release version seen
            prerelease_specs = []   # (name, version) keys of prerelease gems
            for content in GemContent.objects.filter(
                    pk__in=publication.repository_version.content).order_by(
                        "-pulp_created"):
                for content_artifact in content.contentartifact_set.all():
                    published_artifact = PublishedArtifact(
                        relative_path=content_artifact.relative_path,
                        publication=publication,
                        content_artifact=content_artifact,
                    )
                    published_artifact.save()
                # Purely numeric dotted versions are releases; anything else
                # (e.g. "1.0.0.rc1") is treated as a prerelease.
                if re.fullmatch(r"[0-9.]*", content.version):
                    specs.append(Key(content.name, content.version))
                    old_ver = latest_versions.get(content.name)
                    if old_ver is None or version.parse(
                            old_ver) < version.parse(content.version):
                        latest_versions[content.name] = content.version
                else:
                    prerelease_specs.append(Key(content.name, content.version))
            # NOTE(review): the loop target `version` shadows the imported
            # `version` module, but only within this comprehension's scope.
            latest_specs = [
                Key(name, version) for name, version in latest_versions.items()
            ]
            _publish_specs(specs, "specs.4.8", publication)
            _publish_specs(latest_specs, "latest_specs.4.8", publication)
            _publish_specs(prerelease_specs, "prerelease_specs.4.8",
                           publication)
    log.info(
        _("Publication: {publication} created").format(
            publication=publication.pk))
def populate(publication):
    """
    Populate a publication.

    Create published artifacts for a publication.

    Args:
        publication (pulpcore.plugin.models.Publication): A Publication to populate.

    Returns:
        packages (pulp_rpm.models.Package): A list of published packages.
    """
    packages = Package.objects.filter(
        pk__in=publication.repository_version.content).prefetch_related(
            'contentartifact_set')
    # Build all rows first, then persist them in a single bulk insert.
    records = [
        PublishedArtifact(relative_path=ca.relative_path,
                          publication=publication,
                          content_artifact=ca)
        for pkg in packages
        for ca in pkg.contentartifact_set.all()
    ]
    PublishedArtifact.objects.bulk_create(records)
    return packages
def add_package(self, package):
    """Publish a package and write its 822 stanza into the package indices.

    An arch-"all" package is recorded in every architecture's index; any other
    package only in the index for its own architecture.
    """
    PublishedArtifact(
        relative_path=package.filename(self.name),
        publication=self.parent.publication,
        content_artifact=package.contentartifact_set.get(),
    ).save()
    serializer = Package822Serializer(package, context={"request": None})
    stanza = serializer.to822(self.name)
    if package.architecture == "all":
        targets = list(self.package_index_files.values())
    else:
        targets = [self.package_index_files[package.architecture]]
    for target in targets:
        stanza.dump(target[0])
        target[0].write(b"\n")
def publish_artifacts(self, content):
    """
    Publish artifacts.

    Args:
        content (pulpcore.plugin.models.Content): content set.
    """
    # These content types are excluded; everything else gets a PublishedArtifact.
    publishable = content.exclude(pulp_type__in=[
        RepoMetadataFile.get_pulp_type(),
        Modulemd.get_pulp_type(),
        ModulemdDefaults.get_pulp_type()
    ]).distinct()
    records = []
    for ca in ContentArtifact.objects.filter(
            content__in=publishable).iterator():
        rel_path = ca.relative_path
        if ca.content.pulp_type == Package.get_pulp_type():
            # Packages land in <PACKAGES_DIRECTORY>/<first letter>/<filename>.
            rel_path = os.path.join(PACKAGES_DIRECTORY,
                                    rel_path.lower()[0],
                                    ca.relative_path)
        records.append(
            PublishedArtifact(relative_path=rel_path,
                              publication=self.publication,
                              content_artifact=ca))
    PublishedArtifact.objects.bulk_create(records, batch_size=2000)
def _publish(self):
    """
    Create published artifacts and yield the manifest entry for each.

    Yields:
        Entry: The manifest entry.
    """
    for unit in self.publication.repo_version.content():
        for ca in unit.contentartifact_set.all():
            backing = self._find_artifact(ca)
            PublishedArtifact(
                relative_path=ca.relative_path,
                publication=self.publication,
                content_artifact=ca,
            ).save()
            yield Entry(
                path=ca.relative_path,
                digest=backing.sha256,
                size=backing.size)
def populate(publication):
    """
    Populate a publication.

    Create a PublishedArtifact for every content artifact of every Package in
    the publication's repository version.

    Args:
        publication (pulpcore.plugin.models.Publication): A Publication to populate.
    """
    # The previously-defined `find_artifact` helper was never called and has
    # been removed as dead code.
    for package in Package.objects.filter(
            pk__in=publication.repository_version.content):
        for content_artifact in package.contentartifact_set.all():
            published_artifact = PublishedArtifact(
                relative_path=content_artifact.relative_path,
                publication=publication,
                content_artifact=content_artifact)
            published_artifact.save()
def _publish(self):
    """
    Create published artifacts and yield the string representation to be
    written to the PULP_MANIFEST file.

    Yields:
        String: The manifest entry for the published content
    """
    repo_content = ExampleContent.objects.filter(repositories=self.repository)
    # Progress is tracked per content unit, not per artifact.
    with ProgressBar(message=_("Publishing ExampleContent"),
                     total=repo_content.count()) as bar:
        for unit in repo_content:
            for ca in unit.contentartifact_set.all():
                backing = self._find_artifact(ca)
                PublishedArtifact(
                    relative_path=ca.relative_path,
                    publication=self.publication,
                    content_artifact=ca,
                ).save()
                yield "{},{},{}".format(ca.relative_path,
                                        backing.sha256,
                                        backing.size)
            bar.increment()
def populate(publication, progress_report=None, batch_size=BATCH_SIZE):
    """
    Populate a publication.

    Create published artifacts and yield a Universe Entry for each.

    Args:
        publication (:class:`~pulp_cookbook.models.CookbookPublication`): CookbookPublication
            to populate.
        progress_report (:class:`~pulpcore.plugin.models.ProgressReport`): If given, used to
            report progress
        batch_size (int): Size of content batches to process at once

    Yields:
        Entry: Universe entry for each cookbook
    """
    content_batches = publication.repository_version.content_batch_qs(
        content_qs=CookbookPackageContent.objects.all(),
        batch_size=batch_size,
    )
    for content_slice_qs in content_batches:
        published_artifacts = []
        for content in content_slice_qs.prefetch_related(
                "contentartifact_set"):
            # Dots in the version are replaced to build the directory name.
            relative_path = "cookbook_files/{}/{}/".format(
                content.name, content.version.replace(".", "_"))
            for content_artifact in content.contentartifact_set.all():
                art_path = os.path.join(relative_path,
                                        content_artifact.relative_path)
                published_artifacts.append(
                    PublishedArtifact(
                        relative_path=art_path,
                        publication=publication,
                        content_artifact=content_artifact,
                    ))
                entry = Entry(
                    name=content.name,
                    version=content.version,
                    download_url=path_template(art_path),
                    dependencies=content.dependencies,
                )
                yield entry
        # NOTE: entries for this batch are yielded before the artifacts are
        # persisted; the generator must be consumed for bulk_create to run.
        PublishedArtifact.objects.bulk_create(published_artifacts)
        if progress_report:
            progress_report.increase_by(len(published_artifacts))
def publish_artifacts(self, content):
    """
    Publish artifacts.

    Args:
        content (pulpcore.plugin.models.Content): content set.
    """
    records = [
        PublishedArtifact(
            relative_path=ca.relative_path,
            publication=self.publication,
            content_artifact=ca,
        )
        for ca in ContentArtifact.objects.filter(
            content__in=content).iterator()
    ]
    PublishedArtifact.objects.bulk_create(records, batch_size=2000)
def publish(publisher_pk, repository_version_pk):
    """
    Use provided publisher to create a Publication based on a RepositoryVersion.

    Args:
        publisher_pk (str): Use the publish settings provided by this publisher.
        repository_version_pk (str): Create a publication from this repository version.
    """
    publisher = DebPublisher.objects.get(pk=publisher_pk)
    repository_version = RepositoryVersion.objects.get(
        pk=repository_version_pk)
    log.info(
        _('Publishing: repository={repo}, version={ver}, publisher={pub}').
        format(repo=repository_version.repository.name,
               ver=repository_version.number,
               pub=publisher.name))
    with WorkingDirectory():
        with Publication.create(repository_version, publisher,
                                pass_through=False) as publication:
            if publisher.simple:
                repository = repository_version.repository
                release = deb822.Release()
                # TODO: release['Label']
                release['Codename'] = 'default'
                release['Components'] = 'all'
                release['Architectures'] = ''
                if repository.description:
                    release['Description'] = repository.description
                # NOTE(review): presumably these checksum lists are appended to
                # by _add_to_release() below — confirm.
                release['MD5sum'] = []
                release['SHA1'] = []
                release['SHA256'] = []
                release['SHA512'] = []
                # architecture -> (open file handle, path) of its Packages index
                package_index_files = {}
                for package in Package.objects.filter(
                        pk__in=repository_version.content.order_by(
                            '-_created')):
                    published_artifact = PublishedArtifact(
                        relative_path=package.filename(),
                        publication=publication,
                        content_artifact=package.contentartifact_set.get(),
                    )
                    published_artifact.save()
                    # Lazily open one Packages index per architecture.
                    if package.architecture not in package_index_files:
                        package_index_path = os.path.join(
                            'dists',
                            'default',
                            'all',
                            'binary-{}'.format(package.architecture),
                            'Packages',
                        )
                        os.makedirs(os.path.dirname(package_index_path),
                                    exist_ok=True)
                        package_index_files[package.architecture] = (open(
                            package_index_path, 'wb'), package_index_path)
                    package.to822('all').dump(
                        package_index_files[package.architecture][0])
                    package_index_files[package.architecture][0].write(b'\n')
                # Close, compress and register each per-architecture index.
                for package_index_file, package_index_path in \
                        package_index_files.values():
                    package_index_file.close()
                    gz_package_index_path = _zip_file(package_index_path)
                    _add_to_release(release, package_index_path)
                    _add_to_release(release, gz_package_index_path)
                    package_index = PublishedMetadata(
                        relative_path=package_index_path,
                        publication=publication,
                        file=File(open(package_index_path, 'rb')),
                    )
                    package_index.save()
                    gz_package_index = PublishedMetadata(
                        relative_path=gz_package_index_path,
                        publication=publication,
                        file=File(open(gz_package_index_path, 'rb')),
                    )
                    gz_package_index.save()
                release['Architectures'] = ', '.join(
                    package_index_files.keys())
                release_path = os.path.join('dists', 'default', 'Release')
                os.makedirs(os.path.dirname(release_path), exist_ok=True)
                with open(release_path, 'wb') as release_file:
                    release.dump(release_file)
                release_metadata = PublishedMetadata(
                    relative_path=release_path,
                    publication=publication,
                    file=File(open(release_path, 'rb')),
                )
                release_metadata.save()
            if publisher.structured:
                raise NotImplementedError(
                    "Structured publishing is not yet implemented.")
    log.info(
        _('Publication: {publication} created').format(
            publication=publication.pk))
def publish(repository_version_pk, simple=False, structured=False):
    """
    Use provided publisher to create a Publication based on a RepositoryVersion.

    Args:
        repository_version_pk (str): Create a publication from this repository version.
        simple (bool): Create a simple publication with all packages contained in default/all.
        structured (bool): Create a structured publication with releases and components.
            (Not yet implemented)
    """
    repo_version = RepositoryVersion.objects.get(pk=repository_version_pk)
    log.info(
        _("Publishing: repository={repo}, version={ver}, simple={simple}, structured={structured}"
          ).format(  # noqa
              repo=repo_version.repository.name,
              ver=repo_version.number,
              simple=simple,
              structured=structured,
          ))
    with WorkingDirectory():
        with DebPublication.create(repo_version,
                                   pass_through=False) as publication:
            publication.simple = simple
            publication.structured = structured
            if simple:
                repository = repo_version.repository
                release = deb822.Release()
                # TODO: release['Label']
                release["Codename"] = "default"
                release["Components"] = "all"
                release["Architectures"] = ""
                if repository.description:
                    release["Description"] = repository.description
                # NOTE(review): presumably these checksum lists are appended to
                # by _add_to_release() below — confirm.
                release["MD5sum"] = []
                release["SHA1"] = []
                release["SHA256"] = []
                release["SHA512"] = []
                # architecture -> (open file handle, path) of its Packages index
                package_index_files = {}
                for package in Package.objects.filter(
                        pk__in=repo_version.content.order_by("-pulp_created")):
                    published_artifact = PublishedArtifact(
                        relative_path=package.filename(),
                        publication=publication,
                        content_artifact=package.contentartifact_set.get(),
                    )
                    published_artifact.save()
                    # Lazily open one Packages index per architecture.
                    if package.architecture not in package_index_files:
                        package_index_path = os.path.join(
                            "dists",
                            "default",
                            "all",
                            "binary-{}".format(package.architecture),
                            "Packages",
                        )
                        os.makedirs(os.path.dirname(package_index_path),
                                    exist_ok=True)
                        package_index_files[package.architecture] = (
                            open(package_index_path, "wb"),
                            package_index_path,
                        )
                    package_serializer = Package822Serializer(
                        package, context={"request": None})
                    package_serializer.to822("all").dump(
                        package_index_files[package.architecture][0])
                    package_index_files[package.architecture][0].write(b"\n")
                # Close, compress and register each per-architecture index.
                for (package_index_file,
                     package_index_path) in package_index_files.values():
                    package_index_file.close()
                    gz_package_index_path = _zip_file(package_index_path)
                    _add_to_release(release, package_index_path)
                    _add_to_release(release, gz_package_index_path)
                    package_index = PublishedMetadata.create_from_file(
                        publication=publication,
                        file=File(open(package_index_path, "rb")))
                    package_index.save()
                    gz_package_index = PublishedMetadata.create_from_file(
                        publication=publication,
                        file=File(open(gz_package_index_path, "rb")))
                    gz_package_index.save()
                release["Architectures"] = ", ".join(
                    package_index_files.keys())
                release_path = os.path.join("dists", "default", "Release")
                os.makedirs(os.path.dirname(release_path), exist_ok=True)
                with open(release_path, "wb") as release_file:
                    release.dump(release_file)
                release_metadata = PublishedMetadata.create_from_file(
                    publication=publication,
                    file=File(open(release_path, "rb")))
                release_metadata.save()
            if structured:
                raise NotImplementedError(
                    "Structured publishing is not yet implemented.")
    log.info(
        _("Publication: {publication} created").format(
            publication=publication.pk))
def publish_artifacts(self, content, prefix=""):
    """
    Publish artifacts.

    Args:
        content (pulpcore.plugin.models.Content): content set.
        prefix (str): a relative path prefix for the published artifact
    """
    published_artifacts = []

    # Special case for Packages
    contentartifact_qs = ContentArtifact.objects.filter(content__in=content).filter(
        content__pulp_type=Package.get_pulp_type()
    )

    paths = set()
    duplicated_paths = []
    for content_artifact in contentartifact_qs.values("pk", "relative_path").iterator():
        relative_path = content_artifact["relative_path"]
        relative_path = os.path.join(
            prefix, PACKAGES_DIRECTORY, relative_path.lower()[0], relative_path
        )
        #
        # Some Suboptimal Repos have the 'same' artifact living in multiple places.
        # Specifically, the same NEVRA, in more than once place, **with different checksums**
        # (since if all that was different was location_href there would be only one
        # ContentArtifact in the first place).
        #
        # pulp_rpm wants to publish a 'canonical' repository-layout, under which an RPM
        # "name-version-release-arch" appears at "Packages/n/name-version-release-arch.rpm".
        # Because the assumption is that Packages don't "own" their path, only the filename
        # is kept as relative_path.
        #
        # In this case, we have to pick one - which is essentially what the rest of the RPM
        # Ecosystem does when faced with the impossible. This code takes the first-found. We
        # could implement something more complicated, if there are better options
        # (choose by last-created maybe?)
        #
        # Note that this only impacts user-created publications, which produce the "standard"
        # RPM layout of repo/Packages/f/foo.rpm. A publication created by mirror-sync retains
        # whatever layout their "upstream" repo-metadata dictates.
        #
        if relative_path in paths:
            # Duplicate destination path: record it and skip the artifact.
            duplicated_paths.append(f'{relative_path}:{content_artifact["pk"]}')
            continue
        else:
            paths.add(relative_path)
        published_artifacts.append(
            PublishedArtifact(
                relative_path=relative_path,
                publication=self.publication,
                content_artifact_id=content_artifact["pk"],
            )
        )

    if duplicated_paths:
        log.warning(
            _("Duplicate paths found at publish : {problems} ").format(
                problems="; ".join(duplicated_paths)
            )
        )

    # Handle everything else
    is_treeinfo = Q(relative_path__in=["treeinfo", ".treeinfo"])
    unpublishable_types = Q(
        content__pulp_type__in=[
            RepoMetadataFile.get_pulp_type(),
            Modulemd.get_pulp_type(),
            ModulemdDefaults.get_pulp_type(),
            # already dealt with
            Package.get_pulp_type(),
        ]
    )
    contentartifact_qs = (
        ContentArtifact.objects.filter(content__in=content)
        .exclude(unpublishable_types)
        .exclude(is_treeinfo)
    )
    for content_artifact in contentartifact_qs.values("pk", "relative_path").iterator():
        published_artifacts.append(
            PublishedArtifact(
                relative_path=content_artifact["relative_path"],
                publication=self.publication,
                content_artifact_id=content_artifact["pk"],
            )
        )

    PublishedArtifact.objects.bulk_create(published_artifacts, batch_size=2000)
def publish(publisher_pk, repository_version_pk):
    """
    Use provided publisher to create a Publication based on a RepositoryVersion.

    Args:
        publisher_pk (str): Use the publish settings provided by this publisher.
        repository_version_pk (str): Create a publication from this repository version.
    """
    publisher = DebPublisher.objects.get(pk=publisher_pk)
    repository_version = RepositoryVersion.objects.get(pk=repository_version_pk)
    log.info(_('Publishing: repository={repo}, version={ver}, publisher={pub}').format(
        repo=repository_version.repository.name,
        ver=repository_version.number,
        pub=publisher.name
    ))
    with WorkingDirectory():
        with Publication.create(repository_version, publisher,
                                pass_through=False) as publication:
            if publisher.simple:
                repository = repository_version.repository
                release = deb822.Release()
                # TODO: release['Label']
                release['Codename'] = 'default'
                release['Components'] = 'all'
                release['Architectures'] = ''
                if repository.description:
                    release['Description'] = repository.description
                # NOTE(review): presumably these checksum lists are appended to
                # by _add_to_release() below — confirm.
                release['MD5sum'] = []
                release['SHA1'] = []
                release['SHA256'] = []
                release['SHA512'] = []
                # architecture -> (open file handle, path) of its Packages index
                package_index_files = {}
                for package in Package.objects.filter(
                        pk__in=repository_version.content.order_by('-_created')
                ):
                    published_artifact = PublishedArtifact(
                        relative_path=package.filename(),
                        publication=publication,
                        content_artifact=package.contentartifact_set.get(),
                    )
                    published_artifact.save()
                    # Lazily open one Packages index per architecture.
                    if package.architecture not in package_index_files:
                        package_index_path = os.path.join(
                            'dists',
                            'default',
                            'all',
                            'binary-{}'.format(package.architecture),
                            'Packages',
                        )
                        os.makedirs(os.path.dirname(
                            package_index_path), exist_ok=True)
                        package_index_files[package.architecture] = (
                            open(package_index_path, 'wb'), package_index_path)
                    package.to822('all').dump(
                        package_index_files[package.architecture][0])
                    package_index_files[package.architecture][0].write(b'\n')
                # Close, compress and register each per-architecture index.
                for package_index_file, package_index_path in \
                        package_index_files.values():
                    package_index_file.close()
                    gz_package_index_path = _zip_file(package_index_path)
                    _add_to_release(release, package_index_path)
                    _add_to_release(release, gz_package_index_path)
                    package_index = PublishedMetadata(
                        relative_path=package_index_path,
                        publication=publication,
                        file=File(open(package_index_path, 'rb')),
                    )
                    package_index.save()
                    gz_package_index = PublishedMetadata(
                        relative_path=gz_package_index_path,
                        publication=publication,
                        file=File(open(gz_package_index_path, 'rb')),
                    )
                    gz_package_index.save()
                release['Architectures'] = ', '.join(package_index_files.keys())
                release_path = os.path.join('dists', 'default', 'Release')
                os.makedirs(os.path.dirname(release_path), exist_ok=True)
                with open(release_path, 'wb') as release_file:
                    release.dump(release_file)
                release_metadata = PublishedMetadata(
                    relative_path=release_path,
                    publication=publication,
                    file=File(open(release_path, 'rb')),
                )
                release_metadata.save()
            if publisher.structured:
                raise NotImplementedError(
                    "Structured publishing is not yet implemented.")
    log.info(_('Publication: {publication} created').format(
        publication=publication.pk))
def publish_artifacts(self, content, prefix=""):
    """
    Publish artifacts.

    Args:
        content (pulpcore.plugin.models.Content): content set.
        prefix (str): a relative path prefix for the published artifact
    """
    published_artifacts = []

    # Special case for Packages
    contentartifact_qs = (ContentArtifact.objects.filter(
        content__in=content).filter(
            content__pulp_type=Package.get_pulp_type()).select_related(
                "content__rpm_package__time_build"))

    rel_path_mapping = defaultdict(list)
    # Some Suboptimal Repos have the 'same' artifact living in multiple places.
    # Specifically, the same NEVRA, in more than once place, **with different checksums**
    # (since if all that was different was location_href there would be only one
    # ContentArtifact in the first place).
    #
    # pulp_rpm wants to publish a 'canonical' repository-layout, under which an RPM
    # "name-version-release-arch" appears at "Packages/n/name-version-release-arch.rpm".
    # Because the assumption is that Packages don't "own" their path, only the filename
    # is kept as relative_path.
    #
    # In this case, we have to pick one - which is essentially what the rest of the RPM
    # Ecosystem does when faced with the impossible. This code takes the one with the
    # most recent build time which is the same heuristic used by Yum/DNF/Zypper.
    #
    # Note that this only impacts user-created publications, which produce the "standard"
    # RPM layout of repo/Packages/f/foo.rpm. A publication created by mirror-sync retains
    # whatever layout their "upstream" repo-metadata dictates.
    fields = ["pk", "relative_path", "content__rpm_package__time_build"]
    for content_artifact in contentartifact_qs.values(*fields).iterator():
        relative_path = content_artifact["relative_path"]
        time_build = content_artifact["content__rpm_package__time_build"]
        relative_path = os.path.join(prefix, PACKAGES_DIRECTORY,
                                     relative_path.lower()[0], relative_path)
        # Group candidates by destination path so duplicates can be resolved.
        rel_path_mapping[relative_path].append(
            (content_artifact["pk"], time_build))

    for rel_path, content_artifacts in rel_path_mapping.items():
        # sort the content artifacts by when the package was built
        if len(content_artifacts) > 1:
            content_artifacts.sort(key=lambda p: p[1], reverse=True)
            log.warning(
                "Duplicate packages found competing for {path}, selected the one with "
                "the most recent build time, excluding {others} others.".
                format(path=rel_path, others=len(content_artifacts[1:])))
        # Only add the first one (the one with the highest build time)
        published_artifacts.append(
            PublishedArtifact(
                relative_path=rel_path,
                publication=self.publication,
                content_artifact_id=content_artifacts[0][0],
            ))

    # Handle everything else
    is_treeinfo = Q(relative_path__in=["treeinfo", ".treeinfo"])
    unpublishable_types = Q(content__pulp_type__in=[
        RepoMetadataFile.get_pulp_type(),
        Modulemd.get_pulp_type(),
        ModulemdDefaults.get_pulp_type(),
        # already dealt with
        Package.get_pulp_type(),
    ])
    contentartifact_qs = (ContentArtifact.objects.filter(
        content__in=content).exclude(unpublishable_types).exclude(
            is_treeinfo))
    for content_artifact in contentartifact_qs.values(
            "pk", "relative_path").iterator():
        published_artifacts.append(
            PublishedArtifact(
                relative_path=content_artifact["relative_path"],
                publication=self.publication,
                content_artifact_id=content_artifact["pk"],
            ))

    PublishedArtifact.objects.bulk_create(published_artifacts, batch_size=2000)