def set_requires_ansible_and_manifest_and_files_json(apps, schema_editor):
    """
    Backfill ``requires_ansible``, ``manifest`` and ``files`` on every CollectionVersion.

    For each stored collection version the backing tarball is opened and three
    optional metadata files are extracted (``raise_exc=False`` — a missing file
    simply leaves the corresponding field untouched):

    * ``meta/runtime.yml`` -> ``requires_ansible`` (invalid YAML is logged and skipped)
    * ``MANIFEST.json``    -> ``manifest``         (invalid JSON is logged and skipped)
    * ``FILES.json``       -> ``files``            (invalid JSON is logged and skipped)

    Each row is saved individually after its tarball has been processed.
    """
    CollectionVersion = apps.get_model("ansible", "CollectionVersion")
    # .iterator() streams rows instead of caching the entire table in memory,
    # which matters for installs with many collection versions.
    for collection_version in CollectionVersion.objects.all().iterator():
        artifact = collection_version.contentartifact_set.get().artifact
        with artifact.file.open() as artifact_file, tarfile.open(
            fileobj=artifact_file, mode="r"
        ) as tar:
            runtime_metadata = get_file_obj_from_tarball(
                tar, "meta/runtime.yml", artifact.file.name, raise_exc=False
            )
            if runtime_metadata:
                try:
                    runtime_yaml = yaml.safe_load(runtime_metadata)
                except YAMLError:
                    log.warning(
                        "CollectionVersion: {namespace}.{name}-{version} - meta/runtime.yml is invalid yaml".format(
                            namespace=collection_version.namespace,
                            name=collection_version.name,
                            version=collection_version.version,
                        )
                    )
                else:
                    # BUGFIX: an empty-but-present runtime.yml parses to None;
                    # calling .get() on None previously raised AttributeError.
                    if runtime_yaml:
                        collection_version.requires_ansible = runtime_yaml.get(
                            "requires_ansible"
                        )

            manifest = get_file_obj_from_tarball(
                tar, "MANIFEST.json", artifact.file.name, raise_exc=False
            )
            if manifest:
                try:
                    collection_version.manifest = json.load(manifest)
                except JSONDecodeError:
                    log.warning(
                        "CollectionVersion: {namespace}.{name}-{version} - MANIFEST.json is invalid json".format(
                            namespace=collection_version.namespace,
                            name=collection_version.name,
                            version=collection_version.version,
                        )
                    )

            files = get_file_obj_from_tarball(
                tar, "FILES.json", artifact.file.name, raise_exc=False
            )
            if files:
                try:
                    collection_version.files = json.load(files)
                except JSONDecodeError:
                    log.warning(
                        "CollectionVersion: {namespace}.{name}-{version} - FILES.json is invalid json".format(
                            namespace=collection_version.namespace,
                            name=collection_version.name,
                            version=collection_version.version,
                        )
                    )

            collection_version.save()
def _extract_manifest():
    """
    Return the raw bytes of MANIFEST.json from the collection's artifact tarball.

    Reads the artifact backing ``collection_version`` (closure variable) via
    ``storage`` (closure variable). ``get_file_obj_from_tarball`` raises if
    MANIFEST.json is absent from the tarball.
    """
    cartifact = collection_version.contentartifact_set.select_related("artifact").first()
    artifact_name = cartifact.artifact.file.name
    artifact_file = storage.open(artifact_name)
    try:
        with tarfile.open(fileobj=artifact_file, mode="r") as tar:
            manifest = get_file_obj_from_tarball(tar, "MANIFEST.json", artifact_name)
            return manifest.read()
    finally:
        # BUGFIX: the storage file handle was previously never closed (leak).
        artifact_file.close()
async def _post_save(self, batch):
    """
    Save a batch of CollectionVersion, Tag objects.

    Args:
        batch (list of :class:`~pulpcore.plugin.stages.DeclarativeContent`): The batch of
            :class:`~pulpcore.plugin.stages.DeclarativeContent` objects to be saved.
    """
    for d_content in batch:
        # Skip empty slots and any content that is not a CollectionVersion.
        if d_content is None:
            continue
        if not isinstance(d_content.content, CollectionVersion):
            continue
        collection_version = d_content.content

        # docs_blob is computed by an earlier pipeline stage and stashed in extra_data.
        docs_blob = d_content.extra_data.get("docs_blob", {})
        if docs_blob:
            collection_version.docs_blob = docs_blob

        for d_artifact in d_content.d_artifacts:
            artifact = d_artifact.artifact
            with artifact.file.open() as artifact_file, tarfile.open(
                fileobj=artifact_file, mode="r"
            ) as tar:
                # meta/runtime.yml is optional (raise_exc=False).
                runtime_metadata = get_file_obj_from_tarball(
                    tar, "meta/runtime.yml", artifact.file.name, raise_exc=False
                )
                if runtime_metadata:
                    runtime_yaml = yaml.safe_load(runtime_metadata)
                    # BUGFIX: an empty meta/runtime.yml parses to None; guard
                    # before calling .get() on it (previously AttributeError).
                    if runtime_yaml:
                        collection_version.requires_ansible = runtime_yaml.get(
                            "requires_ansible"
                        )

                # MANIFEST.json and FILES.json are mandatory — missing files raise.
                manifest_data = json.load(
                    get_file_obj_from_tarball(tar, "MANIFEST.json", artifact.file.name)
                )
                files_data = json.load(
                    get_file_obj_from_tarball(tar, "FILES.json", artifact.file.name)
                )
            collection_version.manifest = manifest_data
            collection_version.files = files_data
            info = manifest_data["collection_info"]

            # Create the tags
            for name in info.pop("tags"):
                tag, _ = Tag.objects.get_or_create(name=name)
                collection_version.tags.add(tag)

            # Remove fields not used by this model
            info.pop("license_file")
            info.pop("readme")

            # Update with the additional data from the Collection
            for attr_name, attr_value in info.items():
                if attr_value is None:
                    continue
                setattr(collection_version, attr_name, attr_value)

            _update_highest_version(collection_version)

            collection_version.save()
def verify_signature_upload(data):
    """
    The task code for verifying collection signature upload.

    Verifies the uploaded detached signature (``data["file"]``) against the
    MANIFEST.json extracted from the signed collection's artifact. When the
    target repository has a keyring attached, any failed or insufficiently
    trusted verification raises a ValidationError; without a keyring, only a
    signature yielding no fingerprint at all is rejected.

    Returns:
        dict: ``data`` augmented with the raw signature bytes (``data``), its
        sha256 digest (``digest``), and the signing key's fingerprint
        (``pubkey_fingerprint``).
    """
    file = data["file"]
    sig_data = file.read()
    file.seek(0)  # rewind so gpg.verify_file can re-read the signature below

    collection = data["signed_collection"]
    keyring = None
    if (repository := data.get("repository")) and repository.keyring:
        keyring = repository.keyring
    gpg = gnupg.GPG(keyring=keyring)

    artifact = (
        collection.contentartifact_set.select_related("artifact").first().artifact.file.name
    )
    artifact_file = storage.open(artifact)
    try:
        with tarfile.open(fileobj=artifact_file, mode="r") as tar:
            # BUGFIX: pass the artifact *name* (not the open file object) as the
            # tarball identifier, consistent with every other call site.
            manifest = get_file_obj_from_tarball(tar, "MANIFEST.json", artifact)
            # NOTE(review): written relative to the current working directory —
            # assumes the task runs in a private scratch dir; confirm.
            with open("MANIFEST.json", mode="wb") as m:
                m.write(manifest.read())
            # BUGFIX: verify only after the manifest file is closed/flushed;
            # previously gpg could read a partially written file.
            verified = gpg.verify_file(file, m.name)
    finally:
        # BUGFIX: the storage file handle was previously never closed (leak).
        artifact_file.close()

    if verified.trust_level is None or verified.trust_level < verified.TRUST_FULLY:
        # Skip verification if repository isn't specified, or it doesn't have a keyring attached
        if verified.fingerprint is None or keyring is not None:
            raise serializers.ValidationError(
                _("Signature verification failed: {}").format(verified.status)
            )

    data["data"] = sig_data
    data["digest"] = file.hashers["sha256"].hexdigest()
    data["pubkey_fingerprint"] = verified.fingerprint
    return data
def import_collection(
    temp_file_pk,
    repository_pk=None,
    expected_namespace=None,
    expected_name=None,
    expected_version=None,
):
    """
    Create a Collection from an uploaded artifact and optionally validate its expected metadata.

    This task provides optional validation of the `namespace`, `name`, and `version` metadata
    attributes. If the Artifact fails validation or parsing, the Artifact is deleted and the
    Collection is not created.

    This task performs a CollectionImport object get_or_create() to allow import messages to be
    logged.

    Args:
        temp_file_pk (str): The pk of the PulpTemporaryFile to create the Collection from.

    Keyword Args:
        repository_pk (str): Optional. If specified, a new RepositoryVersion will be created for the
            Repository and any new Collection content associated with it.
        expected_namespace (str): Optional. The namespace is validated against the namespace
            specified in the Collection's metadata. If it does not match a ImporterError is
            raised.
        expected_name (str): Optional. The name is validated against the name specified in the
            Collection's metadata. If it does not match a ImporterError is raised.
        expected_version (str): Optional. The version is validated against the version specified in
            the Collection's metadata. If it does not match a ImporterError is raised.

    Raises:
        ImporterError: If the `expected_namespace`, `expected_name`, or `expected_version` do not
            match the metadata in the tarball.
    """
    # get_or_create so that import messages can be attached to this task's import record.
    CollectionImport.objects.get_or_create(task_id=Task.current().pulp_id)

    temp_file = PulpTemporaryFile.objects.get(pk=temp_file_pk)
    filename = CollectionFilename(expected_namespace, expected_name, expected_version)
    log.info(f"Processing collection from {temp_file.file.name}")
    # Messages on this logger are surfaced to the importing user via CollectionImport.
    user_facing_logger = logging.getLogger("pulp_ansible.app.tasks.collection.import_collection")

    try:
        with temp_file.file.open() as artifact_file:
            # MANIFEST.json and FILES.json are mandatory here (no raise_exc=False):
            # a tarball missing either fails the import.
            with tarfile.open(fileobj=artifact_file, mode="r") as tar:
                manifest_data = json.load(
                    get_file_obj_from_tarball(tar, "MANIFEST.json", temp_file.file.name)
                )
                files_data = json.load(
                    get_file_obj_from_tarball(tar, "FILES.json", temp_file.file.name)
                )
            url = _get_backend_storage_url(artifact_file)
            # Rewind before handing the file to the importer — reading the tar
            # above (and possibly the URL helper) advanced the file position.
            artifact_file.seek(0)
            importer_result = process_collection(
                artifact_file, filename=filename, file_url=url, logger=user_facing_logger
            )

        artifact = Artifact.from_pulp_temporary_file(temp_file)
        importer_result["artifact_url"] = reverse("artifacts-detail", args=[artifact.pk])
        collection_version = create_collection_from_importer(importer_result)
        # Persist the raw metadata documents alongside the importer-derived fields.
        collection_version.manifest = manifest_data
        collection_version.files = files_data
        collection_version.save()
    except ImporterError as exc:
        # Expected validation failure: log at info level for the operator,
        # discard the uploaded file, and re-raise so the task fails.
        log.info(f"Collection processing was not successful: {exc}")
        temp_file.delete()
        raise
    except Exception as exc:
        # Unexpected failure: surface it to the importing user before cleanup.
        user_facing_logger.error(f"Collection processing was not successful: {exc}")
        temp_file.delete()
        raise

    # Success path: wire the new CollectionVersion to its stored Artifact and
    # record both as resources created by this task.
    ContentArtifact.objects.create(
        artifact=artifact,
        content=collection_version,
        relative_path=collection_version.relative_path,
    )
    CreatedResource.objects.create(content_object=collection_version)

    if repository_pk:
        repository = AnsibleRepository.objects.get(pk=repository_pk)
        content_q = CollectionVersion.objects.filter(pk=collection_version.pk)
        # new_version() commits the RepositoryVersion when the context exits cleanly.
        with repository.new_version() as new_version:
            new_version.add_content(content_q)
        CreatedResource.objects.create(content_object=repository)