def _save_manifest(self, artifact, manifest_digest, content_type, config_blob=None):
    """
    Persist a Manifest and its ContentArtifact, tolerating concurrent writers.

    On a digest collision the already-stored manifest is fetched and touched.
    The ContentArtifact is likewise created-or-adopted so the uploaded file
    ends up attached to the manifest either way.
    """
    manifest = models.Manifest(
        digest=manifest_digest,
        schema_version=2,
        media_type=content_type,
        config_blob=config_blob,
    )
    try:
        manifest.save()
    except IntegrityError:
        # Another request stored the same digest first; reuse its row.
        manifest = models.Manifest.objects.get(digest=manifest.digest)
        manifest.touch()

    content_artifact = ContentArtifact(
        artifact=artifact, content=manifest, relative_path=manifest.digest
    )
    try:
        content_artifact.save()
    except IntegrityError:
        content_artifact = ContentArtifact.objects.get(
            content=manifest, relative_path=manifest.digest
        )
        if not content_artifact.artifact:
            # Existing association has no file attached yet; adopt ours.
            content_artifact.artifact = artifact
            content_artifact.save(update_fields=["artifact"])
    return manifest
def artifact(self, artifact):
    """Associate *artifact* with this saved role version via a ContentArtifact."""
    if not self.pk:
        return
    rel_path = "{}/{}/{}.tar.gz".format(
        self.role.namespace, self.role.name, self.version
    )
    ContentArtifact(artifact=artifact, content=self, relative_path=rel_path).save()
def put(self, request, path, pk=None):
    """
    Responds with the actual manifest
    """
    _, repository = self.get_dr_push(request, path)
    # Read the uploaded manifest body straight off the WSGI input stream.
    chunk = request.META["wsgi.input"]
    artifact = self.receive_artifact(chunk)
    with storage.open(artifact.file.name) as artifact_file:
        raw_data = artifact_file.read()
    content_data = json.loads(raw_data)
    # The config blob must already exist from an earlier blob upload.
    config_blob = models.Blob.objects.get(
        digest=content_data.get("config").get("digest")
    )
    manifest = models.Manifest(
        digest="sha256:{id}".format(id=artifact.sha256),
        schema_version=2,
        media_type=request.content_type,
        config_blob=config_blob,
    )
    try:
        manifest.save()
    except IntegrityError:
        # Same digest already stored; reuse the existing manifest.
        manifest = models.Manifest.objects.get(digest=manifest.digest)
    content_artifact = ContentArtifact(
        artifact=artifact, content=manifest, relative_path=manifest.digest
    )
    try:
        content_artifact.save()
    except IntegrityError:
        pass
    # Relate every referenced layer blob to the manifest.
    layer_digests = [layer.get("digest") for layer in content_data.get("layers")]
    through_rows = [
        models.BlobManifest(manifest=manifest, manifest_blob=blob)
        for blob in models.Blob.objects.filter(digest__in=layer_digests)
    ]
    models.BlobManifest.objects.bulk_create(
        objs=through_rows, ignore_conflicts=True, batch_size=1000
    )
    tag = models.Tag(name=pk, tagged_manifest=manifest)
    try:
        tag.save()
    except IntegrityError:
        pass
    # Retag: drop any older same-named tag, then add the new tag/manifest pair.
    with repository.new_version() as new_version:
        new_version.add_content(models.Manifest.objects.filter(digest=manifest.digest))
        new_version.remove_content(models.Tag.objects.filter(name=tag.name))
        new_version.add_content(
            models.Tag.objects.filter(name=tag.name, tagged_manifest=manifest)
        )
    return ManifestResponse(manifest, path, request, status=201)
def artifact(self, artifact):
    """
    Set the artifact for this FileContent.
    """
    if not self.pk:
        return
    ContentArtifact(
        artifact=artifact,
        content=self,
        relative_path=self.relative_path,
    ).save()
def artifact(self, artifact):
    """
    Set the artifact for this Ansible Role version.
    """
    if self.pk:
        tarball_path = "{namespace}/{name}/{version}.tar.gz".format(
            namespace=self.role.namespace,
            name=self.role.name,
            version=self.version,
        )
        association = ContentArtifact(
            artifact=artifact, content=self, relative_path=tarball_path
        )
        association.save()
def save(self):
    """
    Update the DB:
    - Create (or fetch) the Artifact.
    - Create (or fetch) the ContentArtifact.
    - Create (or update) the RemoteArtifact.
    """
    # Persist the downloaded Artifact, if one was staged. On a uniqueness
    # collision another writer saved it first — adopt that row instead.
    if self._stored_model:
        try:
            with transaction.atomic():
                self._stored_model.save()
        except IntegrityError:
            q = self.artifact_q()
            self._stored_model = Artifact.objects.get(q)
    # Create the ContentArtifact linking content to the artifact file.
    try:
        with transaction.atomic():
            content_artifact = ContentArtifact(
                relative_path=self.relative_path,
                content=self.content.stored_model,
                artifact=self._stored_model)
            content_artifact.save()
    except IntegrityError:
        # Association already exists; re-fetch it, and when we do have a
        # stored Artifact, attach it to the existing row.
        content_artifact = ContentArtifact.objects.get(
            relative_path=self.relative_path,
            content=self.content.stored_model)
        if self._stored_model:
            content_artifact.artifact = self._stored_model
            content_artifact.save()
    # Collect every digest field declared on Artifact for the RemoteArtifact.
    digests = {f: getattr(self._model, f) for f in Artifact.DIGEST_FIELDS}
    # Create the RemoteArtifact; on conflict, update the existing row in place.
    try:
        with transaction.atomic():
            remote_artifact = RemoteArtifact(
                url=self.url,
                remote=self.remote,
                content_artifact=content_artifact,
                size=self._model.size,
                **digests)
            remote_artifact.save()
    except IntegrityError:
        q_set = RemoteArtifact.objects.filter(
            remote=self.remote, content_artifact=content_artifact)
        q_set.update(
            url=self.url,
            size=self._model.size,
            **digests)
def put(self, request, path, pk=None):
    """
    Create a blob from uploaded chunks.
    """
    _, repository = self.get_dr_push(request, path)
    # The client declares the expected digest ("sha256:<hex>") as a query param.
    digest = request.query_params["digest"]
    upload = models.Upload.objects.get(pk=pk, repository=repository)
    chunks = UploadChunk.objects.filter(upload=upload).order_by("offset")
    # Stitch the ordered chunks back together into one temporary file.
    with NamedTemporaryFile("ab") as temp_file:
        for chunk in chunks:
            temp_file.write(chunk.file.read())
        temp_file.flush()
        # NOTE(review): the file handle opened here is never explicitly
        # closed — presumably PulpTemporaryUploadedFile takes ownership of
        # it; confirm, otherwise this leaks a descriptor per request.
        uploaded_file = PulpTemporaryUploadedFile.from_file(
            File(open(temp_file.name, "rb")))
    # Verify the assembled data hashes to the digest the client claimed.
    if uploaded_file.hashers["sha256"].hexdigest() == digest[len("sha256:"):]:
        try:
            artifact = Artifact.init_and_validate(uploaded_file)
            artifact.save()
        except IntegrityError:
            # Identical artifact already stored; reuse it.
            artifact = Artifact.objects.get(sha256=artifact.sha256)
        try:
            blob = models.Blob(digest=digest, media_type=models.MEDIA_TYPE.REGULAR_BLOB)
            blob.save()
        except IntegrityError:
            blob = models.Blob.objects.get(digest=digest)
        try:
            blob_artifact = ContentArtifact(artifact=artifact, content=blob,
                                            relative_path=digest)
            blob_artifact.save()
        except IntegrityError:
            # Blob is already associated with an artifact; nothing to do.
            pass
        with repository.new_version() as new_version:
            new_version.add_content(models.Blob.objects.filter(pk=blob.pk))
        # The chunked upload has served its purpose.
        upload.delete()
        return BlobResponse(blob, path, 201, request)
    else:
        raise Exception("The digest did not match")
def put(self, request, path, pk=None):
    """Handles creation of Uploads."""
    _, repository = self.get_dr_push(request, path)
    digest = request.query_params["digest"]
    upload = models.Upload.objects.get(pk=pk, repository=repository)
    # Guard clause: reject outright when the declared digest disagrees.
    if upload.sha256 != digest[len("sha256:"):]:
        raise Exception("The digest did not match")
    try:
        artifact = Artifact(
            file=upload.file.name,
            md5=upload.md5,
            sha1=upload.sha1,
            sha256=upload.sha256,
            sha384=upload.sha384,
            sha512=upload.sha512,
            size=upload.file.size,
        )
        artifact.save()
    except IntegrityError:
        # An identical artifact already exists; adopt it.
        artifact = Artifact.objects.get(sha256=artifact.sha256)
    try:
        blob = models.Blob(digest=digest, media_type=models.MEDIA_TYPE.REGULAR_BLOB)
        blob.save()
    except IntegrityError:
        blob = models.Blob.objects.get(digest=digest)
    try:
        ContentArtifact(artifact=artifact, content=blob, relative_path=digest).save()
    except IntegrityError:
        pass
    with repository.new_version() as new_version:
        new_version.add_content(models.Blob.objects.filter(pk=blob.pk))
    upload.delete()
    return BlobResponse(blob, path, 201, request)
def create_content_artifacts(self, dc):
    """
    Create ContentArtifacts to associate saved Content to saved Artifacts.

    Args:
        dc (class:`~pulpcore.plugin.stages.DeclarativeContent`): Object
            containing Content and Artifacts to relate.
    """
    for declarative_artifact in dc.d_artifacts:
        content_artifact = ContentArtifact(
            content=dc.content,
            artifact=declarative_artifact.artifact,
            relative_path=declarative_artifact.relative_path,
        )
        try:
            content_artifact.save()
        except IntegrityError:
            # Association already exists; fetch it instead of creating.
            content_artifact = ContentArtifact.objects.get(
                content=dc.content,
                artifact=declarative_artifact.artifact,
                relative_path=declarative_artifact.relative_path,
            )
        artifact = declarative_artifact.artifact
        new_remote_artifact = RemoteArtifact(
            content_artifact=content_artifact,
            url=declarative_artifact.url,
            size=artifact.size,
            md5=artifact.md5,
            sha1=artifact.sha1,
            sha224=artifact.sha224,
            sha256=artifact.sha256,
            sha384=artifact.sha384,
            sha512=artifact.sha512,
            remote=declarative_artifact.remote,
        )
        try:
            new_remote_artifact.save()
        except IntegrityError:
            # A matching RemoteArtifact is already on record.
            pass
def create(self, validated_data):
    """
    Create a Package.

    Overriding default create() to deal with artifact properly.

    Args:
        validated_data (dict): Data used to create the Package

    Returns:
        models.Package: The created Package
    """
    artifact = validated_data.pop('artifact')
    package = Package.objects.create(**validated_data)
    content_artifact = ContentArtifact(
        artifact=artifact,
        content=package,
        relative_path=package.filename,
    )
    content_artifact.save()
    return package
def put(self, request, path, pk=None):
    """
    Responds with the actual manifest

    Saves the uploaded manifest together with its config blob and layer
    references, dispatches a task to (re)tag it in the repository, and
    answers synchronously when the task finishes quickly; otherwise the
    client is asked to back off via ``Throttled``.
    """
    _, repository = self.get_dr_push(request, path)
    # Read the raw manifest JSON from the WSGI input stream.
    chunk = request.META["wsgi.input"]
    artifact = self.receive_artifact(chunk)
    with storage.open(artifact.file.name) as artifact_file:
        raw_data = artifact_file.read()
    content_data = json.loads(raw_data)
    config_layer = content_data.get("config")
    # The config blob must have been uploaded before the manifest.
    config_blob = models.Blob.objects.get(digest=config_layer.get("digest"))
    manifest = models.Manifest(
        digest="sha256:{id}".format(id=artifact.sha256),
        schema_version=2,
        media_type=request.content_type,
        config_blob=config_blob,
    )
    try:
        manifest.save()
    except IntegrityError:
        # Same digest already stored by a concurrent request; reuse it.
        manifest = models.Manifest.objects.get(digest=manifest.digest)
    ca = ContentArtifact(
        artifact=artifact, content=manifest, relative_path=manifest.digest
    )
    try:
        ca.save()
    except IntegrityError:
        pass
    # Relate every referenced layer blob to the manifest.
    layers = content_data.get("layers")
    blobs = [layer.get("digest") for layer in layers]
    blobs_qs = models.Blob.objects.filter(digest__in=blobs)
    thru = [models.BlobManifest(manifest=manifest, manifest_blob=blob) for blob in blobs_qs]
    models.BlobManifest.objects.bulk_create(objs=thru, ignore_conflicts=True, batch_size=1000)
    tag = models.Tag(name=pk, tagged_manifest=manifest)
    try:
        tag.save()
    except IntegrityError:
        tag = models.Tag.objects.get(name=tag.name, tagged_manifest=manifest)
    # Same-named tags pointing at a different manifest must be removed.
    # FIX: filter the CharField on the tag's name string, not the Tag instance.
    tags_to_remove = models.Tag.objects.filter(
        pk__in=repository.latest_version().content.all(), name=tag.name
    ).exclude(tagged_manifest=manifest)
    dispatched_task = dispatch(
        add_and_remove,
        [repository],
        kwargs={
            "repository_pk": str(repository.pk),
            "add_content_units": [str(tag.pk), str(manifest.pk)],
            # FIX: without flat=True, values_list yields 1-tuples and str()
            # would produce e.g. "(UUID('...'),)" instead of a pk string.
            "remove_content_units": [
                str(pk) for pk in tags_to_remove.values_list("pk", flat=True)
            ],
        },
    )
    # Wait a small amount of time so fast tasks get a synchronous answer.
    for dummy in range(3):
        time.sleep(1)
        task = Task.objects.get(pk=dispatched_task.pk)
        if task.state == "completed":
            task.delete()
            return ManifestResponse(manifest, path, request, status=201)
        elif task.state in ["waiting", "running"]:
            continue
        else:
            error = task.error
            task.delete()
            raise Exception(str(error))
    # Task did not finish in time — tell the client to retry later.
    raise Throttled()
def put(self, request, path, pk=None):
    """
    Create a blob from uploaded chunks.
    """
    _, repository = self.get_dr_push(request, path)
    # Expected digest ("sha256:<hex>") declared by the client.
    digest = request.query_params["digest"]
    # Try to see if the client came back after we told it to backoff with the ``Throttled``
    # exception. In that case we answer based on the task state, or make it backoff again.
    # This mechanism seems to work with podman but not with docker. However we let the task run
    # anyway, since all clients will look with a HEAD request before attemting to upload a blob
    # again.
    try:
        upload = models.Upload.objects.get(pk=pk, repository=repository)
    except models.Upload.DoesNotExist as e_upload:
        # Upload has been deleted => task has started or even finished
        try:
            # NOTE(review): .filter(...).last() returns None rather than
            # raising DoesNotExist, so this except branch likely never
            # fires and a missing task would crash on task.state below —
            # confirm the intended behavior.
            task = Task.objects.filter(
                name__endswith="add_and_remove",
                reserved_resources_record__resource=f"upload:{pk}",
            ).last()
        except Task.DoesNotExist:
            # No upload and no task for it => the upload probably never existed
            # return 404
            raise e_upload
        if task.state == "completed":
            task.delete()
            blob = models.Blob.objects.get(digest=digest)
            return BlobResponse(blob, path, 201, request)
        elif task.state in ["waiting", "running"]:
            raise Throttled()
        else:
            error = task.error
            task.delete()
            raise Exception(str(error))
    # Reassemble the chunked upload into a single temporary file.
    chunks = UploadChunk.objects.filter(upload=upload).order_by("offset")
    with NamedTemporaryFile("ab") as temp_file:
        for chunk in chunks:
            temp_file.write(chunk.file.read())
        temp_file.flush()
        # NOTE(review): the handle from open() is never explicitly closed —
        # presumably PulpTemporaryUploadedFile owns it; confirm.
        uploaded_file = PulpTemporaryUploadedFile.from_file(File(open(temp_file.name, "rb")))
    # Verify the assembled data hashes to the claimed digest.
    if uploaded_file.hashers["sha256"].hexdigest() == digest[len("sha256:") :]:
        try:
            artifact = Artifact.init_and_validate(uploaded_file)
            artifact.save()
        except IntegrityError:
            # Identical artifact already stored; reuse it.
            artifact = Artifact.objects.get(sha256=artifact.sha256)
        try:
            blob = models.Blob(digest=digest, media_type=models.MEDIA_TYPE.REGULAR_BLOB)
            blob.save()
        except IntegrityError:
            blob = models.Blob.objects.get(digest=digest)
        try:
            blob_artifact = ContentArtifact(
                artifact=artifact, content=blob, relative_path=digest
            )
            blob_artifact.save()
        except IntegrityError:
            # Blob already has an artifact association.
            pass
        upload.delete()
        # Add the blob to the repository in a task reserving "upload:<pk>".
        dispatched_task = dispatch(
            add_and_remove,
            [f"upload:{pk}", repository],
            kwargs={
                "repository_pk": str(repository.pk),
                "add_content_units": [str(blob.pk)],
                "remove_content_units": [],
            },
        )
        # Wait a small amount of time
        for dummy in range(3):
            time.sleep(1)
            task = Task.objects.get(pk=dispatched_task.pk)
            if task.state == "completed":
                task.delete()
                return BlobResponse(blob, path, 201, request)
            elif task.state in ["waiting", "running"]:
                continue
            else:
                error = task.error
                task.delete()
                raise Exception(str(error))
        # Task still running after the grace period — make the client back off.
        raise Throttled()
    else:
        raise Exception("The digest did not match")
def artifact(self, artifact):
    """Associate *artifact* with this saved content unit."""
    if not self.pk:
        return
    link = ContentArtifact(
        artifact=artifact,
        content=self,
        relative_path=self.relative_path,
    )
    link.save()