Example 1
    def put(self, request, path, pk=None):
        """
        Create a blob from uploaded chunks.
        """
        _, repository = self.get_dr_push(request, path)

        digest = request.query_params["digest"]
        upload = models.Upload.objects.get(pk=pk, repository=repository)
        chunks = UploadChunk.objects.filter(upload=upload).order_by("offset")

        with NamedTemporaryFile("ab") as temp_file:
            for chunk in chunks:
                temp_file.write(chunk.file.read())
            temp_file.flush()

            uploaded_file = PulpTemporaryUploadedFile.from_file(File(open(temp_file.name, "rb")))

        if uploaded_file.hashers["sha256"].hexdigest() == digest[len("sha256:") :]:
            try:
                artifact = Artifact.init_and_validate(uploaded_file)
                artifact.save()
            except IntegrityError:
                artifact = Artifact.objects.get(sha256=artifact.sha256)
            try:
                blob = models.Blob(digest=digest,
                                   media_type=models.MEDIA_TYPE.REGULAR_BLOB)
                blob.save()
            except IntegrityError:
                blob = models.Blob.objects.get(digest=digest)
            try:
                blob_artifact = ContentArtifact(artifact=artifact,
                                                content=blob,
                                                relative_path=digest)
                blob_artifact.save()
            except IntegrityError:
                pass

            with repository.new_version() as new_version:
                new_version.add_content(models.Blob.objects.filter(pk=blob.pk))

            upload.delete()

            return BlobResponse(blob, path, 201, request)
        else:
            raise Exception("The digest did not match")
Example 2
    def put(self, request, path, pk=None):
        """Handles creation of Uploads."""
        _, repository = self.get_dr_push(request, path)

        digest = request.query_params["digest"]
        upload = models.Upload.objects.get(pk=pk, repository=repository)

        if upload.sha256 == digest[len("sha256:"):]:
            try:
                artifact = Artifact(
                    file=upload.file.name,
                    md5=upload.md5,
                    sha1=upload.sha1,
                    sha256=upload.sha256,
                    sha384=upload.sha384,
                    sha512=upload.sha512,
                    size=upload.file.size,
                )
                artifact.save()
            except IntegrityError:
                artifact = Artifact.objects.get(sha256=artifact.sha256)
            try:
                blob = models.Blob(digest=digest,
                                   media_type=models.MEDIA_TYPE.REGULAR_BLOB)
                blob.save()
            except IntegrityError:
                blob = models.Blob.objects.get(digest=digest)
            try:
                blob_artifact = ContentArtifact(artifact=artifact,
                                                content=blob,
                                                relative_path=digest)
                blob_artifact.save()
            except IntegrityError:
                pass

            with repository.new_version() as new_version:
                new_version.add_content(models.Blob.objects.filter(pk=blob.pk))

            upload.delete()

            return BlobResponse(blob, path, 201, request)
        else:
            raise Exception("The digest did not match")
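
Both examples make Blob and ContentArtifact creation idempotent with the save-then-catch-IntegrityError idiom, so a concurrent request that already stored the row does not fail the upload. As a design note, the Blob branch could also be expressed with Django's get_or_create; a rough sketch, assuming the `models` module and `digest` from the surrounding view:

    # Fetch an existing Blob with this digest, or create it in one call;
    # get_or_create covers the same duplicate-row race as the except clause.
    blob, created = models.Blob.objects.get_or_create(
        digest=digest,
        defaults={"media_type": models.MEDIA_TYPE.REGULAR_BLOB},
    )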
Example 3
    def put(self, request, path, pk=None):
        """
        Create a blob from uploaded chunks.
        """
        _, repository = self.get_dr_push(request, path)

        digest = request.query_params["digest"]
        # Check whether the client came back after we told it to back off with the ``Throttled``
        # exception. In that case we answer based on the task state, or make it back off again.
        # This mechanism seems to work with podman but not with docker. However, we let the task
        # run anyway, since all clients will issue a HEAD request before attempting to upload a
        # blob again.
        try:
            upload = models.Upload.objects.get(pk=pk, repository=repository)
        except models.Upload.DoesNotExist as e_upload:
            # Upload has been deleted => task has started or even finished
            # ``last()`` returns None instead of raising when no matching task exists,
            # so check for None explicitly rather than catching Task.DoesNotExist.
            task = Task.objects.filter(
                name__endswith="add_and_remove",
                reserved_resources_record__resource=f"upload:{pk}",
            ).last()
            if task is None:
                # No upload and no task for it => the upload probably never existed
                # return 404
                raise e_upload

            if task.state == "completed":
                task.delete()
                blob = models.Blob.objects.get(digest=digest)
                return BlobResponse(blob, path, 201, request)
            elif task.state in ["waiting", "running"]:
                raise Throttled()
            else:
                error = task.error
                task.delete()
                raise Exception(str(error))

        chunks = UploadChunk.objects.filter(upload=upload).order_by("offset")

        with NamedTemporaryFile("ab") as temp_file:
            for chunk in chunks:
                temp_file.write(chunk.file.read())
            temp_file.flush()

            uploaded_file = PulpTemporaryUploadedFile.from_file(File(open(temp_file.name, "rb")))

        if uploaded_file.hashers["sha256"].hexdigest() == digest[len("sha256:") :]:
            try:
                artifact = Artifact.init_and_validate(uploaded_file)
                artifact.save()
            except IntegrityError:
                artifact = Artifact.objects.get(sha256=artifact.sha256)
            try:
                blob = models.Blob(digest=digest, media_type=models.MEDIA_TYPE.REGULAR_BLOB)
                blob.save()
            except IntegrityError:
                blob = models.Blob.objects.get(digest=digest)
            try:
                blob_artifact = ContentArtifact(
                    artifact=artifact, content=blob, relative_path=digest
                )
                blob_artifact.save()
            except IntegrityError:
                pass

            upload.delete()

            dispatched_task = dispatch(
                add_and_remove,
                [f"upload:{pk}", repository],
                kwargs={
                    "repository_pk": str(repository.pk),
                    "add_content_units": [str(blob.pk)],
                    "remove_content_units": [],
                },
            )

            # Wait a small amount of time
            for dummy in range(3):
                time.sleep(1)
                task = Task.objects.get(pk=dispatched_task.pk)
                if task.state == "completed":
                    task.delete()
                    return BlobResponse(blob, path, 201, request)
                elif task.state in ["waiting", "running"]:
                    continue
                else:
                    error = task.error
                    task.delete()
                    raise Exception(str(error))
            raise Throttled()
        else:
            raise Exception("The digest did not match")
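
Unlike the first two examples, this version defers the repository update to a dispatched `add_and_remove` task, polls it briefly, and otherwise raises `Throttled`, which Django REST Framework renders as HTTP 429, so the client is expected to back off and retry as the inline comment describes. A hypothetical client-side sketch of that retry loop (the function name, URL handling, and timing are illustrative only):

    import time

    import requests

    def put_with_backoff(session_url, digest, attempts=5, delay=2.0):
        # Repeat the final PUT while the registry keeps answering 429,
        # which is how the ``Throttled`` exception above reaches the client.
        for _ in range(attempts):
            response = requests.put(session_url, params={"digest": digest})
            if response.status_code != 429:
                response.raise_for_status()
                return response
            time.sleep(delay)
        raise RuntimeError("registry kept throttling the blob upload")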