def test_read_temp_file(self):
    """Data written through a PulpTemporaryFile can be read back intact."""
    with tempfile.NamedTemporaryFile("ab") as tf:
        tf.write(b"temp file test")
        tf.flush()
        stored = PulpTemporaryFile(file=tf.name)
        stored.save()
        assert b"temp file test" in stored.file.read()
def test_storage_location(self):
    """Saved temporary files land under tmp/files/ on the local FileSystem backend."""
    # Path layout is backend-specific, so only check it for local storage.
    if settings.DEFAULT_FILE_STORAGE != "pulpcore.app.models.storage.FileSystem":
        self.skipTest("Skipping test for nonlocal storage.")
    with tempfile.NamedTemporaryFile("ab") as tf:
        stored = PulpTemporaryFile(file=tf.name)
        stored.save()
        # Relative storage name and absolute filesystem path both point at tmp/files.
        assert stored.file.name.startswith("tmp/files/")
        assert stored.file.file.name.startswith("/var/lib/pulp/tmp/files")
def create(self, request):
    """Create a content unit.

    The uploaded file is parked in a PulpTemporaryFile and the actual
    content creation is handed off to an asynchronous task; the response
    points the caller at that task.
    """
    serializer = self.get_serializer(data=request.data)
    serializer.is_valid(raise_exception=True)

    # Copy the request payload and pull the upload out of it; the rest is
    # forwarded verbatim to the task.
    payload = dict(request.data.items())
    uploaded = payload.pop("file", None)
    temp_file = PulpTemporaryFile.init_and_validate(uploaded)
    temp_file.save()

    resources = []
    repository = serializer.validated_data.get("repository")
    if repository:
        resources.append(repository)

    app_label = self.queryset.model._meta.app_label
    task = dispatch(
        tasks.base.general_create_from_temp_file,
        resources,
        args=(app_label, serializer.__class__.__name__, str(temp_file.pk)),
        kwargs={"data": payload, "context": self.get_deferred_context(request)},
    )
    return OperationPostponedResponse(task, request)
def create(self, request):
    """Upload a comps.xml file and create Content from it."""
    serializer = CompsXmlSerializer(data=request.data, context={"request": request})
    serializer.is_valid(raise_exception=True)

    # Park the upload in a PulpTemporaryFile the task can find and use later.
    payload = dict(request.data.items())
    uploaded = payload.pop("file", None)
    temp_file = PulpTemporaryFile.init_and_validate(uploaded)
    temp_file.save()

    # Lock the destination repo (if given) so two uploads can't collide.
    repository = serializer.validated_data.get("repository", None)
    repo_pk = str(repository.pk) if repository else None
    replace = serializer.validated_data.get("replace", False)

    # Kick off the task to do the deed.
    task = dispatch(
        tasks.upload_comps,
        exclusive_resources=[repository] if repository else [],
        args=[str(temp_file.pk), repo_pk, replace],
        kwargs={},
    )
    return OperationPostponedResponse(task, request)
def create(self, request):
    """Dispatch a Collection creation task."""
    serializer = CollectionOneShotSerializer(data=request.data, context={"request": request})
    serializer.is_valid(raise_exception=True)

    # Only enforce a digest check when the caller supplied one.
    expected_digests = {}
    provided_sha256 = serializer.validated_data["sha256"]
    if provided_sha256:
        expected_digests["sha256"] = provided_sha256

    try:
        temp_file = PulpTemporaryFile.init_and_validate(
            serializer.validated_data["file"],
            expected_digests=expected_digests,
        )
    except DigestValidationError:
        raise serializers.ValidationError(
            _("The provided sha256 value does not match the sha256 of the uploaded file.")
        )
    temp_file.save()

    async_result = self._dispatch_import_collection_task(temp_file.pk)
    return OperationPostponedResponse(async_result, request)
def create(self, request, path):
    """Dispatch a Collection creation task."""
    distro = get_object_or_404(AnsibleDistribution, base_path=path)
    serializer = self.get_serializer(data=request.data, context={"request": request})
    serializer.is_valid(raise_exception=True)

    # Only enforce a digest check when the caller supplied one.
    expected_digests = {}
    if serializer.validated_data["sha256"]:
        expected_digests["sha256"] = serializer.validated_data["sha256"]

    try:
        temp_file = PulpTemporaryFile.init_and_validate(
            serializer.validated_data["file"],
            expected_digests=expected_digests,
        )
    except DigestValidationError:
        raise serializers.ValidationError(
            _("The provided sha256 value does not match the sha256 of the uploaded file.")
        )
    temp_file.save()

    # Forward only the expectation hints the caller actually supplied.
    kwargs = {}
    for field in ("expected_namespace", "expected_name", "expected_version"):
        value = serializer.validated_data[field]
        if value:
            kwargs[field] = value

    async_result = self._dispatch_import_collection_task(
        temp_file.pk, distro.repository, **kwargs
    )
    CollectionImport.objects.create(task_id=async_result.id)

    data = {
        "task": reverse(
            "collection-imports-detail",
            kwargs={"path": path, "pk": async_result.id},
            request=None,
        )
    }
    return Response(data, status=http_status.HTTP_202_ACCEPTED)
def post(self, request, path):
    """Queues a task that creates a new Collection from an uploaded artifact."""
    distro = get_object_or_404(AnsibleDistribution, base_path=path)
    serializer = GalaxyCollectionUploadSerializer(
        data=request.data, context={"request": request}
    )
    serializer.is_valid(raise_exception=True)

    # Stash the upload so the import task can retrieve it.
    temp_file = PulpTemporaryFile.init_and_validate(serializer.validated_data["file"])
    temp_file.save()

    async_result = self._dispatch_import_collection_task(temp_file.pk, distro.repository)
    return OperationPostponedResponse(async_result, request)
def validate(self, data):
    """Validate that we have a file or can create one.

    Stores the upload in a PulpTemporaryFile and records its pk in the
    returned data under "temp_file_pk".

    Raises:
        serializers.ValidationError: if "artifact" was supplied (only "file"
            is accepted).
        ValidationError: if an Artifact with the same sha256 already exists.
    """
    if "artifact" in data:
        raise serializers.ValidationError(_("Only 'file' may be specified."))

    # Outside a request context (task-side), run the deferred validation path.
    if "request" not in self.context:
        data = self.deferred_validate(data)

    sha256 = data["file"].hashers["sha256"].hexdigest()
    artifact = Artifact.objects.filter(sha256=sha256).first()
    if artifact:
        # BUG FIX: the exception was previously constructed but never raised,
        # so duplicate artifacts silently passed validation.
        raise ValidationError(_("Artifact already exists"))

    temp_file = PulpTemporaryFile.init_and_validate(data.pop("file"))
    temp_file.save()
    data["temp_file_pk"] = str(temp_file.pk)

    return data
def setUp(self):
    """Create an Artifact from a temp file and link it to a CollectionVersion.

    Builds the fixture chain: file on disk -> PulpTemporaryFile -> Artifact
    -> CollectionVersion -> ContentArtifact.
    """
    # assumes self.artifact_path is set by the test class — TODO confirm
    with open(self.artifact_path, 'w') as f:
        f.write('Temp Artifact File')
    self.pulp_temp_file = PulpTemporaryFile.init_and_validate(self.artifact_path)
    self.pulp_temp_file.save()

    self.artifact = Artifact.from_pulp_temporary_file(self.pulp_temp_file)

    collection = Collection.objects.create(namespace='my_ns', name='my_name')
    # FIX: Model.objects.create() already persists the row; the extra
    # .save() calls the original made only issued redundant UPDATE queries.
    self.collection_version = CollectionVersion.objects.create(collection=collection)
    ContentArtifact.objects.create(
        artifact=self.artifact,
        content=self.collection_version,
    )