def run(self, request, pk):
    """Dispatch an asynchronous task that executes this migration plan."""
    plan = self.get_object()
    serializer = MigrationPlanRunSerializer(
        data=request.data, context={'request': request}
    )
    serializer.is_valid(raise_exception=True)
    run_options = {
        key: serializer.validated_data.get(key, False)
        for key in ('validate', 'dry_run', 'skip_corrupted')
    }
    # Concurrent runs/resets would corrupt migration state, so refuse early.
    if is_migration_plan_running():
        raise ValidationError(_("Only one migration plan can run or be reset at a time"))
    task = dispatch(
        migrate_from_pulp2,
        [PULP_2TO3_MIGRATION_RESOURCE],
        kwargs={
            'migration_plan_pk': str(plan.pk),
            'validate': run_options['validate'],
            'dry_run': run_options['dry_run'],
            'skip_corrupted': run_options['skip_corrupted'],
        },
    )
    return OperationPostponedResponse(task, request)
def create(self, request):
    """
    Dispatch a Collection creation task.

    Validates the uploaded file (optionally against a client-supplied
    sha256), stores it as an Artifact, and queues an import task.
    """
    serializer = CollectionOneShotSerializer(data=request.data, context={'request': request})
    serializer.is_valid(raise_exception=True)
    expected_digests = {}
    if serializer.validated_data['sha256']:
        expected_digests['sha256'] = serializer.validated_data['sha256']
    try:
        artifact = Artifact.init_and_validate(
            serializer.validated_data['file'], expected_digests=expected_digests
        )
    except DigestValidationError:
        raise serializers.ValidationError(
            _("The provided sha256 value does not match the sha256 of the uploaded file.")
        )
    try:
        artifact.save()
    except IntegrityError:
        # An identical artifact was uploaded before; return a clear 400
        # instead of letting the duplicate-key error become a 500.
        raise serializers.ValidationError(_("Artifact already exists."))
    async_result = enqueue_with_reservation(
        import_collection,
        [str(artifact.pk)],
        kwargs={
            'artifact_pk': artifact.pk,
        },
    )
    return OperationPostponedResponse(async_result, request)
def run(self, request, pk):
    """Kick off a task that executes the migration plan."""
    plan = self.get_object()
    serializer = MigrationPlanRunSerializer(data=request.data, context={"request": request})
    serializer.is_valid(raise_exception=True)
    task_kwargs = {
        "migration_plan_pk": str(plan.pk),
        "validate": serializer.validated_data.get("validate", False),
        "dry_run": serializer.validated_data.get("dry_run", False),
        "skip_corrupted": serializer.validated_data.get("skip_corrupted", False),
    }
    # Only a single plan may run or reset at any given moment.
    if is_migration_plan_running():
        raise ValidationError(
            _("Only one migration plan can run or be reset at a time"))
    task = dispatch(
        migrate_from_pulp2,
        exclusive_resources=[PULP_2TO3_MIGRATION_RESOURCE],
        kwargs=task_kwargs,
    )
    return OperationPostponedResponse(task, request)
def create(self, request):
    """
    Dispatches a publish task.
    """
    serializer = self.get_serializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    version = serializer.validated_data.get('repository_version')
    repo = RpmRepository.objects.get(pk=version.repository.pk)
    checksum_types = {
        'metadata': serializer.validated_data.get('metadata_checksum_type', ""),
        'package': serializer.validated_data.get('package_checksum_type', ""),
    }
    result = enqueue_with_reservation(
        tasks.publish,
        [version.repository],
        kwargs={
            'repository_version_pk': version.pk,
            'metadata_signing_service': repo.metadata_signing_service,
            'checksum_types': checksum_types,
        },
    )
    return OperationPostponedResponse(result, request)
def create(self, request):
    """
    Publishes a repository.

    Either the ``repository`` or the ``repository_version`` fields can
    be provided but not both at the same time.
    """
    serializer = self.get_serializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    data = serializer.validated_data
    version = data.get('repository_version')
    result = enqueue_with_reservation(
        tasks.publish,
        [version.repository],
        kwargs={
            'repository_version_pk': str(version.pk),
            'simple': data.get('simple'),
            'structured': data.get('structured'),
        },
    )
    return OperationPostponedResponse(result, request)
def sign(self, request, pk):
    """
    Dispatch a task that signs content in this repository.

    This endpoint is in tech preview and can change at any time in the future.
    """
    repository = self.get_object()
    serializer = self.get_serializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    signing_service = serializer.validated_data["signing_service"]
    requested = serializer.validated_data["content_units"]
    href_by_pk = {}
    if "*" in requested:
        # Wildcard: the task signs every unit in the repository.
        unit_pks = ["*"]
    else:
        for href in requested:
            href_by_pk[NamedModelViewSet.extract_pk(href)] = href
        unit_pks = list(href_by_pk)
        existing = CollectionVersion.objects.filter(pk__in=unit_pks)
        self.verify_content_units(existing, href_by_pk)
    result = dispatch(
        sign,
        exclusive_resources=[repository],
        kwargs={
            "repository_href": repository.pk,
            "content_hrefs": unit_pks,
            "signing_service_href": signing_service.pk,
        },
    )
    return OperationPostponedResponse(result, request)
def sync(self, request, pk):
    """
    Dispatches a sync task.
    """
    repository = self.get_object()
    serializer = AnsibleRepositorySyncURLSerializer(
        data=request.data, context={"request": request, "repository_pk": repository.pk}
    )
    serializer.is_valid(raise_exception=True)
    remote = serializer.validated_data.get("remote", repository.remote)
    remote = remote.cast()
    task_kwargs = {
        "remote_pk": remote.pk,
        "repository_pk": repository.pk,
        "mirror": serializer.validated_data["mirror"],
    }
    # Select the sync implementation matching the remote's detail type.
    # NOTE(review): presumably the serializer guarantees one of these
    # remote kinds; otherwise task_func would be unbound — confirm.
    if isinstance(remote, RoleRemote):
        task_func = role_sync
    elif isinstance(remote, CollectionRemote):
        task_func = collection_sync
        task_kwargs["optimize"] = serializer.validated_data["optimize"]
    elif isinstance(remote, GitRemote):
        task_func = git_sync
    result = dispatch(
        task_func,
        exclusive_resources=[repository],
        shared_resources=[remote],
        kwargs=task_kwargs,
    )
    return OperationPostponedResponse(result, request)
def tag(self, request, pk):
    """
    Create a task which is responsible for creating a new tag.
    """
    repository = self.get_object()
    # The serializer validates the manifest/tag pair against this repository.
    request.data['repository'] = repository
    serializer = serializers.TagImageSerializer(
        data=request.data, context={'request': request}
    )
    serializer.is_valid(raise_exception=True)
    manifest = serializer.validated_data['manifest']
    tag_name = serializer.validated_data['tag']
    result = enqueue_with_reservation(
        tasks.tag_image,
        [repository, manifest],
        kwargs={
            'manifest_pk': manifest.pk,
            'tag': tag_name,
            'repository_pk': repository.pk,
        },
    )
    return OperationPostponedResponse(result, request)
def create(self, request):
    """Upload a comps.xml file and create Content from it."""
    serializer = CompsXmlSerializer(data=request.data, context={"request": request})
    serializer.is_valid(raise_exception=True)
    # Persist the upload as a temporary file the task can find and use.
    payload = dict(request.data.items())
    uploaded = payload.pop("file", None)
    temp_file = PulpTemporaryFile.init_and_validate(uploaded)
    temp_file.save()
    # Lock the destination repo if we are given one so two uploads can't collide.
    repository = serializer.validated_data.get("repository", None)
    replace = serializer.validated_data.get("replace", False)
    repo_pk = str(repository.pk) if repository else None
    task = dispatch(
        tasks.upload_comps,
        exclusive_resources=[repository] if repository else [],
        args=[str(temp_file.pk), repo_pk, replace],
        kwargs={},
    )
    return OperationPostponedResponse(task, request)
def create(self, request):
    """Create a content unit."""
    serializer = self.get_serializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    # The raw request data (minus the file) travels to the task as-is.
    payload = dict(request.data.items())
    uploaded = payload.pop("file", None)
    temp_file = PulpTemporaryFile.init_and_validate(uploaded)
    temp_file.save()
    repository = serializer.validated_data.get("repository")
    shared = [repository] if repository else []
    app_label = self.queryset.model._meta.app_label
    task = dispatch(
        tasks.base.general_create_from_temp_file,
        shared,
        args=(app_label, serializer.__class__.__name__, str(temp_file.pk)),
        kwargs={
            "data": payload,
            "context": self.get_deferred_context(request),
        },
    )
    return OperationPostponedResponse(task, request)
def sync(self, request, pk):
    """
    Synchronizes a repository. The ``repository`` field has to be provided.
    """
    serializer = RepositorySyncURLSerializer(
        data=request.data,
        context={"request": request, "repository_pk": pk},
    )
    serializer.is_valid(raise_exception=True)
    repo = self.get_object()
    remote = serializer.validated_data.get("remote", repo.remote)
    mirror_mode = serializer.validated_data.get("mirror", False)
    result = dispatch(
        tasks.synchronize,
        [repo, remote],
        kwargs={
            "remote_pk": str(remote.pk),
            "repository_pk": str(repo.pk),
            "mirror": mirror_mode,
        },
    )
    return OperationPostponedResponse(result, request)
def sync(self, request, pk):
    """
    Dispatches a sync task.
    """
    repo = self.get_object()
    serializer = RepositorySyncURLSerializer(
        data=request.data,
        context={"request": request, "repository_pk": pk},
    )
    # Validate synchronously to return 400 errors.
    serializer.is_valid(raise_exception=True)
    remote = serializer.validated_data.get("remote", repo.remote)
    sync_kwargs = {
        "remote_pk": remote.pk,
        "repository_pk": repo.pk,
        "mirror": serializer.validated_data.get("mirror", True),
    }
    result = dispatch(
        func=tasks.synchronize,
        exclusive_resources=[repo],
        shared_resources=[remote],
        kwargs=sync_kwargs,
    )
    return OperationPostponedResponse(result, request)
def publish(self, request, pk):
    """
    Dispatches a publish task.
    """
    publisher = self.get_object()
    serializer = RepositoryPublishURLSerializer(
        data=request.data, context={'request': request}
    )
    serializer.is_valid(raise_exception=True)
    version = serializer.validated_data.get('repository_version')
    if not version:
        # Safe because version OR repository is enforced by the serializer.
        repository = serializer.validated_data.get('repository')
        version = RepositoryVersion.latest(repository)
    result = enqueue_with_reservation(
        tasks.publish,
        [version.repository, publisher],
        kwargs={
            'publisher_pk': publisher.pk,
            'repository_version_pk': version.pk,
        },
    )
    return OperationPostponedResponse(result, request)
def create(self, request):
    """
    Dispatch a Collection creation task.
    """
    serializer = CollectionOneShotSerializer(data=request.data, context={"request": request})
    serializer.is_valid(raise_exception=True)
    sha256 = serializer.validated_data["sha256"]
    expected_digests = {"sha256": sha256} if sha256 else {}
    try:
        artifact = Artifact.init_and_validate(
            serializer.validated_data["file"], expected_digests=expected_digests
        )
    except DigestValidationError:
        raise serializers.ValidationError(
            _("The provided sha256 value does not match the sha256 of the uploaded file.")
        )
    try:
        artifact.save()
    except IntegrityError:
        # Duplicate upload: an identical artifact is already stored.
        raise serializers.ValidationError(_("Artifact already exists."))
    async_result = self._dispatch_import_collection_task(artifact.pk)
    return OperationPostponedResponse(async_result, request)
def sync(self, request, pk):
    """
    Synchronizes a repository. The ``repository`` field has to be provided.
    """
    repo = self.get_object()
    serializer = RepositorySyncURLSerializer(
        data=request.data, context={'request': request}
    )
    # Validate synchronously to return 400 errors.
    serializer.is_valid(raise_exception=True)
    remote = serializer.validated_data.get('remote')
    mirror_mode = serializer.validated_data.get('mirror')
    result = enqueue_with_reservation(
        tasks.synchronize,
        [repo, remote],
        kwargs={
            'remote_pk': remote.pk,
            'repository_pk': repo.pk,
            'mirror': mirror_mode,
        },
    )
    return OperationPostponedResponse(result, request)
def create(self, request):
    """
    Dispatches a publish task.
    """
    serializer = self.get_serializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    data = serializer.validated_data
    version = data.get("repository_version")
    repo = RpmRepository.objects.get(pk=version.repository.pk)
    publish_kwargs = {
        "repository_version_pk": version.pk,
        "metadata_signing_service": repo.metadata_signing_service,
        "checksum_types": {
            "metadata": data.get("metadata_checksum_type", ""),
            "package": data.get("package_checksum_type", ""),
        },
        "gpgcheck_options": {
            "gpgcheck": data.get("gpgcheck"),
            "repo_gpgcheck": data.get("repo_gpgcheck"),
        },
        "sqlite_metadata": data.get("sqlite_metadata", ""),
    }
    result = enqueue_with_reservation(
        tasks.publish,
        [version.repository],
        kwargs=publish_kwargs,
    )
    return OperationPostponedResponse(result, request)
def sync(self, request, pk):
    """
    Dispatches a sync task.
    """
    repo = self.get_object()
    serializer = RpmRepositorySyncURLSerializer(
        data=request.data, context={"request": request, "repository_pk": pk}
    )
    serializer.is_valid(raise_exception=True)
    data = serializer.validated_data
    remote = data.get("remote", repo.remote)
    mirror_mode = data.get("mirror")
    # Mirror-mode sync replaces content wholesale, which is incompatible
    # with retaining a fixed number of package versions.
    if mirror_mode and repo.retain_package_versions > 0:
        raise DRFValidationError("Cannot use 'retain_package_versions' with mirror-mode sync")
    result = enqueue_with_reservation(
        tasks.synchronize,
        [repo, remote],
        kwargs={
            "mirror": mirror_mode,
            "remote_pk": remote.pk,
            "repository_pk": repo.pk,
            "skip_types": data.get("skip_types"),
            "optimize": data.get("optimize"),
        },
    )
    return OperationPostponedResponse(result, request)
def create(self, request):
    """
    Publishes a repository.

    Either the ``repository`` or the ``repository_version`` fields can
    be provided but not both at the same time.
    """
    serializer = self.get_serializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    version = serializer.validated_data.get('repository_version')
    publisher = serializer.validated_data.get('publisher')
    publisher_pk = str(publisher.pk) if publisher else ''
    result = enqueue_with_reservation(
        tasks.publish,
        [version.repository, publisher_pk],
        kwargs={
            'publisher_pk': publisher_pk,
            'repository_version_pk': str(version.pk),
        },
    )
    return OperationPostponedResponse(result, request)
def create(self, request):
    """Upload an RPM package."""
    uploaded = request.data['file']
    artifact = Artifact.init_and_validate(uploaded)
    filename = uploaded.name
    repository_pk = None
    if 'repository' in request.data:
        serializer = OneShotUploadSerializer(
            data=request.data, context={'request': request}
        )
        serializer.is_valid(raise_exception=True)
        repository_pk = serializer.validated_data['repository'].pk
    try:
        artifact.save()
    except IntegrityError:
        # The artifact already exists; reuse the stored copy.
        artifact = Artifact.objects.get(sha256=artifact.sha256)
    async_result = enqueue_with_reservation(
        tasks.one_shot_upload,
        [artifact],
        kwargs={
            'artifact_pk': artifact.pk,
            'filename': filename,
            'repository_pk': repository_pk,
        },
    )
    return OperationPostponedResponse(async_result, request)
def create(self, request):
    """
    Queues a task that creates a new RepositoryVersion by adding content units.
    """
    serializer = serializers.ManifestCopySerializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    source_version = serializer.validated_data['source_repository_version']
    destination = serializer.validated_data['destination_repository']
    manifest_content = source_version.content.filter(pulp_type="docker.manifest")
    candidates = models.Manifest.objects.filter(pk__in=manifest_content)
    # Optional narrowing by digest and/or media type, straight from the raw data.
    query_filters = {}
    digests = request.data.get("digests")
    if digests is not None:
        query_filters['digest__in'] = digests
    media_types = request.data.get("media_types")
    if media_types is not None:
        query_filters['media_type__in'] = media_types
    result = enqueue_with_reservation(
        tasks.recursive_add_content,
        [destination],
        kwargs={
            'repository_pk': destination.pk,
            'content_units': candidates.filter(**query_filters),
        },
    )
    return OperationPostponedResponse(result, request)
def build_image(self, request, pk):
    """
    Create a task which is responsible for creating a new image and tag.
    """
    repository = self.get_object()
    serializer = serializers.OCIBuildImageSerializer(
        data=request.data, context={"request": request}
    )
    serializer.is_valid(raise_exception=True)
    containerfile = serializer.validated_data["containerfile_artifact"]
    try:
        containerfile.save()
    except IntegrityError:
        # An identical artifact is already stored; reuse it.
        containerfile = Artifact.objects.get(sha256=containerfile.sha256)
    result = enqueue_with_reservation(
        tasks.build_image_from_containerfile,
        [repository],
        kwargs={
            "containerfile_pk": containerfile.pk,
            "tag": serializer.validated_data["tag"],
            "repository_pk": repository.pk,
            "artifacts": serializer.validated_data["artifacts"],
        },
    )
    return OperationPostponedResponse(result, request)
def create(self, request):
    """
    Publishes a repository.

    Either the ``repository`` or the ``repository_version`` fields can
    be provided but not both at the same time.
    """
    serializer = self.get_serializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    data = serializer.validated_data
    version = data.get("repository_version")
    signing_service = data.get("signing_service")
    result = dispatch(
        func=tasks.publish,
        shared_resources=[version.repository],
        kwargs={
            "repository_version_pk": version.pk,
            "simple": data.get("simple"),
            "structured": data.get("structured"),
            "signing_service_pk": getattr(signing_service, "pk", None),
        },
    )
    return OperationPostponedResponse(result, request)
def copy_tags(self, request, pk):
    """
    Queues a task that creates a new RepositoryVersion by adding Tags.
    """
    serializer = serializers.TagCopySerializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    repository = self.get_object()
    source_version = serializer.validated_data["source_repository_version"]
    tag_content = source_version.content.filter(pulp_type=models.Tag.get_pulp_type())
    tags = models.Tag.objects.filter(pk__in=tag_content)
    # Without an explicit name list, every tag in the source is copied.
    names = request.data.get("names")
    if names is not None:
        tags = tags.filter(name__in=names)
    result = enqueue_with_reservation(
        tasks.recursive_add_content,
        [repository],
        kwargs={
            "repository_pk": repository.pk,
            "content_units": tags.values_list("pk", flat=True),
        },
    )
    return OperationPostponedResponse(result, request)
def copy_manifests(self, request, pk):
    """
    Queues a task that creates a new RepositoryVersion by adding Manifests.
    """
    serializer = serializers.ManifestCopySerializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    repository = self.get_object()
    source_version = serializer.validated_data["source_repository_version"]
    manifest_content = source_version.content.filter(
        pulp_type=models.Manifest.get_pulp_type()
    )
    manifests = models.Manifest.objects.filter(pk__in=manifest_content)
    # Optional narrowing by digest and/or media type from the raw request.
    query_filters = {}
    digests = request.data.get("digests")
    if digests is not None:
        query_filters["digest__in"] = digests
    media_types = request.data.get("media_types")
    if media_types is not None:
        query_filters["media_type__in"] = media_types
    result = enqueue_with_reservation(
        tasks.recursive_add_content,
        [repository],
        kwargs={
            "repository_pk": repository.pk,
            "content_units": manifests.filter(**query_filters),
        },
    )
    return OperationPostponedResponse(result, request)
def remove(self, request, pk):
    """
    Queues a task that creates a new RepositoryVersion by removing content units.
    """
    repository = self.get_object()
    serializer = serializers.RecursiveManageSerializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    units_to_remove = []
    if "content_units" in request.data:
        for href in request.data["content_units"]:
            if href == "*":
                # Wildcard removes everything; no need to resolve hrefs.
                units_to_remove = [href]
                break
            unit = NamedModelViewSet.get_resource(href, Content)
            units_to_remove.append(unit.pk)
    result = enqueue_with_reservation(
        tasks.recursive_remove_content,
        [repository],
        kwargs={
            "repository_pk": repository.pk,
            "content_units": units_to_remove,
        },
    )
    return OperationPostponedResponse(result, request)
def destroy(self, request, pk, **kwargs):
    """
    Delete a Namespace with all distributions.

    If a push repository is associated to any of its distributions, delete it as well.
    """
    namespace = self.get_object()
    reservations = []
    instance_ids = []
    for distribution in namespace.container_distributions.all():
        reservations.append(distribution)
        instance_ids.append(
            (distribution.pk, "container", "ContainerDistributionSerializer"),
        )
        repo = distribution.repository
        # Push repositories are owned by the distribution; delete them too.
        if repo and repo.cast().PUSH_ENABLED:
            reservations.append(repo)
            instance_ids.append(
                (repo.pk, "container", "ContainerPushRepositorySerializer"),
            )
    reservations.append(namespace)
    instance_ids.append(
        (namespace.pk, "container", "ContainerNamespaceSerializer"),
    )
    async_result = enqueue_with_reservation(
        tasks.general_multi_delete, reservations, args=(instance_ids,)
    )
    return OperationPostponedResponse(async_result, request)
def sync(self, request, pk):
    """
    Dispatches a sync task.

    Chooses the task function (role vs. collection sync) from the
    remote's detail type.
    """
    repository = self.get_object()
    serializer = RepositorySyncURLSerializer(
        data=request.data, context={"request": request, "repository_pk": repository.pk}
    )
    serializer.is_valid(raise_exception=True)
    remote = serializer.validated_data.get("remote")
    # BUGFIX: cast() returns the detail-typed instance; the original called
    # `remote.cast()` and discarded the result, so the isinstance checks
    # below ran against the base remote and `sync_func` could be referenced
    # before assignment (NameError).
    remote = remote.cast()
    if isinstance(remote, RoleRemote):
        sync_func = role_sync
    elif isinstance(remote, CollectionRemote):
        sync_func = collection_sync
    mirror = serializer.validated_data.get("mirror", False)
    result = enqueue_with_reservation(
        sync_func,
        [repository, remote],
        kwargs={"remote_pk": remote.pk, "repository_pk": repository.pk, "mirror": mirror},
    )
    return OperationPostponedResponse(result, request)
def publish(self, request, pk):
    """
    Dispatch a publish task for a repository or a specific repository version.

    Exactly one of ``repository`` / ``repository_version`` must be supplied.
    """
    publisher = self.get_object()
    repository = None
    repository_version = None
    # Presence check is on the keys themselves, not on truthiness.
    if 'repository' not in request.data and 'repository_version' not in request.data:
        raise serializers.ValidationError("Either the 'repository' or 'repository_version' "
                                          "need to be specified.")
    if request.data.get('repository'):
        repository = self.get_resource(request.data['repository'], Repository)
    if request.data.get('repository_version'):
        repository_version = self.get_resource(
            request.data['repository_version'], RepositoryVersion
        )
    if repository and repository_version:
        raise serializers.ValidationError("Either the 'repository' or 'repository_version' "
                                          "can be specified - not both.")
    if not repository_version:
        repository_version = RepositoryVersion.latest(repository)
    result = tasks.publish.apply_async_with_reservation(
        [repository_version.repository, publisher],
        kwargs={
            'publisher_pk': str(publisher.pk),
            'repository_version_pk': str(repository_version.pk),
        },
    )
    return OperationPostponedResponse(result, request)
def curate(self, request, pk):
    """Dispatch a task that curates the synclist's repository."""
    synclist = get_object_or_404(models.SyncList, pk=pk)
    task = enqueue_with_reservation(
        curate_synclist_repository,
        resources=[synclist.repository],
        args=(pk,),
    )
    log.debug("synclist_task: %s", task)
    return OperationPostponedResponse(task, request)
def create(self, request):
    """Copy content."""
    serializer = CopySerializer(data=request.data, context={"request": request})
    serializer.is_valid(raise_exception=True)
    # _process_config resolves the config into a normalized form plus the
    # repositories that must be reserved for the task.
    config, repos = self._process_config(serializer.validated_data["config"])
    async_result = enqueue_with_reservation(
        copy_content, repos, args=[config], kwargs={}
    )
    return OperationPostponedResponse(async_result, request)