Example #1
    def refresh(self, request, pk):
        """
        Refresh ACS metadata.
        """
        acs = get_object_or_404(RpmAlternateContentSource, pk=pk)
        acs_paths = AlternateContentSourcePath.objects.filter(
            alternate_content_source=pk)
        task_group = TaskGroup.objects.create(
            description=f"Refreshing {acs_paths.count()} alternate content source path(s)."
        )

        # Get required defaults for sync operation
        optimize = RpmRepositorySyncURLSerializer().data["optimize"]
        skip_types = RpmRepositorySyncURLSerializer().data["skip_types"]

        for acs_path in acs_paths:
            # Create or get repository for the path
            repo_data = {
                "name": f"{acs.name}--{acs_path.pk}--repository",
                "retain_repo_versions": 1,
                "user_hidden": True,
            }
            repo, created = RpmRepository.objects.get_or_create(**repo_data)
            if created:
                acs_path.repository = repo
                acs_path.save()
            acs_url = (os.path.join(acs.remote.url, acs_path.path)
                       if acs_path.path else acs.remote.url)

            # Dispatch each ACS path as its own task and assign it to the common TaskGroup
            dispatch(
                tasks.synchronize,
                shared_resources=[acs.remote, acs],
                task_group=task_group,
                kwargs={
                    "remote_pk": str(acs.remote.pk),
                    "repository_pk": str(acs_path.repository.pk),
                    "sync_policy": SYNC_POLICIES.MIRROR_CONTENT_ONLY,
                    "skip_types": skip_types,
                    "optimize": optimize,
                    "url": acs_url,
                },
            )

        # Mark the TaskGroup as finished now that all child tasks are dispatched
        task_group.finish()

        acs.last_refreshed = now()
        acs.save()

        return TaskGroupOperationResponse(task_group, request)
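
Because each path is dispatched as its own child task, a caller tracks progress through the TaskGroup rather than through a single task. A minimal polling sketch, assuming pulpcore's TaskGroup model and task-state names (the helper itself is hypothetical):

    import time

    from pulpcore.plugin.models import TaskGroup

    def wait_for_group(task_group_pk, interval=2):
        """Hypothetical helper: block until every child task reaches a final state."""
        final_states = {"completed", "failed", "canceled"}
        while True:
            group = TaskGroup.objects.get(pk=task_group_pk)
            states = set(group.tasks.values_list("state", flat=True))
            # all_tasks_dispatched is set by task_group.finish() in the view above
            if group.all_tasks_dispatched and states <= final_states:
                return states
            time.sleep(interval)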
Example #2
    def sign(self, request, pk):
        """
        Dispatches a sign task.

        This endpoint is in tech preview and can change at any time in the future.
        """
        content_units = {}

        repository = self.get_object()
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        signing_service = serializer.validated_data["signing_service"]
        content = serializer.validated_data["content_units"]

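        # "*" signs every collection version in the repository; any other value
        # is a list of content hrefs, resolved to pks and verified to exist below.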
        if "*" in content:
            content_units_pks = ["*"]
        else:
            for url in content:
                content_units[NamedModelViewSet.extract_pk(url)] = url
            content_units_pks = list(content_units.keys())
            existing_content_units = CollectionVersion.objects.filter(pk__in=content_units_pks)
            self.verify_content_units(existing_content_units, content_units)

        result = dispatch(
            sign,
            exclusive_resources=[repository],
            kwargs={
                "repository_href": repository.pk,
                "content_hrefs": content_units_pks,
                "signing_service_href": signing_service.pk,
            },
        )
        return OperationPostponedResponse(result, request)
Example #3
    def sync(self, request, pk):
        """
        Synchronizes a repository.

        The ``repository`` field has to be provided.
        """
        serializer = RepositorySyncURLSerializer(data=request.data,
                                                 context={
                                                     "request": request,
                                                     "repository_pk": pk
                                                 })
        serializer.is_valid(raise_exception=True)

        repository = self.get_object()
        remote = serializer.validated_data.get("remote", repository.remote)

        mirror = serializer.validated_data.get("mirror", False)
        result = dispatch(
            tasks.synchronize,
            [repository, remote],
            kwargs={
                "remote_pk": str(remote.pk),
                "repository_pk": str(repository.pk),
                "mirror": mirror,
            },
        )
        return OperationPostponedResponse(result, request)
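
The kwargs must line up with the dispatched function's parameters, and only serializable values survive the trip to the worker, which is why primary keys are passed rather than model instances. A sketch of what the receiving task could look like (illustrative only, not the plugin's actual implementation):

    from pulpcore.plugin.models import Remote, Repository

    def synchronize(remote_pk, repository_pk, mirror=False):
        """Illustrative task body: objects are re-fetched by pk inside the worker."""
        remote = Remote.objects.get(pk=remote_pk).cast()
        repository = Repository.objects.get(pk=repository_pk).cast()
        with repository.new_version() as new_version:
            pass  # fetch metadata from remote.url, then add/remove content units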
Example #4
    def build_image(self, request, pk):
        """
        Create a task which is responsible for creating a new image and tag.
        """
        repository = self.get_object()

        serializer = serializers.OCIBuildImageSerializer(
            data=request.data, context={"request": request})

        serializer.is_valid(raise_exception=True)

        containerfile = serializer.validated_data["containerfile_artifact"]
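        # Saving may violate the sha256 uniqueness constraint when an identical
        # artifact is already stored; in that case reuse the existing row.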
        try:
            containerfile.save()
        except IntegrityError:
            containerfile = Artifact.objects.get(sha256=containerfile.sha256)
        tag = serializer.validated_data["tag"]

        artifacts = serializer.validated_data["artifacts"]

        result = dispatch(
            tasks.build_image_from_containerfile,
            [repository],
            kwargs={
                "containerfile_pk": str(containerfile.pk),
                "tag": tag,
                "repository_pk": str(repository.pk),
                "artifacts": artifacts,
            },
        )
        return OperationPostponedResponse(result, request)
Example #5
    def sync(self, request, pk):
        """
        <!-- User-facing documentation, rendered as html-->
        Trigger an asynchronous task to sync Python content. The sync task will retrieve Python
        content from the specified `Remote` and update the specified `Repository`, creating a
        new `RepositoryVersion`.
        """
        repository = self.get_object()
        serializer = RepositorySyncURLSerializer(data=request.data,
                                                 context={
                                                     'request': request,
                                                     "repository_pk": pk
                                                 })
        serializer.is_valid(raise_exception=True)
        remote = serializer.validated_data.get('remote', repository.remote)
        mirror = serializer.validated_data.get('mirror')

        result = dispatch(tasks.sync,
                          exclusive_resources=[repository, remote],
                          kwargs={
                              'remote_pk': str(remote.pk),
                              'repository_pk': str(repository.pk),
                              'mirror': mirror
                          })
        return core_viewsets.OperationPostponedResponse(result, request)
Example #6
    def create(self, request, exporter_pk):
        """
        Generates a Task to export the set of repositories assigned to a specific PulpExporter.
        """
        # Validate Exporter
        exporter = PulpExporter.objects.get(pk=exporter_pk).cast()
        ExporterSerializer.validate_path(exporter.path, check_is_dir=True)

        # Validate Export
        serializer = PulpExportSerializer(data=request.data,
                                          context={"exporter": exporter})
        serializer.is_valid(raise_exception=True)

        # Invoke the export
        export = PulpExport.objects.create(exporter=exporter,
                                           params=request.data)
        export.validated_versions = serializer.validated_data.get(
            "versions", None)
        export.validated_start_versions = serializer.validated_data.get(
            "start_versions", None)
        export.validated_chunk_size = serializer.validated_data.get(
            "chunk_size", None)

        task = dispatch(pulp_export, [exporter], kwargs={"the_export": export})

        return OperationPostponedResponse(task, request)
Example #7
    def sync(self, request, pk):
        """
        Dispatches a sync task.
        """
        repository = self.get_object()
        serializer = RepositorySyncURLSerializer(data=request.data,
                                                 context={
                                                     "request": request,
                                                     "repository_pk": pk
                                                 })

        # Validate synchronously to return 400 errors.
        serializer.is_valid(raise_exception=True)
        remote = serializer.validated_data.get("remote", repository.remote)
        mirror = serializer.validated_data.get("mirror", True)

        result = dispatch(
            func=tasks.synchronize,
            exclusive_resources=[repository],
            shared_resources=[remote],
            kwargs={
                "remote_pk": remote.pk,
                "repository_pk": repository.pk,
                "mirror": mirror,
            },
        )
        return OperationPostponedResponse(result, request)
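
Note the locking split: the repository being written takes an exclusive lock, while the remote, which is only read, takes a shared one (contrast Example #3, which passes both objects as exclusive resources). Two syncs from the same remote into different repositories can therefore run concurrently; a sketch, where repo_a, repo_b, and remote are hypothetical objects:

    for repo in (repo_a, repo_b):
        dispatch(
            func=tasks.synchronize,
            exclusive_resources=[repo],  # one writer per repository
            shared_resources=[remote],   # both tasks hold the remote's shared lock
            kwargs={"remote_pk": remote.pk, "repository_pk": repo.pk, "mirror": True},
        )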
Example #8
    def sync(self, request, pk):
        """
        Dispatches a sync task.
        """
        repository = self.get_object()
        serializer = RpmRepositorySyncURLSerializer(data=request.data,
                                                    context={
                                                        "request": request,
                                                        "repository_pk": pk
                                                    })
        serializer.is_valid(raise_exception=True)
        remote = serializer.validated_data.get("remote", repository.remote)
        mirror = serializer.validated_data.get("mirror")
        skip_types = serializer.validated_data.get("skip_types")
        optimize = serializer.validated_data.get("optimize")

        if repository.retain_package_versions > 0 and mirror:
            raise DRFValidationError(
                "Cannot use 'retain_package_versions' with mirror-mode sync")

        result = dispatch(
            tasks.synchronize,
            [repository, remote],
            kwargs={
                "mirror": mirror,
                "remote_pk": str(remote.pk),
                "repository_pk": str(repository.pk),
                "skip_types": skip_types,
                "optimize": optimize,
            },
        )
        return OperationPostponedResponse(result, request)
Example #9
    def sync(self, request, pk):
        """
        Dispatches a sync task.
        """
        repository = self.get_object()
        serializer = AnsibleRepositorySyncURLSerializer(
            data=request.data, context={"request": request, "repository_pk": repository.pk}
        )
        serializer.is_valid(raise_exception=True)

        remote = serializer.validated_data.get("remote", repository.remote)
        remote = remote.cast()

        mirror = serializer.validated_data["mirror"]
        sync_kwargs = {
            "remote_pk": remote.pk,
            "repository_pk": repository.pk,
            "mirror": mirror,
        }

        if isinstance(remote, RoleRemote):
            sync_func = role_sync
        elif isinstance(remote, CollectionRemote):
            sync_func = collection_sync
            sync_kwargs["optimize"] = serializer.validated_data["optimize"]
        elif isinstance(remote, GitRemote):
            sync_func = git_sync
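        # (Assumes the remote is one of the three types above; an unmatched
        # remote type would leave sync_func unbound and raise NameError below.)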

        result = dispatch(
            sync_func,
            exclusive_resources=[repository],
            shared_resources=[remote],
            kwargs=sync_kwargs,
        )
        return OperationPostponedResponse(result, request)
Example #10
    def create(self, request):
        """Upload a comps.xml file and create Content from it."""
        serializer = CompsXmlSerializer(data=request.data,
                                        context={"request": request})
        serializer.is_valid(raise_exception=True)

        # Store TemporaryUpload as a file we can find/use from our task
        task_payload = {k: v for k, v in request.data.items()}
        file_content = task_payload.pop("file", None)
        temp_file = PulpTemporaryFile.init_and_validate(file_content)
        temp_file.save()

        # Lock destination-repo if we are given one so two uploads can't collide
        repository = serializer.validated_data.get("repository", None)
        repo_pk = str(repository.pk) if repository else None
        replace = serializer.validated_data.get("replace", False)

        # Kick off task to Do the Deed
        task = dispatch(
            tasks.upload_comps,
            exclusive_resources=[repository] if repository else [],
            args=([str(temp_file.pk), repo_pk, replace]),
            kwargs={},
        )
        return OperationPostponedResponse(task, request)
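
On the worker side, the task receives only the temporary file's pk and looks the file back up. A sketch of that counterpart, assuming PulpTemporaryFile's file field (the parsing step is elided):

    from pulpcore.plugin.models import PulpTemporaryFile

    def upload_comps(tmp_file_pk, repo_pk=None, replace=False):
        """Illustrative counterpart to the dispatch above."""
        temp_file = PulpTemporaryFile.objects.get(pk=tmp_file_pk)
        with temp_file.file.open("rb") as fd:
            comps_xml = fd.read()
        # ... parse comps_xml into content units, optionally updating repo_pk ...
        temp_file.delete()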
Example #11
    def remove(self, request, pk):
        """
        Queues a task that creates a new RepositoryVersion by removing content units.
        """
        remove_content_units = []
        repository = self.get_object()
        serializer = serializers.RecursiveManageSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        if "content_units" in request.data:
            for url in request.data["content_units"]:
                if url == "*":
                    remove_content_units = [url]
                    break

                content = NamedModelViewSet.get_resource(url, Content)
                remove_content_units.append(str(content.pk))

        result = dispatch(
            tasks.recursive_remove_content,
            [repository],
            kwargs={
                "repository_pk": str(repository.pk),
                "content_units": remove_content_units
            },
        )
        return OperationPostponedResponse(result, request)
Example #12
    def create(self, request):
        """
        Publishes a repository.

        Either the ``repository`` or the ``repository_version`` fields can
        be provided but not both at the same time.
        """
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        repository_version = serializer.validated_data.get("repository_version")
        simple = serializer.validated_data.get("simple")
        structured = serializer.validated_data.get("structured")
        signing_service = serializer.validated_data.get("signing_service")

        result = dispatch(
            func=tasks.publish,
            shared_resources=[repository_version.repository],
            kwargs={
                "repository_version_pk": repository_version.pk,
                "simple": simple,
                "structured": structured,
                "signing_service_pk": getattr(signing_service, "pk", None),
            },
        )
        return OperationPostponedResponse(result, request)
Example #13
    def copy_tags(self, request, pk):
        """
        Queues a task that creates a new RepositoryVersion by adding Tags.
        """
        names = request.data.get("names")
        serializer = serializers.TagCopySerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        repository = self.get_object()
        source_latest = serializer.validated_data["source_repository_version"]
        content_tags_in_repo = source_latest.content.filter(
            pulp_type=models.Tag.get_pulp_type())
        tags_in_repo = models.Tag.objects.filter(pk__in=content_tags_in_repo)
        if names is None:
            tags_to_add = tags_in_repo
        else:
            tags_to_add = tags_in_repo.filter(name__in=names)

        result = dispatch(
            tasks.recursive_add_content,
            [repository],
            kwargs={
                "repository_pk": str(repository.pk),
                "content_units": [str(pk) for pk in tags_to_add.values_list("pk", flat=True)],
            },
        )
        return OperationPostponedResponse(result, request)
Example #14
    def copy_manifests(self, request, pk):
        """
        Queues a task that creates a new RepositoryVersion by adding Manifests.
        """
        serializer = serializers.ManifestCopySerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        repository = self.get_object()
        source_latest = serializer.validated_data["source_repository_version"]
        content_manifests_in_repo = source_latest.content.filter(
            pulp_type=models.Manifest.get_pulp_type())
        manifests_in_repo = models.Manifest.objects.filter(
            pk__in=content_manifests_in_repo)
        digests = request.data.get("digests")
        media_types = request.data.get("media_types")
        filters = {}
        if digests is not None:
            filters["digest__in"] = digests
        if media_types is not None:
            filters["media_type__in"] = media_types
        manifests_to_add = manifests_in_repo.filter(**filters)
        result = dispatch(
            tasks.recursive_add_content,
            [repository],
            kwargs={
                "repository_pk": str(repository.pk),
                "content_units": [str(manifest.pk) for manifest in manifests_to_add],
            },
        )
        return OperationPostponedResponse(result, request)
Example #15
    def run(self, request, pk):
        """Run the migration plan."""
        migration_plan = self.get_object()
        serializer = MigrationPlanRunSerializer(
            data=request.data,
            context={'request': request}
        )
        serializer.is_valid(raise_exception=True)
        validate = serializer.validated_data.get('validate', False)
        dry_run = serializer.validated_data.get('dry_run', False)
        skip_corrupted = serializer.validated_data.get('skip_corrupted', False)

        if is_migration_plan_running():
            raise ValidationError(_("Only one migration plan can run or be reset at a time"))

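        # PULP_2TO3_MIGRATION_RESOURCE is a constant resource name; reserving it
        # means at most one migration run or reset executes at a time, matching
        # the is_migration_plan_running() guard above.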
        result = dispatch(
            migrate_from_pulp2,
            [PULP_2TO3_MIGRATION_RESOURCE],
            kwargs={
                'migration_plan_pk': str(migration_plan.pk),
                'validate': validate,
                'dry_run': dry_run,
                'skip_corrupted': skip_corrupted
            }
        )
        return OperationPostponedResponse(result, request)
Example #16
    def run(self, request, pk):
        """Run the migration plan."""
        migration_plan = self.get_object()
        serializer = MigrationPlanRunSerializer(data=request.data,
                                                context={"request": request})
        serializer.is_valid(raise_exception=True)
        validate = serializer.validated_data.get("validate", False)
        dry_run = serializer.validated_data.get("dry_run", False)
        skip_corrupted = serializer.validated_data.get("skip_corrupted", False)

        if is_migration_plan_running():
            raise ValidationError(
                _("Only one migration plan can run or be reset at a time"))

        result = dispatch(
            migrate_from_pulp2,
            exclusive_resources=[PULP_2TO3_MIGRATION_RESOURCE],
            kwargs={
                "migration_plan_pk": str(migration_plan.pk),
                "validate": validate,
                "dry_run": dry_run,
                "skip_corrupted": skip_corrupted,
            },
        )
        return OperationPostponedResponse(result, request)
Example #17
    def destroy(self, request, pk, **kwargs):
        """
        Delete a Namespace along with all of its distributions.
        If a push repository is associated with any of its distributions, delete it as well.
        """
        namespace = self.get_object()
        reservations = []
        instance_ids = []

        for distribution in namespace.container_distributions.all():
            reservations.append(distribution)
            instance_ids.append(
                (str(distribution.pk), "container", "ContainerDistributionSerializer")
            )
            if distribution.repository and distribution.repository.cast().PUSH_ENABLED:
                reservations.append(distribution.repository)
                instance_ids.append(
                    (
                        str(distribution.repository.pk),
                        "container",
                        "ContainerPushRepositorySerializer",
                    )
                )

        reservations.append(namespace)
        instance_ids.append(
            (str(namespace.pk), "container", "ContainerNamespaceSerializer")
        )
        async_result = dispatch(
            tasks.general_multi_delete, reservations, args=(instance_ids,)
        )
        return OperationPostponedResponse(async_result, request)
Example #18
    def create(self, request):
        """Create a content unit."""
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        task_payload = {k: v for k, v in request.data.items()}
        file_content = task_payload.pop("file", None)

        temp_file = PulpTemporaryFile.init_and_validate(file_content)
        temp_file.save()

        shared_resources = []
        repository = serializer.validated_data.get("repository")
        if repository:
            shared_resources.append(repository)

        app_label = self.queryset.model._meta.app_label
        task = dispatch(
            tasks.base.general_create_from_temp_file,
            shared_resources,
            args=(app_label, serializer.__class__.__name__, str(temp_file.pk)),
            kwargs={
                "data": task_payload,
                "context": self.get_deferred_context(request)
            },
        )
        return OperationPostponedResponse(task, request)
Example #19
    def _dispatch_import_collection_task(self, temp_file_pk, repository=None, **kwargs):
        """
        Dispatch an import collection task.
        """
        locks = []
        kwargs["temp_file_pk"] = temp_file_pk
        if repository:
            locks.append(repository)
            kwargs["repository_pk"] = repository.pk

        return dispatch(import_collection, locks, kwargs=kwargs)
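
For instance, a caller holding a saved PulpTemporaryFile might dispatch the import like this (temp_file, repository, and the expected_namespace passthrough kwarg are all hypothetical here):

    task = self._dispatch_import_collection_task(
        str(temp_file.pk),
        repository=repository,
        expected_namespace="acme",  # hypothetical kwarg, forwarded to the task as-is
    )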
Example #20
    def refresh(self, request, pk):
        """
        Refresh ACS metadata.
        """
        acs = get_object_or_404(AlternateContentSource, pk=pk)
        acs_paths = AlternateContentSourcePath.objects.filter(alternate_content_source=pk)
        task_group = TaskGroup.objects.create(
            description=f"Refreshing {acs_paths.count()} alternate content source paths."
        )

        for acs_path in acs_paths:
            # Create or get repository for the path
            repo_data = {
                "name": f"{acs.name}--{acs_path.pk}--repository",
                "retain_repo_versions": 1,
                "user_hidden": True,
            }
            repo, created = FileRepository.objects.get_or_create(**repo_data)
            if created:
                acs_path.repository = repo
                acs_path.save()
            acs_url = (
                os.path.join(acs.remote.url, acs_path.path) if acs_path.path else acs.remote.url
            )

            # Dispatch each ACS path as its own task and assign it to the common TaskGroup
            dispatch(
                tasks.synchronize,
                shared_resources=[acs.remote, acs],
                task_group=task_group,
                kwargs={
                    "remote_pk": str(acs.remote.pk),
                    "repository_pk": str(acs_path.repository.pk),
                    "mirror": False,
                    "url": acs_url,
                },
            )

        # Mark the TaskGroup as finished now that all child tasks are dispatched
        task_group.finish()
        return TaskGroupOperationResponse(task_group, request)
Example #21
    def create(self, request):
        """Copy content."""
        serializer = CopySerializer(data=request.data,
                                    context={"request": request})
        serializer.is_valid(raise_exception=True)

        config = serializer.validated_data["config"]

        config, repos = self._process_config(config)

        async_result = dispatch(copy_content, repos, args=[config], kwargs={})
        return OperationPostponedResponse(async_result, request)
Example #22
    def reset(self, request, pk):
        """Reset Pulp 3 data for plugins specified in the migration plan."""
        migration_plan = self.get_object()

        if is_migration_plan_running():
            raise ValidationError(
                _("Only one migration plan can run or be reset at a time"))

        result = dispatch(reset_pulp3_data, [PULP_2TO3_MIGRATION_RESOURCE],
                          kwargs={
                              'migration_plan_pk': str(migration_plan.pk),
                          })
        return OperationPostponedResponse(result, request)
Example #23
    def create(self, request, exporter_pk):
        """
        Generates a Task to export files to the filesystem.
        """
        # Validate Exporter
        exporter = FilesystemExporter.objects.get(pk=exporter_pk).cast()
        ExporterSerializer.validate_path(exporter.path, check_is_dir=True)

        # Validate Export
        serializer = FilesystemExportSerializer(data=request.data,
                                                context={"exporter": exporter})
        serializer.is_valid(raise_exception=True)

        if request.data.get("publication"):
            publication = self.get_resource(request.data["publication"],
                                            Publication)

            task = dispatch(
                fs_publication_export,
                exclusive_resources=[exporter],
                kwargs={
                    "exporter_pk": exporter.pk,
                    "publication_pk": publication.pk
                },
            )
        else:
            repo_version = self.get_resource(
                request.data["repository_version"], RepositoryVersion)

            task = dispatch(
                fs_repo_version_export,
                exclusive_resources=[exporter],
                kwargs={
                    "exporter_pk": str(exporter.pk),
                    "repo_version_pk": repo_version.pk
                },
            )

        return OperationPostponedResponse(task, request)
Example #24
    def create_group_upload_task(self, cur_session, repository, artifact,
                                 filename, start_time):
        """Creates the actual task that adds the packages to the index."""
        cur_session['start'] = str(start_time)
        cur_session['artifacts'] = [(str(artifact.sha256), filename)]
        cur_session.modified = False
        cur_session.save()
        result = dispatch(
            tasks.upload_group,
            [artifact, repository],
            kwargs={
                "session_pk": str(cur_session.session_key),
                "repository_pk": str(repository.pk),
            },
        )
        return reverse('tasks-detail', args=[result.pk], request=None)
Example #25
    def create(self, request):
        """
        Dispatches a publish task.
        """
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        repository_version = serializer.validated_data.get(
            "repository_version")
        repository = RpmRepository.objects.get(
            pk=repository_version.repository.pk)

        metadata_checksum_type = serializer.validated_data.get(
            "metadata_checksum_type", repository.metadata_checksum_type)
        package_checksum_type = serializer.validated_data.get(
            "package_checksum_type", repository.package_checksum_type)
        checksum_types = dict(
            metadata=metadata_checksum_type,
            package=package_checksum_type,
        )
        gpgcheck_options = dict(
            gpgcheck=serializer.validated_data.get("gpgcheck",
                                                   repository.gpgcheck),
            repo_gpgcheck=serializer.validated_data.get(
                "repo_gpgcheck", repository.repo_gpgcheck),
        )
        sqlite_metadata = serializer.validated_data.get(
            "sqlite_metadata", repository.sqlite_metadata)
        if sqlite_metadata:
            logging.getLogger("pulp_rpm.deprecation").info(
                "Support for sqlite metadata generation will be removed from a future release "
                "of pulp_rpm. See https://tinyurl.com/sqlite-removal for more details"
            )

        if repository.metadata_signing_service:
            signing_service_pk = repository.metadata_signing_service.pk
        else:
            signing_service_pk = None

        result = dispatch(
            tasks.publish,
            shared_resources=[repository_version.repository],
            kwargs={
                "repository_version_pk": repository_version.pk,
                "metadata_signing_service": signing_service_pk,
                "checksum_types": checksum_types,
                "gpgcheck_options": gpgcheck_options,
                "sqlite_metadata": sqlite_metadata,
            },
        )
        return OperationPostponedResponse(result, request)
Example #26
    def sync(self, request, pk):
        """
        Dispatches a sync task.
        """
        repository = self.get_object()
        serializer = RpmRepositorySyncURLSerializer(data=request.data,
                                                    context={
                                                        "request": request,
                                                        "repository_pk": pk
                                                    })
        serializer.is_valid(raise_exception=True)
        remote = serializer.validated_data.get("remote", repository.remote)
        mirror = serializer.validated_data.get("mirror")
        sync_policy = serializer.validated_data.get("sync_policy")
        skip_types = serializer.validated_data.get("skip_types")
        optimize = serializer.validated_data.get("optimize")

        if not sync_policy:
            sync_policy = SYNC_POLICIES.ADDITIVE if not mirror else SYNC_POLICIES.MIRROR_COMPLETE

        # validate some invariants that involve repository-wide settings.
        if sync_policy in (SYNC_POLICIES.MIRROR_COMPLETE,
                           SYNC_POLICIES.MIRROR_CONTENT_ONLY):
            err_msg = (
                "Cannot use '{}' in combination with a 'mirror_complete' or "
                "'mirror_content_only' sync policy.")
            if repository.retain_package_versions > 0:
                raise DRFValidationError(
                    err_msg.format("retain_package_versions"))

        if sync_policy == SYNC_POLICIES.MIRROR_COMPLETE:
            err_msg = "Cannot use '{}' in combination with a 'mirror_complete' sync policy."
            if repository.autopublish:
                raise DRFValidationError(err_msg.format("autopublish"))
            if skip_types:
                raise DRFValidationError(err_msg.format("skip_types"))

        result = dispatch(
            tasks.synchronize,
            shared_resources=[remote],
            exclusive_resources=[repository],
            kwargs={
                "sync_policy": sync_policy,
                "remote_pk": str(remote.pk),
                "repository_pk": str(repository.pk),
                "skip_types": skip_types,
                "optimize": optimize,
            },
        )
        return OperationPostponedResponse(result, request)
Example #27
def promote_content(repos_per_task, num_repos_to_update):
    """
    Select a random CollectionVersion and attempt to add it to some number of AnsibleRepositories.

    By default this will update all Repositories, creating a new RepositoryVersion on each. The
    `num_repos_to_update` argument can specify the number of Repositories to update.

    This task generates many subtasks. This task randomly selects the Repositories it will update,
    and randomly selects the CollectionVersion. Then it dispatches N repositories to be handled by
    the `add_content_to_repositories` task. The dispatch locks on all repositories in the set to
    make this workload safe for execution with any other Pulp workload.

    The `repos_per_task` argument controls the number of repositories handled per subtask.

    Args:
        repos_per_task: The number of repositories to handle in each subtask. 100 is a typical
            choice.
        num_repos_to_update: The total number of repositories to update.

    """
    random_collection_version = CollectionVersion.objects.order_by("?").first()
    repos_to_dispatch = []
    locks = []
    for repo_num, repo in enumerate(AnsibleRepository.objects.all(), 1):
        if repo_num > num_repos_to_update:
            break
        repos_to_dispatch.append(repo.pk)
        locks.append(repo)
        if len(repos_to_dispatch) == repos_per_task:
            task_args = (random_collection_version.pk, repos_to_dispatch)
            dispatch(add_content_to_repositories, exclusive_resources=locks, args=task_args)
            repos_to_dispatch = []
            locks = []

    if repos_to_dispatch:
        task_args = (random_collection_version.pk, repos_to_dispatch)
        dispatch(add_content_to_repositories, exclusive_resources=locks, args=task_args)
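
Since promote_content is itself a task that only fans out subtasks, it gets dispatched like any other; a short sketch, assuming pulpcore's dispatch import path:

    from pulpcore.plugin.tasking import dispatch

    # Update 1000 repositories, 100 per subtask: 10 child dispatches.
    dispatch(promote_content, args=(100, 1000))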
Example #28
    def update(self, request, pk, **kwargs):
        """Update remote."""
        partial = kwargs.pop("partial", False)
        lock = [self.get_object()]
        repos = AnsibleRepository.objects.filter(
            remote_id=pk, last_synced_metadata_time__isnull=False
        ).all()
        lock.extend(repos)
        async_result = dispatch(
            update_collection_remote,
            exclusive_resources=lock,
            args=(pk,),
            kwargs={"data": request.data, "partial": partial},
        )
        return OperationPostponedResponse(async_result, request)
Example #29
    def mount_blob(self, request, path, repository):
        """Mount a blob that is already present in another repository."""
        from_path = request.query_params["from"]
        try:
            distribution = models.ContainerDistribution.objects.get(
                base_path=from_path)
        except models.ContainerDistribution.DoesNotExist:
            raise RepositoryNotFound(name=path)

        try:
            version = distribution.repository_version or distribution.repository.latest_version()
        except AttributeError:
            # the distribution does not contain reference to the source repository version
            raise RepositoryNotFound(name=from_path)

        digest = request.query_params["mount"]
        try:
            blob = models.Blob.objects.get(digest=digest,
                                           pk__in=version.content)
        except models.Blob.DoesNotExist:
            raise BlobNotFound(digest=digest)

        dispatched_task = dispatch(
            add_and_remove,
            shared_resources=[version.repository],
            exclusive_resources=[repository],
            kwargs={
                "repository_pk": str(repository.pk),
                "add_content_units": [str(blob.pk)],
                "remove_content_units": [],
            },
        )

        # Wait a small amount of time
        for dummy in range(3):
            time.sleep(1)
            task = Task.objects.get(pk=dispatched_task.pk)
            if task.state == "completed":
                task.delete()
                return BlobResponse(blob, path, 201, request)
            elif task.state in ["waiting", "running"]:
                continue
            else:
                error = task.error
                task.delete()
                raise Exception(str(error))
        raise Throttled()
Example #30
    def create(self, request):
        """
        Publishes a repository.

        Either the ``repository`` or the ``repository_version`` fields can
        be provided but not both at the same time.
        """
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        repository_version = serializer.validated_data.get("repository_version")

        result = dispatch(
            func=tasks.publish_verbatim,
            shared_resources=[repository_version.repository],
            kwargs={"repository_version_pk": repository_version.pk},
        )
        return OperationPostponedResponse(result, request)