Example #1
def sync(importer_pk, repository_pk):
    """
    Validate the importer, then create and finalize a RepositoryVersion.

    Args:
        importer_pk (str): The importer PK.
        repository_pk (str): The repository to sync into.

    Raises:
        ValueError: When feed_url is empty.
    """
    importer = GemImporter.objects.get(pk=importer_pk)
    repository = Repository.objects.get(pk=repository_pk)

    if not importer.feed_url:
        raise ValueError(
            _("An importer must have a 'feed_url' attribute to sync."))

    base_version = RepositoryVersion.latest(repository)

    with RepositoryVersion.create(repository) as new_version:

        synchronizer = Synchronizer(importer, new_version, base_version)
        with WorkingDirectory():
            log.info(
                _('Starting sync: repository=%(repository)s importer=%(importer)s'),
                {'repository': repository.name, 'importer': importer.name})
            synchronizer.run()
Example #2
def synchronize(remote_pk, repository_pk):
    """
    Create a new version of the repository that is synchronized with the remote.

    Args:
        remote_pk (str): The remote PK.
        repository_pk (str): The repository PK.

    Raises:
        ValueError: When url is empty.
    """
    remote = PuppetRemote.objects.get(pk=remote_pk)
    repository = Repository.objects.get(pk=repository_pk)
    base_version = RepositoryVersion.latest(repository)

    if not remote.url:
        raise ValueError(
            _('A remote must have a url specified to synchronize.'))

    with WorkingDirectory():
        with RepositoryVersion.create(repository) as new_version:
            log.info(_('Synchronizing: repository=%(r)s remote=%(p)s'), {
                'r': repository.name,
                'p': remote.name
            })
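The snippet above ends right after the log call, so the body of the version context is not shown. A hedged sketch of how such a sync commonly continues, patterned on the ChangeSet-based examples further down this list; the fetch_*, find_delta and build_* helpers here are placeholders, not part of the original code:

            # Hypothetical continuation of the sync body above; helper names are
            # placeholders modeled on the ChangeSet-based sync examples later in this list.
            inventory = fetch_inventory(remote)              # placeholder: read remote metadata
            existing = fetch_content(base_version)           # placeholder: index current content
            delta = find_delta(inventory, existing)          # placeholder: compute adds/removals
            changeset = ChangeSet(remote=remote,
                                  repository_version=new_version,
                                  additions=build_additions(remote, inventory, delta),
                                  removals=build_removals(base_version, delta))
            for report in changeset.apply():
                log.debug(_('Applied: %(c)s'), {'c': report})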
Example #3
    def new_version(self, base_version=None):
        """
        Create a new RepositoryVersion for this Repository.

        Creation of a RepositoryVersion should be done in an RQ job.

        Args:
            base_version (pulpcore.app.models.RepositoryVersion): an optional repository version
                whose content will be used as the set of content for the new version

        Returns:
            pulpcore.app.models.RepositoryVersion: The created RepositoryVersion

        """
        with transaction.atomic():
            version = RepositoryVersion(
                repository=self,
                number=int(self.next_version),
                base_version=base_version)
            version.save()

            if base_version:
                # first remove the content that isn't in the base version
                version.remove_content(version.content.exclude(pk__in=base_version.content))
                # now add any content that's in the base_version but not in version
                version.add_content(base_version.content.exclude(pk__in=version.content))

            if Task.current() and not self.sub_repo:
                resource = CreatedResource(content_object=version)
                resource.save()
            return version
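For orientation, a minimal caller sketch for the method above, assuming the pulpcore models named in its docstring; the repository name, the 'versions' reverse accessor, and the empty content queryset are illustrative assumptions rather than part of the original snippet:

# Hypothetical usage sketch for Repository.new_version(); names below are illustrative.
from pulpcore.app.models import Content, Repository

repo = Repository.objects.get(name="example-repo")       # assumed repository name
base = repo.versions.order_by("-number").first()         # assumed reverse accessor for versions
version = repo.new_version(base_version=base)            # pre-populates the new version from base
version.add_content(Content.objects.filter(pk__in=[]))   # then adjust content as needed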
Example #4
def untag_image(tag, repository_pk):
    """
    Create a new repository version that no longer contains the given tag name.
    """
    repository = Repository.objects.get(pk=repository_pk)
    latest_version = RepositoryVersion.latest(repository)

    tags_in_latest_repository = latest_version.content.filter(
        _type="docker.tag")

    tags_to_remove = Tag.objects.filter(pk__in=tags_in_latest_repository,
                                        name=tag)

    with RepositoryVersion.create(repository) as repository_version:
        repository_version.remove_content(tags_to_remove)
Example #5
    def publish(self, request, pk):
        """
        Dispatches a publish task.
        """
        publisher = self.get_object()
        serializer = RepositoryPublishURLSerializer(
            data=request.data,
            context={'request': request}
        )
        serializer.is_valid(raise_exception=True)
        repository_version = serializer.validated_data.get('repository_version')

        # Safe because version OR repository is enforced by serializer.
        if not repository_version:
            repository = serializer.validated_data.get('repository')
            repository_version = RepositoryVersion.latest(repository)

        result = enqueue_with_reservation(
            tasks.publish,
            [repository_version.repository, publisher],
            kwargs={
                'publisher_pk': publisher.pk,
                'repository_version_pk': repository_version.pk
            }
        )
        return OperationPostponedResponse(result, request)
Example #6
    def publish(self, request, pk):
        """
        Dispatches a publish task.
        """
        publisher = self.get_object()
        repository = None
        repository_version = None
        if 'repository' not in request.data and 'repository_version' not in request.data:
            raise serializers.ValidationError("Either the 'repository' or 'repository_version' "
                                              "need to be specified.")

        if 'repository' in request.data and request.data['repository']:
            repository = self.get_resource(request.data['repository'], Repository)

        if 'repository_version' in request.data and request.data['repository_version']:
            repository_version = self.get_resource(request.data['repository_version'],
                                                   RepositoryVersion)

        if repository and repository_version:
            raise serializers.ValidationError("Either the 'repository' or 'repository_version' "
                                              "can be specified - not both.")

        if not repository_version:
            repository_version = RepositoryVersion.latest(repository)

        result = tasks.publish.apply_async_with_reservation(
            [repository_version.repository, publisher],
            kwargs={
                'publisher_pk': str(publisher.pk),
                'repository_version_pk': str(repository_version.pk)
            }
        )
        return OperationPostponedResponse(result, request)
Example #7
def synchronize(remote_pk, repository_pk):
    """
    Sync content from the remote repository.

    Create a new version of the repository that is synchronized with the remote.

    Args:
        remote_pk (str): The remote PK.
        repository_pk (str): The repository PK.

    Raises:
        ValueError: If the remote does not specify a url to sync.

    """
    remote = RpmRemote.objects.get(pk=remote_pk)
    repository = Repository.objects.get(pk=repository_pk)

    if not remote.url:
        raise ValueError(_('A remote must have a url specified to synchronize.'))

    log.info(_('Synchronizing: repository={r} remote={p}').format(
        r=repository.name, p=remote.name))

    first_stage = RpmFirstStage(remote)
    with WorkingDirectory():
        with RepositoryVersion.create(repository) as new_version:
            loop = asyncio.get_event_loop()
            stages = [
                first_stage,
                QueryExistingArtifacts(), ArtifactDownloader(), ArtifactSaver(),
                QueryExistingContentUnits(), ErratumContentUnitSaver(),
                ContentUnitAssociation(new_version), EndStage()
            ]
            pipeline = create_pipeline(stages)
            loop.run_until_complete(pipeline)
Example #8
    def _show_on_demand_content(self, checksums):
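        # Q(pk__in=[]) matches nothing, giving a safe base for OR-ing one clause per checksum.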
        query = Q(pk__in=[])
        for checksum in checksums:
            query |= Q(**{f"{checksum}__isnull": False})

        remote_artifacts = RemoteArtifact.objects.filter(query).filter(
            content_artifact__artifact__isnull=True)
        ras_size = remote_artifacts.aggregate(Sum("size"))["size__sum"]

        content_artifacts = ContentArtifact.objects.filter(
            remoteartifact__pk__in=remote_artifacts)
        content = Content.objects.filter(
            contentartifact__pk__in=content_artifacts)
        repo_versions = RepositoryVersion.versions_containing_content(
            content).select_related("repository")

        self.stdout.write(
            _("Found {} on-demand content units with forbidden checksums.").format(
                content.count()))
        if content.count():
            self.stdout.write(
                _("There is approx {:.2f}Mb of content to be downloaded.").format(
                    ras_size / (1024**2)))

        if repo_versions.exists():
            self.stdout.write(
                _("\nAffected repository versions with remote content:"))
            self._print_out_repository_version_hrefs(repo_versions)
Example #9
    def _show_immediate_content(self, forbidden_checksums):
        allowed_checksums = set(
            constants.ALL_KNOWN_CONTENT_CHECKSUMS.symmetric_difference(
                forbidden_checksums))
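        # Match artifacts that carry any forbidden checksum or lack any of the allowed ones.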
        query_forbidden = Q()
        query_required = Q()
        for checksum in forbidden_checksums:
            query_forbidden |= Q(**{f"{checksum}__isnull": False})

        for allowed_checksum in allowed_checksums:
            query_required |= Q(**{f"{allowed_checksum}__isnull": True})

        artifacts = Artifact.objects.filter(query_forbidden | query_required)
        content_artifacts = ContentArtifact.objects.filter(
            artifact__in=artifacts)
        content = Content.objects.filter(
            contentartifact__pk__in=content_artifacts)
        repo_versions = RepositoryVersion.versions_containing_content(
            content).select_related("repository")

        self.stdout.write(
            _("Found {} downloaded content units with forbidden or missing checksums.").format(
                content.count()))
        if content.count() > 0:
            self.stdout.write(
                _("There is approx. {:.2f}Mb content data to be re-hashed.").format(
                    artifacts.aggregate(Sum("size"))["size__sum"] / (1024**2)))

        if repo_versions.exists():
            self.stdout.write(
                _("\nAffected repository versions with present content:"))
            self._print_out_repository_version_hrefs(repo_versions)
Example #10
def publish(publisher_pk, repository_pk):
    """
    Use provided publisher to create a Publication based on a RepositoryVersion.

    Args:
        publisher_pk (str): Use the publish settings provided by this publisher.
        repository_pk (str): Create a Publication from the latest version of this Repository.
    """
    publisher = FilePublisher.objects.get(pk=publisher_pk)
    repository = Repository.objects.get(pk=repository_pk)
    repository_version = RepositoryVersion.latest(repository)

    log.info(
        _('Publishing: repository=%(repository)s, version=%(version)d, publisher=%(publisher)s'),
        {
            'repository': repository.name,
            'version': repository_version.number,
            'publisher': publisher.name,
        })

    with WorkingDirectory():
        with Publication.create(repository_version, publisher) as publication:
            manifest = Manifest('PULP_MANIFEST')
            manifest.write(_publish(publication))
            metadata = PublishedMetadata(
                relative_path=os.path.basename(manifest.path),
                publication=publication,
                file=File(open(manifest.path, 'rb')))
            metadata.save()

    log.info(_('Publication: %(publication)s created'),
             {'publication': publication.pk})
Example #11
    def validate(self, data):
        """
        Validate that the Serializer contains valid data.

        Set the RpmRepository based on the RepositoryVersion if only the latter is provided.
        Set the RepositoryVersion based on the RpmRepository if only the latter is provided.
        Convert the human-friendly names of the content types into what Pulp needs to query on.

        """
        super().validate(data)
        if hasattr(self, 'initial_data'):
            validate_unknown_fields(self.initial_data, self.fields)

        new_data = {}
        new_data.update(data)

        source_repo = data.get('source_repo')
        source_repo_version = data.get('source_repo_version')

        if not source_repo and not source_repo_version:
            raise serializers.ValidationError(
                _("Either the 'source_repo' or 'source_repo_version' need to be specified"))

        if source_repo and source_repo_version:
            raise serializers.ValidationError(
                _("Either the 'source_repo' or 'source_repo_version' need to be specified "
                  "but not both.")
            )

        if not source_repo and source_repo_version:
            repo = {'source_repo': source_repo_version.repository}
            new_data.update(repo)

        if source_repo and not source_repo_version:
            version = RepositoryVersion.latest(source_repo)
            if version:
                repo_version = {'source_repo_version': version}
                new_data.update(repo_version)
            else:
                raise serializers.ValidationError(
                    detail=_('Repository has no version available to copy'))

        types = data.get('types')
        final_types = []

        if types:
            for t in types:
                substitution = RPM_PLUGIN_TYPE_CHOICE_MAP.get(t)
                if not substitution:
                    raise serializers.ValidationError(_(
                        "'{type}' is an invalid type, please use one of {choices}".format(
                            type=t,
                            choices=list(RPM_PLUGIN_TYPE_CHOICE_MAP.keys())
                        ))
                    )
                final_types.append(substitution)
            new_data.update({'types': final_types})

        return new_data
Example #12
def synchronize(remote_pk, repository_pk):
    """
    Sync content from the remote repository.

    Create a new version of the repository that is synchronized with the remote.

    Args:
        remote_pk (str): The remote PK.
        repository_pk (str): The repository PK.

    Raises:
        ValueError: When remote has no url specified.

    """
    remote = AnsibleRemote.objects.get(pk=remote_pk)
    repository = Repository.objects.get(pk=repository_pk)
    base_version = RepositoryVersion.latest(repository)

    if not remote.url:
        raise ValueError(
            _('A remote must have a url specified to synchronize.'))

    with WorkingDirectory():
        with RepositoryVersion.create(repository) as new_version:
            log.info(_('Synchronizing: repository=%(r)s remote=%(p)s'), {
                'r': repository.name,
                'p': remote.name
            })
            roles = fetch_roles(remote)
            content = fetch_content(base_version)
            delta = find_delta(roles, content)
            additions = build_additions(remote, roles, delta)
            removals = build_removals(base_version, delta)
            changeset = ChangeSet(remote=remote,
                                  repository_version=new_version,
                                  additions=additions,
                                  removals=removals)
            for report in changeset.apply():
                if not log.isEnabledFor(logging.DEBUG):
                    continue
                log.debug(
                    _('Applied: repository=%(r)s remote=%(p)s change:%(c)s'), {
                        'r': repository.name,
                        'p': remote.name,
                        'c': report,
                    })
Example #13
def synchronize(importer_pk, repository_pk):
    """
    Create a new version of the repository that is synchronized with the remote
    as specified by the importer.

    Args:
        importer_pk (str): The importer PK.
        repository_pk (str): The repository PK.

    Raises:
        ValueError: When feed_url is empty.
    """
    importer = FileImporter.objects.get(pk=importer_pk)
    repository = Repository.objects.get(pk=repository_pk)
    base_version = RepositoryVersion.latest(repository)

    if not importer.feed_url:
        raise ValueError(
            _('An importer must have a feed_url specified to synchronize.'))

    with WorkingDirectory():
        with RepositoryVersion.create(repository) as new_version:
            log.info(_('Synchronizing: repository=%(r)s importer=%(p)s'), {
                'r': repository.name,
                'p': importer.name
            })
            manifest = fetch_manifest(importer)
            content = fetch_content(base_version)
            delta = find_delta(manifest, content)
            additions = build_additions(importer, manifest, delta)
            removals = build_removals(base_version, delta)
            changeset = ChangeSet(importer=importer,
                                  repository_version=new_version,
                                  additions=additions,
                                  removals=removals)
            for report in changeset.apply():
                if not log.isEnabledFor(logging.DEBUG):
                    continue
                log.debug(
                    _('Applied: repository=%(r)s importer=%(p)s change:%(c)s'),
                    {
                        'r': repository.name,
                        'p': importer.name,
                        'c': report,
                    })
Example #14
 def get_repository_version(self):
     """
     Returns the repository version that is supposed to be served by this DockerDistribution.
     """
     if self.repository:
         return RepositoryVersion.latest(self.repository)
     elif self.repository_version:
         return self.repository_version
     else:
         return None
Example #15
 def get_distro_content(path):
     """Returns distribution content."""
     distro = get_object_or_404(AnsibleDistribution, base_path=path)
     if distro.repository_version:
         return distro.repository_version.content
     else:
         repo_version = RepositoryVersion.latest(distro.repository)
         if repo_version is None:
             return Content.objects.none()
         else:
             return repo_version.content
Example #16
def publish(publisher_pk, repository_pk):
    """
    Use provided publisher to create a Publication based on a RepositoryVersion.

    Args:
        publisher_pk (str): Use the publish settings provided by this publisher.
        repository_pk (str): Create a Publication from the latest version of this Repository.
    """
    publisher = GemPublisher.objects.get(pk=publisher_pk)
    repository = Repository.objects.get(pk=repository_pk)
    repository_version = RepositoryVersion.latest(repository)

    log.info(
        _('Publishing: repository=%(repository)s, version=%(version)d, publisher=%(publisher)s'),
        {
            'repository': repository.name,
            'version': repository_version.number,
            'publisher': publisher.name,
        })

    with WorkingDirectory():
        with Publication.create(repository_version, publisher) as publication:
            specs = Specs()
            latest_versions = {}
            prerelease_specs = Specs()
            for content in GemContent.objects.filter(
                    pk__in=publication.repository_version.content).order_by('-created'):
                for content_artifact in content.contentartifact_set.all():
                    published_artifact = PublishedArtifact(
                        relative_path=content_artifact.relative_path,
                        publication=publication,
                        content_artifact=content_artifact)
                    published_artifact.save()
                if re.fullmatch(r"[0-9.]*", content.version):
                    specs.append(Key(content.name, content.version))
                    old_ver = latest_versions.get(content.name)
                    if old_ver is None or version.parse(
                            old_ver) < version.parse(content.version):
                        latest_versions[content.name] = content.version
                else:
                    prerelease_specs.append(Key(content.name, content.version))
            latest_specs = Specs(
                Key(name, version)
                for name, version in latest_versions.items())

            _publish_specs(specs, 'specs.4.8', publication)
            _publish_specs(latest_specs, 'latest_specs.4.8', publication)
            _publish_specs(prerelease_specs, 'prerelease_specs.4.8',
                           publication)

    log.info(_('Publication: %(publication)s created'),
             {'publication': publication.pk})
Example #17
def one_shot_upload(artifact_pk, filename, repository_pk=None):
    """
    One shot upload for pulp_python

    Args:
        artifact_pk: validated artifact
        filename: file name
        repository_pk: optional repository to add Content to
    """
    # iterate through extensions since splitext does not support things like .tar.gz
    for ext, packagetype in DIST_EXTENSIONS.items():
        if filename.endswith(ext):
            # Copy file to a temp directory under the user provided filename, we do this
            # because pkginfo validates that the filename has a valid extension before
            # reading it
            with tempfile.TemporaryDirectory() as td:
                temp_path = os.path.join(td, filename)
                artifact = Artifact.objects.get(pk=artifact_pk)
                shutil.copy2(artifact.file.path, temp_path)
                metadata = DIST_TYPES[packagetype](temp_path)
                metadata.packagetype = packagetype
                break
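    # for/else: the else branch runs only when no extension matched (the loop never hit break).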
    else:
        raise serializers.ValidationError(
            _("Extension on {} is not a valid python extension "
              "(.whl, .exe, .egg, .tar.gz, .tar.bz2, .zip)").format(filename))
    data = parse_project_metadata(vars(metadata))
    data['classifiers'] = [{
        'name': classifier
    } for classifier in metadata.classifiers]
    data['packagetype'] = metadata.packagetype
    data['version'] = metadata.version
    data['filename'] = filename
    data['_relative_path'] = filename

    new_content = PythonPackageContent.objects.create(
        filename=filename,
        packagetype=metadata.packagetype,
        name=data['name'],
        version=data['version'])

    queryset = PythonPackageContent.objects.filter(pk=new_content.pk)

    if repository_pk:
        repository = Repository.objects.get(pk=repository_pk)
        with RepositoryVersion.create(repository) as new_version:
            new_version.add_content(queryset)

    resource = CreatedResource(content_object=new_content)
    resource.save()
Example #18
 def create(self):
     """
     Perform the work. This is the long-blocking call where all syncing occurs.
     """
     with WorkingDirectory():
         with RepositoryVersion.create(self.repository) as new_version:
             loop = asyncio.get_event_loop()
             stages = self.pipeline_stages(new_version)
             stages.append(ContentUnitAssociation(new_version))
             if self.mirror:
                 stages.append(ContentUnitUnassociation(new_version))
             stages.append(EndStage())
             pipeline = create_pipeline(stages)
             loop.run_until_complete(pipeline)
Example #19
def tag_image(manifest_pk, tag, repository_pk):
    """
    Create a new repository version out of the passed tag name and the manifest.

    If the tag name is already associated with a manifest of the same digest, no new
    content is created. A tag name cannot point to two different manifests, so an existing
    Tag whose manifest digest differs from the one passed in the POST request is removed
    from the new repository version.
    """
    manifest = Manifest.objects.get(pk=manifest_pk)
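    # Take the manifest's stored artifact; it backs the ContentArtifact for the new tag below.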
    artifact = manifest._artifacts.all()[0]

    repository = Repository.objects.get(pk=repository_pk)
    latest_version = RepositoryVersion.latest(repository)

    tags_to_remove = Tag.objects.filter(
        pk__in=latest_version.content.all(),
        name=tag).exclude(tagged_manifest=manifest)

    manifest_tag, created = Tag.objects.get_or_create(name=tag,
                                                      tagged_manifest=manifest)

    if created:
        resource = CreatedResource(content_object=manifest_tag)
        resource.save()

    ContentArtifact.objects.get_or_create(artifact=artifact,
                                          content=manifest_tag,
                                          relative_path=tag)

    tags_to_add = Tag.objects.filter(pk=manifest_tag.pk).exclude(
        pk__in=latest_version.content.all())

    with RepositoryVersion.create(repository) as repository_version:
        repository_version.remove_content(tags_to_remove)
        repository_version.add_content(tags_to_add)
Example #20
    def create(self, validated_data):
        """Save the GemContent unit.

        This must be used inside a task that locks on the Artifact and if given, the repository.
        """
        repository = validated_data.pop("repository", None)
        content = super().create(validated_data)

        if repository:
            content_to_add = self.Meta.model.objects.filter(pk=content.pk)

            # create new repo version with uploaded package
            with RepositoryVersion.create(repository) as new_version:
                new_version.add_content(content_to_add)
        return content
Example #21
def synchronize(remote_pk, repository_pk):
    """
    Sync content from the remote repository.

    Create a new version of the repository that is synchronized with the remote.

    Args:
        remote_pk (str): The remote PK.
        repository_pk (str): The repository PK.

    Raises:
        ValueError: If the remote does not specify a url to sync.

    """
    remote = RpmRemote.objects.get(pk=remote_pk)
    repository = Repository.objects.get(pk=repository_pk)

    dupe_criteria = {'model': Package,
                     'field_names': ['name', 'epoch', 'version', 'release', 'arch']}

    if not remote.url:
        raise ValueError(_('A remote must have a url specified to synchronize.'))

    log.info(_('Synchronizing: repository={r} remote={p}').format(
        r=repository.name, p=remote.name))

    download_artifacts = (remote.policy == Remote.IMMEDIATE)
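    # The artifact download stages below are only added for the 'immediate' policy.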
    first_stage = RpmFirstStage(remote)
    with WorkingDirectory():
        with RepositoryVersion.create(repository) as new_version:
            loop = asyncio.get_event_loop()
            remove_duplicates_stage = RemoveDuplicates(new_version, **dupe_criteria)
            stages = [first_stage]

            if download_artifacts:
                stages.extend([QueryExistingArtifacts(), ArtifactDownloader(), ArtifactSaver()])

            stages.extend([
                QueryExistingContents(), ErratumContentSaver(), remove_duplicates_stage,
                ContentAssociation(new_version), EndStage()
            ])
            pipeline = create_pipeline(stages)
            loop.run_until_complete(pipeline)
Example #22
    def create(self, request):
        """
        <!-- User-facing documentation, rendered as html-->
        Dispatches a publish task, which generates metadata that will be used by pip.
        """
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        repository_version = serializer.validated_data.get(
            'repository_version')

        # Safe because version OR repository is enforced by serializer.
        if not repository_version:
            repository = serializer.validated_data.get('repository')
            repository_version = RepositoryVersion.latest(repository)

        result = enqueue_with_reservation(
            tasks.publish, [repository_version.repository],
            kwargs={'repository_version_pk': repository_version.pk})
        return core_viewsets.OperationPostponedResponse(result, request)
Example #23
 def create(self):
     """
     Perform the work. This is the long-blocking call where all syncing occurs.
     """
     with WorkingDirectory():
         with RepositoryVersion.create(self.repository) as new_version:
             loop = asyncio.get_event_loop()
             stages = [
                 self.first_stage,
                 QueryExistingArtifacts(), ArtifactDownloader(), ArtifactSaver(),
                 QueryExistingContentUnits(), ContentUnitSaver(),
                 ContentUnitAssociation(new_version)
             ]
             if self.sync_mode == 'additive':
                 stages.append(EndStage())
             elif self.sync_mode == 'mirror':
                 stages.extend([ContentUnitUnassociation(new_version), EndStage()])
             pipeline = create_pipeline(stages)
             loop.run_until_complete(pipeline)
Example #24
    def create(self, request):
        """
        Queues a task that publishes a new Ansible Publication.
        """
        serializer = RepositoryPublishURLSerializer(
            data=request.data, context={'request': request})
        serializer.is_valid(raise_exception=True)
        repository_version = serializer.validated_data.get(
            'repository_version')

        # Safe because version OR repository is enforced by serializer.
        if not repository_version:
            repository = serializer.validated_data.get('repository')
            repository_version = RepositoryVersion.latest(repository)

        result = enqueue_with_reservation(
            tasks.publish, [repository_version.repository],
            kwargs={'repository_version_pk': str(repository_version.pk)})
        return OperationPostponedResponse(result, request)
Example #25
    def validate(self, data):
        """
        Validate data passed through a request call.

        Check that the repository has a latest repository version. A new dictionary is
        built from the passed data and extended with that latest version.
        """
        new_data = {}
        new_data.update(data)

        latest_version = RepositoryVersion.latest(data['repository'])
        if not latest_version:
            raise serializers.ValidationError(
                _("The latest repository version of '{}' was not found".format(
                    data['repository'])))

        new_data['latest_version'] = latest_version
        return new_data
Example #26
def synchronize(remote_pk, repository_pk):
    """
    Sync content from the remote repository.

    Create a new version of the repository that is synchronized with the remote.

    Args:
        remote_pk (str): The remote PK.
        repository_pk (str): The repository PK.

    Raises:
        ValueError: If the remote does not specify a url to sync.

    """
    remote = RpmRemote.objects.get(pk=remote_pk)
    repository = Repository.objects.get(pk=repository_pk)

    if not remote.url:
        raise ValueError(
            _('A remote must have a url specified to synchronize.'))

    log.info(
        _('Synchronizing: repository={r} remote={p}').format(r=repository.name,
                                                             p=remote.name))

    first_stage = RpmFirstStage(remote)
    with WorkingDirectory():
        with RepositoryVersion.create(repository) as new_version:
            loop = asyncio.get_event_loop()
            stages = [
                first_stage,
                QueryExistingArtifacts(),
                ArtifactDownloader(),
                ArtifactSaver(),
                QueryExistingContentUnits(),
                ErratumContentUnitSaver(),
                ContentUnitAssociation(new_version),
                EndStage()
            ]
            pipeline = create_pipeline(stages)
            loop.run_until_complete(pipeline)
Example #27
 def create(self):
     """
     Perform the work. This is the long-blocking call where all syncing occurs.
     """
     with WorkingDirectory():
         with RepositoryVersion.create(self.repository) as new_version:
             loop = asyncio.get_event_loop()
             stages = [
                 self.first_stage,
                 QueryExistingContentUnits(),
                 ExistingContentNeedsNoArtifacts(),
                 ArtifactDownloader(),
                 ArtifactSaver(),
                 ContentUnitSaver(),
                 ContentUnitAssociation(new_version)
             ]
             if self.mirror:
                 stages.append(ContentUnitUnassociation(new_version))
             stages.append(EndStage())
             pipeline = create_pipeline(stages)
             loop.run_until_complete(pipeline)
Example #28
    def create_repo_version(pulp3_repo_name, pulp2_repo):
        """
        Create a repo version based on a pulp2 repository

        Args:
            pulp3_repo_name(str): repository name in Pulp 3
            pulp2_repo(Pulp2Repository): a pre-migrated repository to create a repo version for
        """

        pulp3_repo = Repository.objects.get(name=pulp3_repo_name)
        unit_ids = Pulp2RepoContent.objects.filter(pulp2_repository=pulp2_repo).values_list(
            'pulp2_unit_id', flat=True)
        incoming_content = set(Pulp2Content.objects.filter(pulp2_id__in=unit_ids).only(
            'pulp3_content').values_list('pulp3_content__pk', flat=True))

        with RepositoryVersion.create(pulp3_repo) as new_version:
            repo_content = set(new_version.content.values_list('pk', flat=True))
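            # Diff incoming pulp2 content against current content so the new version mirrors the pulp2 repo.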
            to_add = incoming_content - repo_content
            to_delete = repo_content - incoming_content
            new_version.add_content(Content.objects.filter(pk__in=to_add))
            new_version.remove_content(Content.objects.filter(pk__in=to_delete))
Example #29
    def create(self, request):
        """
        <!-- User-facing documentation, rendered as html-->
        Dispatches a publish task, which generates metadata that will be used by pip.
        """
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        repository_version = serializer.validated_data.get('repository_version')

        # Safe because version OR repository is enforced by serializer.
        if not repository_version:
            repository = serializer.validated_data.get('repository')
            repository_version = RepositoryVersion.latest(repository)

        result = enqueue_with_reservation(
            tasks.publish,
            [repository_version.repository],
            kwargs={
                'repository_version_pk': repository_version.pk
            }
        )
        return platform.OperationPostponedResponse(result, request)
Example #30
    def validate(self, data):
        """
        Ensure that exactly one of 'repository' and 'repository_version' is provided,
        and resolve the latest version when only the repository is given.
        """
        repository = data.get('repository')
        repository_version = data.get('repository_version')

        if not repository and not repository_version:
            raise serializers.ValidationError(
                _("Either the 'repository' or 'repository_version' need to be specified"))
        elif not repository and repository_version:
            return data
        elif repository and not repository_version:
            version = RepositoryVersion.latest(repository)
            if version:
                new_data = {'repository_version': version}
                new_data.update(data)
                return new_data
            else:
                raise serializers.ValidationError(
                    detail=_('Repository has no version available to publish'))
        raise serializers.ValidationError(
            _("Either the 'repository' or 'repository_version' need to be specified "
              "but not both.")
        )
Example #31
def one_shot_upload(artifact_pk, filename, repository_pk=None):
    """
    One shot upload for RPM package.

    Args:
        artifact_pk: validated artifact for the file
        filename: name of the file
        repository_pk: optional repository to add the new package to
    """
    artifact = Artifact.objects.get(pk=artifact_pk)

    # export META from rpm and prepare dict as saveable format
    try:
        new_pkg = _prepare_package(artifact, filename)
    except OSError:
        raise OSError('RPM file cannot be parsed for metadata.')

    pkg, created = Package.objects.get_or_create(**new_pkg)

    if not created:
        raise OSError('RPM package {} already exists.'.format(pkg.filename))

    ContentArtifact.objects.create(
        artifact=artifact,
        content=pkg,
        relative_path=filename
    )

    resource = CreatedResource(content_object=pkg)
    resource.save()

    if repository_pk:
        repository = Repository.objects.get(pk=repository_pk)
        content_to_add = Package.objects.filter(pkgId=pkg.pkgId)

        # create new repo version with uploaded package
        with RepositoryVersion.create(repository) as new_version:
            new_version.add_content(content_to_add)
Example #32
def copy_content(source_repo_version_pk, dest_repo_pk, types):
    """
    Copy content from one repo to another.

    Args:
        source_repo_version_pk: repository version primary key to copy units from
        dest_repo_pk: repository primary key to copy units into
        types: a tuple of strings representing the '_type' values of types to include in the copy
    """
    source_repo_version = RepositoryVersion.objects.get(
        pk=source_repo_version_pk)
    dest_repo = Repository.objects.get(pk=dest_repo_pk)

    query = None
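    # OR together a Q(_type=...) clause for each requested content type.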
    for ptype in types:
        if query:
            query = query | Q(_type=ptype)
        else:
            query = Q(_type=ptype)

    content_to_copy = source_repo_version.content.filter(query)
    with RepositoryVersion.create(dest_repo) as new_version:
        new_version.add_content(content_to_copy)
Example #33
def sync(remote_pk, repository_pk):
    """
    Sync Collections with ``remote_pk``, and save a new RepositoryVersion for ``repository_pk``.

    Args:
        remote_pk (str): The remote PK.
        repository_pk (str): The repository PK.

    Raises:
        ValueError: If the remote does not specify a URL to sync or a ``whitelist`` of Collections
            to sync.

    """
    remote = CollectionRemote.objects.get(pk=remote_pk)
    repository = Repository.objects.get(pk=repository_pk)

    if not remote.url:
        raise ValueError(
            _("A CollectionRemote must have a 'url' specified to synchronize.")
        )

    if not remote.whitelist:
        raise ValueError(
            _("A CollectionRemote must have a 'whitelist' specified to synchronize."))

    repository_spec_strings = remote.whitelist.split(' ')

    def nowhere(*args, **kwargs):
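        # Intentionally does nothing; passed as display_callback to silence install output.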
        pass

    collections_created_pks = []

    with tempfile.TemporaryDirectory() as temp_ansible_path:
        galaxy_context = GalaxyContext(
            collections_path=temp_ansible_path,
            server={
                'url': remote.url,
                'ignore_certs': False,
            },
        )

        install_repository_specs_loop(
            display_callback=nowhere,
            galaxy_context=galaxy_context,
            repository_spec_strings=repository_spec_strings,
        )

        content_walk_generator = os.walk(temp_ansible_path)
        for dirpath, dirnames, filenames in content_walk_generator:
            if 'MANIFEST.json' in filenames:
                with open(dirpath + os.path.sep + 'MANIFEST.json') as manifest_file:
                    manifest_data = json.load(manifest_file)
                info = manifest_data['collection_info']
                filename = '{namespace}-{name}-{version}'.format(
                    namespace=info['namespace'],
                    name=info['name'],
                    version=info['version'],
                )
                tarfile_path = temp_ansible_path + os.path.sep + filename + '.tar.gz'
                with tarfile.open(name=tarfile_path, mode='w|gz') as newtar:
                    newtar.add(dirpath, arcname=filename)

                with transaction.atomic():
                    collection, created = Collection.objects.get_or_create(
                        namespace=info['namespace'],
                        name=info['name'],
                        version=info['version'])

                    if created:
                        artifact = Artifact.init_and_validate(newtar.name)
                        artifact.save()

                        ContentArtifact.objects.create(
                            artifact=artifact,
                            content=collection,
                            relative_path=collection.relative_path,
                        )

                        collections_created_pks.append(collection.pk)

    if collections_created_pks:
        with RepositoryVersion.create(repository) as new_version:
            collections = Collection.objects.filter(
                pk__in=collections_created_pks)
            new_version.add_content(collections)