Example #1
    def get(self, request, repo_id):
        """
        Looks for query parameters 'importers' and 'distributors', and will add
        the corresponding fields to the repository returned. Query parameter
        'details' is equivalent to passing both 'importers' and 'distributors'.

        :param request: WSGI request object
        :type  request: django.core.handlers.wsgi.WSGIRequest
        :param repo_id: id of requested repository
        :type  repo_id: str

        :return: Response containing a serialized dict for the requested repo.
        :rtype: django.http.HttpResponse
        :raises exceptions.MissingResource: if repo cannot be found
        """
        repo_obj = model.Repository.objects.get_repo_or_missing_resource(repo_id)
        repo = serializers.Repository(repo_obj).data

        # Add importers and distributors to the dicts if requested.
        details = request.GET.get('details', 'false').lower() == 'true'
        if request.GET.get('importers', 'false').lower() == 'true' or details:
            _merge_related_objects('importers', model.Importer, (repo,))
        if request.GET.get('distributors', 'false').lower() == 'true' or details:
            _merge_related_objects('distributors', model.Distributor, (repo,))
        if details:
            repo['total_repository_units'] = sum(repo['content_unit_counts'].itervalues())
            total_missing = repo_controller.missing_unit_count(repo_obj.repo_id)
            repo['locally_stored_units'] = repo['total_repository_units'] - total_missing

        return generate_json_response_with_pulp_encoder(repo)
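
All three query flags are parsed with the same case-insensitive string comparison, and 'details' implies the other two. A minimal standalone sketch of that flag logic, with a plain dict standing in for request.GET (the values are hypothetical):

    # Sketch of the flag parsing above; `params` stands in for request.GET.
    def flag_enabled(params, name, details=False):
        """True when ?name=true was passed (case-insensitive) or implied by details."""
        return params.get(name, 'false').lower() == 'true' or details

    params = {'details': 'True'}
    details = flag_enabled(params, 'details')
    assert details
    assert flag_enabled(params, 'importers', details)     # implied by ?details=true
    assert flag_enabled(params, 'distributors', details)  # implied by ?details=true
    assert not flag_enabled({}, 'importers')              # absent flags default to false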
Example #2
    def get_metadata(self, metadata_files):
        """
        :param metadata_files: instance of MetadataFiles
        :type: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles

        :return:    instance of MetadataFiles where each relevant file has been
                    identified and downloaded.
        :rtype:     pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles
        """

        self.downloader = metadata_files.downloader
        scratchpad = self.conduit.get_scratchpad() or {}
        previous_revision = scratchpad.get(constants.REPOMD_REVISION_KEY, 0)
        previous_skip_set = set(scratchpad.get(constants.PREVIOUS_SKIP_LIST, []))
        current_skip_set = set(self.config.get(constants.CONFIG_SKIP, []))
        self.current_revision = metadata_files.revision
        # determine missing units
        missing_units = repo_controller.missing_unit_count(self.repo.repo_id)
        # if the revision is positive, hasn't increased, the skip list doesn't include
        # new types that weren't present on the last run, and no downloads are pending...
        if 0 < metadata_files.revision <= previous_revision \
                and previous_skip_set - current_skip_set == set() \
                and (self.download_deferred or not missing_units):
            _logger.info(_('upstream repo metadata has not changed. Skipping steps.'))
            self.skip_repomd_steps = True
            return metadata_files
        else:
            _logger.info(_('Downloading metadata files.'))
            metadata_files.download_metadata_files()
            self.downloader = None
            _logger.info(_('Generating metadata databases.'))
            metadata_files.generate_dbs()
            self.import_unknown_metadata_files(metadata_files)
            return metadata_files
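
The skip test above hinges on three conditions: the repomd revision is positive and not newer than the stored one, nothing that was skipped last run is wanted now, and no units still need downloading (or downloads are deferred anyway). A standalone sketch of that predicate with hypothetical values:

    # Standalone sketch of the skip predicate above (all values hypothetical).
    def can_skip(revision, previous_revision, previous_skip, current_skip,
                 download_deferred, missing_units):
        return (0 < revision <= previous_revision
                and set(previous_skip) - set(current_skip) == set()
                and (download_deferred or not missing_units))

    # Same revision, same skip list, nothing missing locally: safe to skip.
    assert can_skip(5, 5, ['erratum'], ['erratum'], False, 0)
    # 'drpm' was skipped last run but is wanted now: a full pass is required.
    assert not can_skip(5, 5, ['erratum', 'drpm'], ['erratum'], False, 0)
    # Revision 0 means "unknown", so it never allows skipping.
    assert not can_skip(0, 0, [], [], True, 0)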
Example #3
    def get_metadata(self, metadata_files):
        """
        Get metadata and decide whether to sync the repository or not.

        :param metadata_files: instance of MetadataFiles
        :type  metadata_files: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles

        :return:    instance of MetadataFiles where each relevant file has been
                    identified and downloaded.
        :rtype:     pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles
        """

        self.downloader = metadata_files.downloader
        scratchpad = self.conduit.get_scratchpad() or {}
        previous_revision = scratchpad.get(constants.REPOMD_REVISION_KEY, 0)
        previous_skip_set = set(scratchpad.get(constants.PREVIOUS_SKIP_LIST, []))
        current_skip_set = set(self.config.get(constants.CONFIG_SKIP, []))
        self.current_revision = metadata_files.revision
        last_sync = self.conduit.last_sync()
        sync_due_to_unit_removal = False
        if last_sync is not None:
            last_sync = dateutils.parse_iso8601_datetime(last_sync)
            last_removed = self.repo.last_unit_removed
            sync_due_to_unit_removal = last_removed is not None and last_sync < last_removed
        # determine missing units
        missing_units = repo_controller.missing_unit_count(self.repo.repo_id)
        # if the current MD revision is not newer than the old one
        # and we aren't using an override URL
        # and the skip list doesn't have any new types
        # and there are no missing units, or we have deferred download enabled
        # and no units were removed after last sync
        # then skip fetching the repo MD :)
        if 0 < metadata_files.revision <= previous_revision \
                and not self.config.override_config.get(importer_constants.KEY_FEED) \
                and previous_skip_set - current_skip_set == set() \
                and (self.download_deferred or not missing_units) \
                and not sync_due_to_unit_removal:
            _logger.info(_('upstream repo metadata has not changed. Skipping steps.'))
            self.skip_repomd_steps = True
            return metadata_files
        else:
            _logger.info(_('Downloading metadata files.'))
            metadata_files.download_metadata_files()
            self.downloader = None
            _logger.info(_('Generating metadata databases.'))
            metadata_files.generate_dbs()
            self.import_unknown_metadata_files(metadata_files)
            return metadata_files
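
Compared to the previous example, this variant adds two guards: a feed override in the sync config always forces a fetch, and so does any unit removal that happened after the last sync. A sketch of the removal check, with plain datetimes standing in for the parsed ISO 8601 timestamps (values are hypothetical):

    from datetime import datetime, timedelta

    # Sketch of the unit-removal guard above; datetimes stand in for the
    # results of dateutils.parse_iso8601_datetime.
    last_sync = datetime(2017, 3, 1, 12, 0)
    last_unit_removed = last_sync + timedelta(minutes=30)

    sync_due_to_unit_removal = (last_unit_removed is not None
                                and last_sync < last_unit_removed)
    assert sync_due_to_unit_removal  # a removal after the last sync forces a fetch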
Example #4
    def get_metadata(self, metadata_files):
        """
        Get metadata and decide whether to sync the repository or not.

        :param metadata_files: instance of MetadataFiles
        :type  metadata_files: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles

        :return:    instance of MetadataFiles where each relevant file has been
                    identified and downloaded.
        :rtype:     pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles
        """
        self.downloader = metadata_files.downloader
        scratchpad = self.conduit.get_scratchpad() or {}
        previous_revision = scratchpad.get(constants.REPOMD_REVISION_KEY, 0)
        self.current_revision = metadata_files.revision
        # determine missing units
        missing_units = repo_controller.missing_unit_count(self.repo.repo_id)

        force_full_sync = repo_controller.check_perform_full_sync(self.repo.repo_id,
                                                                  self.conduit,
                                                                  self.config)

        # if the platform does not prescribe forcing a full sync
        # (due to removed units, the force_full flag, a config change, etc.),
        # the current MD revision is not newer than the old one,
        # and there are no missing units or deferred download is enabled,
        # then skip fetching the repo MD :)
        skip_sync_steps = not force_full_sync and \
            0 < self.current_revision <= previous_revision and \
            (self.download_deferred or not missing_units)

        if skip_sync_steps:
            _logger.info(_('upstream repo metadata has not changed. Skipping steps.'))
            self.skip_repomd_steps = True
            return metadata_files
        else:
            _logger.info(_('Downloading metadata files.'))
            metadata_files.download_metadata_files()
            self.downloader = None
            _logger.info(_('Generating metadata databases.'))
            metadata_files.generate_dbs()
            self.import_unknown_metadata_files(metadata_files)
            return metadata_files
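
This version folds the per-importer guards from the earlier variants (unit removals, force-full flags, config changes) into a single platform call, repo_controller.check_perform_full_sync, so the importer only combines that verdict with the revision and download-policy checks. A sketch of the combined decision with hypothetical inputs:

    # Sketch of the consolidated skip decision above (all values hypothetical).
    force_full_sync = False           # platform saw no removals/force_full/config change
    current_revision, previous_revision = 7, 7
    download_deferred, missing_units = False, 0

    skip_sync_steps = (not force_full_sync
                       and 0 < current_revision <= previous_revision
                       and (download_deferred or not missing_units))
    assert skip_sync_steps            # nothing changed anywhere: skip the repomd steps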
Example #5
    def _run(self, tmp_dir):
        """
        Look for a distribution in the target repo and sync it if found

        :param tmp_dir: The absolute path to the temporary directory
        :type tmp_dir: str
        """
        treeinfo_path = self.get_treefile(tmp_dir)
        if not treeinfo_path:
            _logger.debug(_('No treeinfo found'))
            return

        try:
            unit, files = self.parse_treeinfo_file(treeinfo_path)
        except ValueError:
            _logger.error(_('could not parse treeinfo'))
            self.progress_report['state'] = constants.STATE_FAILED
            return

        existing_units = repo_controller.find_repo_content_units(
            self.repo,
            repo_content_unit_q=Q(unit_type_id=ids.TYPE_ID_DISTRO),
            yield_content_unit=True)

        existing_units = list(existing_units)

        # determine missing units
        missing_units = repo_controller.missing_unit_count(self.repo.repo_id)
        # Continue only when the distribution has changed.
        if len(existing_units) == 1 and \
                self.existing_distribution_is_current(existing_units[0], unit) and \
                (self.download_deferred or not missing_units):
            _logger.info(_('upstream distribution unchanged; skipping'))
            return

        # Process the distribution
        dist_files, pulp_dist_xml_path = self.process_distribution(tmp_dir)
        files.extend(dist_files)

        self.update_unit_files(unit, files)

        # Download distribution files
        if not self.download_deferred:
            try:
                downloaded = self.download_files(tmp_dir, files)
            except DownloadFailed:
                # All files must be downloaded to continue.
                return
        else:
            unit.downloaded = False
            downloaded = []

        # Save the unit.
        unit.save()

        # Update deferred downloading catalog
        self.update_catalog_entries(unit, files)

        # The treeinfo and PULP_DISTRIBUTION.xml files are always imported into
        # platform storage regardless of the download policy
        unit.safe_import_content(treeinfo_path,
                                 os.path.basename(treeinfo_path))
        if pulp_dist_xml_path is not None:
            unit.safe_import_content(pulp_dist_xml_path,
                                     os.path.basename(pulp_dist_xml_path))

        # The downloaded files are imported into platform storage.
        if downloaded:
            for destination, location in downloaded:
                unit.safe_import_content(destination, location)
            # The loop has no break, so the original for/else always reached
            # this point: once every file is imported, mark the unit downloaded.
            if not unit.downloaded:
                unit.downloaded = True
                unit.save()

        # Associate the unit.
        repo_controller.associate_single_unit(self.repo, unit)

        # find any old distribution units and remove them. See BZ #1150714
        for existing_unit in existing_units:
            if existing_unit == unit:
                continue
            msg = _('Removing out-of-date distribution unit {k} for repo {r}')
            _logger.info(
                msg.format(k=existing_unit.unit_key, r=self.repo.repo_id))
            qs = RepositoryContentUnit.objects.filter(
                repo_id=self.repo.repo_id, unit_id=existing_unit.id)
            qs.delete()
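
The early return in _run fires only when exactly one distribution unit exists, it matches upstream, and no files still need fetching. A sketch of that guard (names are hypothetical):

    # Sketch of the "distribution unchanged" guard above.
    def distribution_unchanged(existing_units, is_current, download_deferred,
                               missing_units):
        return (len(existing_units) == 1 and is_current
                and (download_deferred or not missing_units))

    assert distribution_unchanged(['distro'], True, False, 0)
    # Two units means a stale one must be cleaned up below: no early return.
    assert not distribution_unchanged(['old', 'new'], True, False, 0)
    # Missing files under an immediate download policy also force the full path.
    assert not distribution_unchanged(['distro'], True, False, 3)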
Example #6
    def _run(self, tmp_dir):
        """
        Look for a distribution in the target repo and sync it if found

        :param tmp_dir: The absolute path to the temporary directory
        :type tmp_dir: str
        """
        treeinfo_path = self.get_treefile(tmp_dir)
        if not treeinfo_path:
            _logger.debug(_('No treeinfo found'))
            return
        self.metadata_found = True

        try:
            unit, files = self.parse_treeinfo_file(treeinfo_path)
        except ValueError:
            _logger.error(_('could not parse treeinfo'))
            self.progress_report['state'] = constants.STATE_FAILED
            return

        existing_units = repo_controller.find_repo_content_units(
            self.repo,
            repo_content_unit_q=Q(unit_type_id=ids.TYPE_ID_DISTRO),
            yield_content_unit=True)

        existing_units = list(existing_units)

        # determine missing units
        missing_units = repo_controller.missing_unit_count(self.repo.repo_id)
        # Continue only when the distribution has changed.
        if len(existing_units) == 1 and \
                self.existing_distribution_is_current(existing_units[0], unit) and \
                (self.download_deferred or not missing_units):
            _logger.info(_('upstream distribution unchanged; skipping'))
            return

        # Process the distribution
        dist_files, pulp_dist_xml_path = self.process_distribution(tmp_dir)
        files.extend(dist_files)

        self.update_unit_files(unit, files)

        # Download distribution files
        if not self.download_deferred:
            try:
                downloaded = self.download_files(tmp_dir, files)
            except DownloadFailed:
                # All files must be downloaded to continue.
                return
        else:
            unit.downloaded = False
            downloaded = []

        # Save the unit.
        unit.save()

        # Update deferred downloading catalog
        self.update_catalog_entries(unit, files)

        # The treeinfo and PULP_DISTRIBUTION.xml files are always imported into
        # platform storage regardless of the download policy
        unit.safe_import_content(treeinfo_path, os.path.basename(treeinfo_path))
        if pulp_dist_xml_path is not None:
            unit.safe_import_content(pulp_dist_xml_path, os.path.basename(pulp_dist_xml_path))

        # The downloaded files are imported into platform storage.
        if downloaded:
            for destination, location in downloaded:
                unit.safe_import_content(destination, location)
            # The loop has no break, so the original for/else always reached
            # this point: once every file is imported, mark the unit downloaded.
            if not unit.downloaded:
                unit.downloaded = True
                unit.save()

        # Associate the unit.
        repo_controller.associate_single_unit(self.repo, unit)

        # find any old distribution units and remove them. See BZ #1150714
        for existing_unit in existing_units:
            if existing_unit == unit:
                continue
            msg = _('Removing out-of-date distribution unit {k} for repo {r}')
            _logger.info(msg.format(k=existing_unit.unit_key, r=self.repo.repo_id))
            qs = RepositoryContentUnit.objects.filter(
                repo_id=self.repo.repo_id,
                unit_id=existing_unit.id)
            qs.delete()
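
Both _run variants end by unassociating every distribution unit other than the one just synced (see BZ #1150714). A compact sketch of that cleanup filter, with hypothetical unit ids:

    # Sketch of the stale-unit cleanup above; unit ids are hypothetical.
    existing_units = ['distro-old', 'distro-new']
    unit = 'distro-new'
    stale = [u for u in existing_units if u != unit]
    assert stale == ['distro-old']  # only out-of-date units get unassociated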