Code example #1
0
File: reposcan.py — Project: tkasparek/vmaas
 def put(cls, **kwargs):
     """Add repositories listed in request to the DB.

     Delegates to ``cls.start_task()`` and returns its message and HTTP
     status code.  On any unexpected failure the full traceback is logged,
     the failed-import metric is bumped, and a generic message is returned
     so no internal details leak to the client.
     """
     try:
         status_code, status_msg = cls.start_task()
         return status_msg, status_code
     except Exception as err:  # pylint: disable=broad-except
         # hash(err) is the idiomatic spelling of err.__hash__(); it tags the
         # client-facing message with an opaque id that can be correlated
         # with the logged traceback without exposing the exception text.
         msg = "Internal server error <%s>" % hash(err)
         LOGGER.exception(msg)
         FAILED_IMPORT_REPO.inc()
         # NOTE(review): 400 is returned even for an internal error; 500 would
         # be more accurate but is kept to preserve the existing API contract.
         return TaskStartResponse(msg, success=False), 400
Code example #2
0
 def import_repositories(self):
     """Create or update repository records in the DB.

     Each repository is imported independently, so one failure does not
     abort the rest.  The total number of failures is logged and added to
     the ``FAILED_IMPORT_REPO`` metric.
     """
     self.logger.info("Importing %d repositories.", len(self.repositories))
     failures = 0
     for repository in self.repositories:
         try:
             self.repo_store.import_repository(repository)
         except Exception:  # pylint: disable=broad-except
             # Log the traceback instead of silently swallowing it — the
             # failure count alone gives no clue about the cause.
             self.logger.exception("Failed to import repository.")
             failures += 1
     if failures > 0:
         self.logger.warning("Failed to import %d repositories.", failures)
         FAILED_IMPORT_REPO.inc(failures)
Code example #3
0
File: reposcan.py — Project: tkasparek/vmaas
 def post(cls, **kwargs):
     """Add repositories listed in request to the DB.

     Parses the request payload into product and repository lists, rejects
     an empty/invalid payload with HTTP 400, otherwise starts the import
     task and returns its message and status code.  On any unexpected
     failure the traceback is logged, the failed-import metric is bumped,
     and a generic message is returned to the client.
     """
     try:
         products, repos = cls._parse_input_list()
         if not products and not repos:
             msg = "Input json is not valid"
             LOGGER.warning(msg)
             return TaskStartResponse(msg, success=False), 400
         status_code, status_msg = cls.start_task(products=products,
                                                  repos=repos)
         return status_msg, status_code
     except Exception as err:  # pylint: disable=broad-except
         # hash(err) is the idiomatic spelling of err.__hash__(); it tags the
         # client-facing message with an opaque id that can be correlated
         # with the logged traceback without exposing the exception text.
         msg = "Internal server error <%s>" % hash(err)
         LOGGER.exception(msg)
         FAILED_IMPORT_REPO.inc()
         # NOTE(review): 400 is returned even for an internal error; 500 would
         # be more accurate but is kept to preserve the existing API contract.
         return TaskStartResponse(msg, success=False), 400
Code example #4
0
    def store(self):  # pylint: disable=too-many-branches,too-many-statements
        """Sync all queued repositories. Process repositories in batches due to disk space and memory usage.

        Workflow:
          1. Download every repomd.xml; repositories whose download failed
             are dropped and their on-disk repodata cleaned.
          2. Parse the repomd files; repositories without a ``repomd``
             attribute set afterwards are treated as already up to date.
          3. Group the remaining repositories into size-limited batches and,
             per batch: download, unpack, load and store the metadata,
             always cleaning the batch's repodata afterwards.
        """
        self.logger.info("Checking %d repositories.", len(self.repositories))

        # Materialize certificates on disk for the downloader; removed again
        # in the final cleanup below.
        self._write_certificate_cache()

        # Download all repomd files first
        failed = self._download_repomds()
        if failed:
            FAILED_REPOMD.inc(len(failed))
            # Sort by URL so the failure handling is deterministic.
            failed_repos = [
                repo for repo in sorted(self.repositories,
                                        key=attrgetter("repo_url"))
                if self._repo_download_failed(repo, failed)
            ]
            self.logger.warning("%d repomd.xml files failed to download.",
                                len(failed))
            self.clean_repodata(failed_repos)

        self._read_repomds()
        # Filter all repositories without repomd attribute set (downloaded repomd is not newer)
        batches = BatchList()
        up_to_date = []

        def md_size(repomd, data_type):
            # Estimated on-disk size of one metadata file (compressed plus
            # uncompressed); returns 0 when the repo doesn't ship that type.
            try:
                mdata = repomd.get_metadata(data_type)
                # open-size is not present for uncompressed files
                return int(mdata.get('size', 0)) + int(
                    mdata.get('open-size', '0'))
            except RepoMDTypeNotFound:
                return 0

        for repository in sorted(self.repositories,
                                 key=attrgetter("repo_url")):
            if repository.repomd:

                # Estimate the repo's metadata footprint so BatchList can cap
                # how much is unpacked on disk at once.
                repo_size = md_size(repository.repomd, 'primary_db')
                # If we use primary_db, we don't even download primary data xml
                if repo_size == 0:
                    repo_size += md_size(repository.repomd, 'primary')

                repo_size += md_size(repository.repomd, 'updateinfo')
                repo_size += md_size(repository.repomd, 'modules')

                batches.add_item(repository, repo_size)
            else:
                up_to_date.append(repository)

        self.clean_repodata(up_to_date)
        self.logger.info("%d repositories are up to date.", len(up_to_date))
        total_repositories = batches.get_total_items()
        completed_repositories = 0
        self.logger.info("%d repositories need to be synced.",
                         total_repositories)

        # Download and process repositories in batches (unpacked metadata files can consume lot of disk space)
        try:  # pylint: disable=too-many-nested-blocks
            for batch in batches:
                self.logger.info("Syncing a batch of %d repositories",
                                 len(batch))
                try:
                    failed = self._download_metadata(batch)
                    if failed:
                        self.logger.warning(
                            "%d metadata files failed to download.",
                            len(failed))
                        failed_repos = [
                            repo for repo in batch
                            if self._repo_download_failed(repo, failed)
                        ]
                        self.clean_repodata(failed_repos)
                        # Keep only repositories whose downloads all succeeded.
                        batch = [
                            repo for repo in batch if repo not in failed_repos
                        ]
                    self._unpack_metadata(batch)
                    for repository in batch:
                        completed_repositories += 1
                        try:
                            repository.load_metadata()
                            self.logger.info(
                                "Syncing repository: %s [%s/%s]", ", ".join(
                                    filter(None, (repository.content_set,
                                                  repository.basearch,
                                                  repository.releasever))),
                                completed_repositories, total_repositories)
                            self.repo_store.store(repository)
                        except Exception:  # pylint: disable=broad-except
                            # One broken repository must not abort the whole
                            # sync: log it, bump the metric, continue.
                            self.logger.warning(
                                "Syncing repository failed: %s [%s/%s]",
                                ", ".join(
                                    filter(None, (repository.content_set,
                                                  repository.basearch,
                                                  repository.releasever))),
                                completed_repositories, total_repositories)
                            self.logger.exception("Exception: ")
                            FAILED_IMPORT_REPO.inc()
                        finally:
                            # Release parsed metadata before the next repo.
                            repository.unload_metadata()
                finally:
                    # Always drop the batch's on-disk repodata, even on error.
                    self.clean_repodata(batch)
        finally:
            self.repo_store.cleanup_unused_data()
            self._clean_certificate_cache()