Example #1
    def __init__(self, log, db, koji_session, repo_cache):
        self.log = log
        self.db = db
        self.koji_session = koji_session
        self.repo_cache = repo_cache
        # TODO repo_id
        self.group = util.get_build_group(koji_session)
Example #2
    def run(self):
        # pylint: disable=E1101
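        # builds whose dependencies have not been processed yet, ordered by
        # repo_id so they can be grouped per repo below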
        unprocessed = self.db.query(Build)\
                             .filter_by(deps_processed=False)\
                             .filter(Build.repo_id.isnot(None))\
                             .options(joinedload(Build.package))\
                             .order_by(Build.repo_id).all()
        # TODO repo_id
        self.group = util.get_build_group()

        # do this before processing to avoid multiple runs of createrepo
        for build in unprocessed:
            self.srpm_cache.get_srpm(build.package.name, build.epoch,
                                     build.version, build.release)
        srpm_repo = self.srpm_cache.get_repodata()

        for repo_id, builds in itertools.groupby(unprocessed,
                                                 lambda build: build.repo_id):
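            # materialize the group iterator so it can be used both for
            # processing and for the bulk update below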
            builds = list(builds)
            if repo_id is not None:
                self.prepare_sack(repo_id)
                if self.sack:
                    util.add_repos_to_sack('srpm', {'src': srpm_repo},
                                           self.sack)
                    for build in builds:
                        self.process_build(build)
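            # mark every build in the group as processed, even when no sack
            # could be prepared for its repo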
            self.db.query(Build).filter(Build.id.in_([b.id for b in builds]))\
                                .update({'deps_processed': True},
                                        synchronize_session=False)
            self.db.commit()
Example #3
    def run(self, repo_id):
        start = time.time()
        self.log.info("Generating new repo")
        self.db.add(Repo(repo_id=repo_id))
        self.db.flush()
        packages = self.get_packages()
        self.refresh_latest_builds(packages)
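        # re-read packages so the data refreshed above is taken into account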
        packages = self.get_packages()
        srpm_repo = self.srpm_cache.get_repodata()
        self.prepare_sack(repo_id)
        if not self.sack:
            self.log.error('Cannot generate repo: {}'.format(repo_id))
            return
        self.update_repo_index(repo_id)
        util.add_repo_to_sack('src', srpm_repo, self.sack)
        # TODO repo_id
        self.group = util.get_build_group()
        self.log.info("Resolving dependencies")
        resolution_start = time.time()
        changes = self.generate_dependency_changes(packages, repo_id)
        resolution_end = time.time()
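        # replace previously stored resolution problems with the ones found
        # in this run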
        self.db.query(ResolutionProblem).delete(synchronize_session=False)
        # pylint: disable=E1101
        if self.problems:
            self.db.execute(ResolutionProblem.__table__.insert(),
                            self.problems)
        self.synchronize_resolution_state()
        self.update_dependency_changes(changes)
        self.db.commit()
        end = time.time()

        self.log.info(
            ("New repo done. Resolution time: {} minutes\n"
             "Overall time: {} minutes.").format(
                 (resolution_end - resolution_start) / 60, (end - start) / 60))
Example #4
    def generate_repo(self, repo_id):
        start = time.time()
        packages = self.get_packages()
        package_names = [pkg.name for pkg in packages]
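        # snapshot current package states so state changes can be detected
        # after resolution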
        prev_states = {pkg.id: pkg.state_string for pkg in packages}
        self.log.info("Generating new repo")
        self.srpm_cache.get_latest_srpms(package_names)
        srpm_repo = self.srpm_cache.get_repodata()
        sack = self.prepare_sack(repo_id)
        util.add_repo_to_sack('src', srpm_repo, sack)
        # TODO repo_id
        group = util.get_build_group()
        self.log.info("Resolving dependencies")
        resolution_start = time.time()
        changes = []
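        # resolve each package against the sack and compare its dependencies
        # with those recorded for its last build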
        for package in packages:
            srpm = get_srpm_pkg(sack, package.name)
            curr_deps = self.resolve_dependencies(sack, package, srpm, group,
                                                  repo_id)
            if curr_deps is not None:
                last_build = package.last_build
                if last_build and last_build.repo_id:
                    prev_deps = self.get_deps_from_db(last_build.package_id,
                                                      last_build.repo_id)
                    if prev_deps is not None:
                        changes += self.create_dependency_changes(prev_deps,
                                                                  curr_deps,
                                                                  package.id)
        self.synchronize_resolution_state()
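        # re-read packages and compare their new states with the snapshot
        # taken before resolution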
        packages = self.get_packages()
        for pkg in packages:
            prev_state = prev_states[pkg.id]
            check_package_state(pkg, prev_state)

        self.update_dependency_changes(changes)
        self.db.commit()
        end = time.time()

        self.log.info(("New repo done. Resolution time: {} minutes\n"
                       "Overall time: {} minutes.")
                      .format((end - resolution_start) / 60,
                              (end - start) / 60))
Example #5
    def process_builds(self):
        # pylint: disable=E1101
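        # unprocessed builds, ordered by repo_id so they can be grouped per
        # repo in the loop below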
        unprocessed = self.db.query(Build)\
                             .filter_by(deps_processed=False)\
                             .filter(Build.repo_id.isnot(None))\
                             .order_by(Build.repo_id).all()
        # TODO repo_id
        group = util.get_build_group()

        # do this before processing to avoid multiple runs of createrepo
        for build in unprocessed:
            self.srpm_cache.get_srpm(build.package.name, build.epoch,
                                     build.version, build.release)
        srpm_repo = self.srpm_cache.get_repodata()

        for repo_id, builds in itertools.groupby(unprocessed,
                                                 lambda build: build.repo_id):
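            # a single sack is prepared per repo and shared by all builds
            # from that repo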
            if repo_id is not None:
                sack = self.prepare_sack(repo_id)
                if sack:
                    util.add_repos_to_sack('srpm', {'src': srpm_repo}, sack)
                    for build in builds:
                        self.process_build(build, sack, group)
            self.db.commit()