# module-level thread-local storage; a single shared instance is required
# for the per-thread repo cache to persist between calls
_local = threading.local()


def reload_repo(repo_id):
    repos = getattr(_local, "repos", None)
    if repos is not None:
        repoapi = RepositoryAPI()
        repos[repo_id] = repoapi.repository(repo_id)
        _load_repo_extras(repos[repo_id])
    return get_repos()[repo_id]

def run(self, repo_id, groups=None, name=None, arch=None, url=None,
        gpgkeys=None, cksum="sha1", filters=None):
    if groups is None:
        groups = []
    if gpgkeys is None:
        gpgkeys = []
    if filters is None:
        filters = []
    repoapi = RepositoryAPI()
    errors = []
    keylist = repo_utils.get_keylist(gpgkeys, errors=errors)
    for error in errors:
        self.update(error, state="ERROR")
    try:
        repoapi.create(repo_id, name, arch, feed=url,
                       relative_path=repo_id, groupid=groups,
                       gpgkeys=keylist, checksum_type=cksum)
    except ServerRequestError, err:
        raise TaskExecutionError("Could not create repo %s: %s" %
                                 (repo_id, err[1]))

def diff(request, repo_id=None, repo_id2=None):
    repoapi = RepositoryAPI()
    mode = request.GET.get("mode", "all")
    repo1 = repo_utils.get_repo(repo_id)
    repo2 = repo_utils.get_repo(repo_id2)
    packages1 = dict([("%s.%s" % (p['name'], p['arch']), p)
                      for p in repoapi.packages(repo_id)])
    packages2 = dict([("%s.%s" % (p['name'], p['arch']), p)
                      for p in repoapi.packages(repo_id2)])
    pkg_names = set(packages1.keys() + packages2.keys())
    allpackages = dict()
    for pkg in pkg_names:
        if pkg in packages1:
            nevra1 = repo_utils.get_nevra(packages1[pkg])
        else:
            nevra1 = None
        if pkg in packages2:
            nevra2 = repo_utils.get_nevra(packages2[pkg])
        else:
            nevra2 = None
        if nevra1 == nevra2 and mode == "diff":
            # in diff mode, skip packages identical in both repos
            continue
        allpackages[pkg] = dict()
        allpackages[pkg]["repo1"] = nevra1
        allpackages[pkg]["repo2"] = nevra2
    return dict(repo1=repo1, repo2=repo2, mode=mode,
                packages1=packages1, packages2=packages2,
                allpackages=allpackages)

def set_gpgkeys(repo, keys, request=None, errors=None):
    repoapi = RepositoryAPI()
    if errors is None:
        errors = []
    to_remove = [k for k, kurl in repo["keys"].items() if k not in keys]
    try:
        repoapi.rmkeys(repo["id"], to_remove)
        if request:
            messages.debug(request,
                           "Removed GPG keys %s from %s" %
                           (to_remove, repo["name"]))
    except ServerRequestError, err:
        errors.append("Could not remove GPG keys (%s) from %s: %s" %
                      (to_remove, repo["name"], err[1]))
    # callers (e.g. view()) treat the return value as a success flag
    return not errors

def __init__(self, basepath, xsource, config):
    Source.__init__(self, basepath, xsource, config)
    self.pulp_id = None
    if has_pulp and xsource.get("pulp_id"):
        self.pulp_id = xsource.get("pulp_id")

        _setup_pulp(self.config)

        repoapi = RepositoryAPI()
        try:
            self.repo = repoapi.repository(self.pulp_id)
            self.gpgkeys = ["%s/%s" % (PULPCONFIG.cds['keyurl'], key)
                            for key in repoapi.listkeys(self.pulp_id)]
        except server.ServerRequestError:
            err = sys.exc_info()[1]
            if err[0] == 401:
                msg = "Packages: Error authenticating to Pulp: %s" % err[1]
            elif err[0] == 404:
                msg = "Packages: Pulp repo id %s not found: %s" % \
                    (self.pulp_id, err[1])
            else:
                msg = "Packages: Error %d fetching pulp repo %s: %s" % \
                    (err[0], self.pulp_id, err[1])
            logger.error(msg)
            raise Bcfg2.Server.Plugin.PluginInitError
        except socket.error:
            err = sys.exc_info()[1]
            logger.error("Packages: Could not contact Pulp server: %s" % err)
            raise Bcfg2.Server.Plugin.PluginInitError
        except:
            err = sys.exc_info()[1]
            logger.error("Packages: Unknown error querying Pulp server: %s" %
                         err)
            raise Bcfg2.Server.Plugin.PluginInitError
        self.rawurl = "%s/%s" % (PULPCONFIG.cds['baseurl'],
                                 self.repo['relative_path'])
        self.arches = [self.repo['arch']]

    if not self.rawurl:
        self.baseurl = self.url + "%(version)s/%(component)s/%(arch)s/"
    else:
        self.baseurl = self.rawurl
    self.packages = dict()
    self.deps = dict([('global', dict())])
    self.provides = dict([('global', dict())])
    self.filemap = dict([(x, dict())
                         for x in ['global'] + self.arches])
    self.needed_paths = set()
    self.file_to_arch = dict()
    self.use_yum = has_yum
    try:
        self.use_yum &= config.getboolean("yum", "use_yum_libraries")
    except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
        self.use_yum = False

def set_groups(repo, groups, request=None, errors=None):
    repoapi = RepositoryAPI()
    if errors is None:
        errors = []
    for group in repo["groupid"]:
        if group not in groups:
            try:
                repoapi.remove_group(repo["id"], group)
                if request:
                    messages.debug(request,
                                   "Removed group %s from %s" %
                                   (group, repo["id"]))
            except ServerRequestError, err:
                errors.append("Could not remove group %s from %s: %s" %
                              (group, repo["id"], err[1]))
    # callers (e.g. view()) treat the return value as a success flag
    return not errors

def set_filters(repo, filters, request=None, errors=None):
    repoapi = RepositoryAPI()
    if errors is None:
        errors = []
    to_remove = [f for f in repo["filters"] if f not in filters]
    if to_remove:
        try:
            repoapi.remove_filters(repo["id"], to_remove)
            if request:
                messages.debug(request,
                               "Removed filters %s from %s" %
                               (to_remove, repo["id"]))
        except ServerRequestError, err:
            errors.append("Could not remove filters %s from %s: %s" %
                          (to_remove, repo["id"], err[1]))
    # callers (e.g. view()) treat the return value as a success flag
    return not errors
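
# A hypothetical sketch (repo id and group names are assumptions) of the
# shared error-accumulation pattern used by set_gpgkeys(), set_groups(),
# and set_filters(): callers can pass a single list so failure messages
# from several calls collect in one place.
def _example_collect_errors():
    repo = get_repo("example-repo")
    errors = []
    set_groups(repo, ["stable"], errors=errors)
    set_filters(repo, [], errors=errors)
    return errors  # failure messages from both calls, in order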

def __init__(self, basepath, xsource, config):
    Source.__init__(self, basepath, xsource, config)
    self.pulp_id = None
    if has_pulp and xsource.get("pulp_id"):
        self.pulp_id = xsource.get("pulp_id")

        _setup_pulp(self.config)

        repoapi = RepositoryAPI()
        try:
            self.repo = repoapi.repository(self.pulp_id)
            self.gpgkeys = [
                "%s/%s" % (PULPCONFIG.cds['keyurl'], key)
                for key in repoapi.listkeys(self.pulp_id)]
        except server.ServerRequestError:
            err = sys.exc_info()[1]
            if err[0] == 401:
                msg = "Packages: Error authenticating to Pulp: %s" % err[1]
            elif err[0] == 404:
                msg = "Packages: Pulp repo id %s not found: %s" % (
                    self.pulp_id, err[1])
            else:
                msg = "Packages: Error %d fetching pulp repo %s: %s" % (
                    err[0], self.pulp_id, err[1])
            raise SourceInitError(msg)
        except socket.error:
            err = sys.exc_info()[1]
            raise SourceInitError("Could not contact Pulp server: %s" % err)
        except:
            err = sys.exc_info()[1]
            raise SourceInitError(
                "Unknown error querying Pulp server: %s" % err)
        self.rawurl = "%s/%s" % (PULPCONFIG.cds['baseurl'],
                                 self.repo['relative_path'])
        self.arches = [self.repo['arch']]

    if not self.rawurl:
        self.baseurl = self.url + "%(version)s/%(component)s/%(arch)s/"
    else:
        self.baseurl = self.rawurl
    self.packages = dict()
    self.deps = dict([('global', dict())])
    self.provides = dict([('global', dict())])
    self.filemap = dict([(x, dict())
                         for x in ['global'] + self.arches])
    self.needed_paths = set()
    self.file_to_arch = dict()
    self.use_yum = has_yum and config.getboolean(
        "yum", "use_yum_libraries", default=False)

def get_updates(repo):
    """Return a list of updated packages available to a child repo."""
    if not repo["parent"]:
        return None
    repoapi = RepositoryAPI()
    child_nevras = [get_nevra(p) for p in repoapi.packages(repo["id"])]
    rv = []
    for pkg in repoapi.packages(repo["parent"]["id"]):
        if get_nevra(pkg) not in child_nevras:
            rv.append(pkg)
    return rv
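
# A hypothetical usage sketch for get_updates(); the repo id is an
# assumption. Note that get_updates() returns None for repos without a
# parent.
def _example_show_updates():
    repo = get_repos()["example-repo-test"]
    updates = get_updates(repo)
    if updates:
        for pkg in updates:
            print "promotable: %s" % get_nevra(pkg)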

def get_package(repo, name=None, id=None):
    if not isinstance(repo, dict):
        repo = get_repo(repo)
    repoapi = RepositoryAPI()
    for pkg in repoapi.packages(repo["id"]):
        if (pkg["id"] == id or
            pkg["name"] == name or
            get_nevra(pkg) == name or
            get_nevra(pkg, arch=repo["arch"]) == name or
            "%s-%s" % (pkg["name"], pkg["version"]) == name):
            return pkg
    return None
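
# A hypothetical sketch of the name forms get_package() accepts; the repo
# id and package names are assumptions, not values from this module.
def _example_find_package():
    # by plain name, by name-version, or by full NEVRA
    return (get_package("example-repo", name="bash") or
            get_package("example-repo", name="bash-4.1.2") or
            get_package("example-repo", name="bash-4.1.2-15.el6.x86_64"))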

def _load_repo_extras(repo, repos=None):
    config = get_config()
    repoapi = RepositoryAPI()
    repo["url"] = os.path.join(config.cds.baseurl, repo["relative_path"])
    repo["parent"] = None
    repo["children"] = []
    if repos is None:
        repos = getattr(_local, "repos", dict())
    for repo2 in repos.values():
        if repo2 == repo:
            continue
        elif repo["id"] in repo2["clone_ids"]:
            # the clone_id attribute is broken, but we check it anyway
            # just in case it gets fixed some day
            repo["parent"] = repo2
        elif repo2["id"] in repo["clone_ids"]:
            repo["children"].append(repo2)
        elif (repo["source"] and
              repo["source"]["type"] == "local" and
              repo["source"]["url"].endswith("/%s" % repo2["id"])):
            # the child syncs from a local repo whose URL ends with
            # /<parent repo id>
            repo["parent"] = repo2
        elif (repo2["source"] and
              repo2["source"]["type"] == "local" and
              repo2["source"]["url"].endswith("/%s" % repo["id"])):
            repo["children"].append(repo2)
    repo["keys"] = dict()
    for key in repoapi.listkeys(repo["id"]):
        repo["keys"][os.path.basename(key)] = "%s/%s" % (config.cds.keyurl,
                                                         key)
    if repo["parent"]:
        repo["updates"] = has_updates(repo)
    if repo["last_sync"] and repo["sync_schedule"]:
        repo["next_sync"] = format_iso8601_datetime(
            parse_iso8601_datetime(repo["last_sync"]) +
            parse_iso8601_interval(repo["sync_schedule"])[0])
    elif repo["sync_schedule"]:
        repo["next_sync"] = format_iso8601_datetime(
            parse_iso8601_interval(repo["sync_schedule"])[1])
    else:
        repo["next_sync"] = None
    repo["groupid"].sort()
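
# A hypothetical sketch of the parentage inference above: a child repo's
# source is a "local" feed whose URL ends with the parent's id, so given
# these two stub records the child resolves repo1 as its parent. The ids
# and URL are assumptions.
def _example_parentage():
    repo1 = dict(id="centos6-prod", clone_ids=[], source=None)
    repo2 = dict(id="centos6-test", clone_ids=[],
                 source=dict(type="local",
                             url="http://pulp.example.com/repos/centos6-prod"))
    return repo2["source"]["url"].endswith("/%s" % repo1["id"])  # True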

def run(self, clone_id, name=None, parent=None, groups=None, filters=None):
    if groups is None:
        groups = []
    if filters is None:
        filters = []
    repoapi = RepositoryAPI()
    try:
        repoapi.clone(parent['id'], clone_id, name, relative_path=clone_id)
        self.update("Cloned %s to %s" % (parent['id'], clone_id))
    except ServerRequestError, err:
        raise TaskExecutionError("Could not clone %s as %s: %s" %
                                 (parent['id'], clone_id, err[1]))

def get_repos(reload=False):
    if reload:
        repos = None
    else:
        repos = getattr(_local, "repos", None)
    if repos is None:
        # this looks inefficient, and it is, but repos has to be fully
        # loaded before we can call _load_repo_extras(), so we have to
        # do this in two separate loops
        repoapi = RepositoryAPI()
        repos = dict([(r["id"], r) for r in repoapi.repositories(dict())])
        for repo in repos.values():
            _load_repo_extras(repo, repos=repos)
        _local.repos = repos
    return repos
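
# A hypothetical usage sketch: the first call populates the thread-local
# cache; reload=True forces a fresh fetch from the Pulp server.
def _example_cache_repos():
    repos = get_repos()             # cached after the first call
    fresh = get_repos(reload=True)  # bypasses the cache
    return len(repos), len(fresh)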

def delete(request, repo_id=None):
    repo = repo_utils.get_repo(repo_id)
    form = DeleteOkayForm(request.POST or None, dict(id=repo_id))
    if request.method == 'POST':
        if form.is_valid():
            repoapi = RepositoryAPI()
            try:
                repoapi.delete(repo_id)
                messages.success(request,
                                 "Deleted repository %s (%s)" %
                                 (repo['name'], repo['id']))
                return \
                    HttpResponseRedirect(reverse('sponge.views.repos.list'))
            except ServerRequestError, err:
                messages.error(request,
                               "Failed to delete repository %s (%s): %s" %
                               (repo['name'], repo['id'], err[1]))

def __init__(self, basepath, xsource, config):
    Source.__init__(self, basepath, xsource, config)
    self.pulp_id = None
    if has_pulp and xsource.get("pulp_id"):
        self.pulp_id = xsource.get("pulp_id")

        _setup_pulp(self.config)

        repoapi = RepositoryAPI()
        try:
            self.repo = repoapi.repository(self.pulp_id)
            self.gpgkeys = [os.path.join(PULPCONFIG.cds['keyurl'], key)
                            for key in repoapi.listkeys(self.pulp_id)]
        except server.ServerRequestError:
            err = sys.exc_info()[1]
            if err[0] == 401:
                msg = "Packages: Error authenticating to Pulp: %s" % err[1]
            elif err[0] == 404:
                msg = "Packages: Pulp repo id %s not found: %s" % \
                    (self.pulp_id, err[1])
            else:
                msg = "Packages: Error %d fetching pulp repo %s: %s" % \
                    (err[0], self.pulp_id, err[1])
            raise SourceInitError(msg)
        except socket.error:
            err = sys.exc_info()[1]
            raise SourceInitError("Could not contact Pulp server: %s" % err)
        except:
            err = sys.exc_info()[1]
            raise SourceInitError("Unknown error querying Pulp server: %s" %
                                  err)
        self.rawurl = "%s/%s" % (PULPCONFIG.cds['baseurl'],
                                 self.repo['relative_path'])
        self.arches = [self.repo['arch']]

    if not self.rawurl:
        self.baseurl = self.url + "%(version)s/%(component)s/%(arch)s/"
    else:
        self.baseurl = self.rawurl
    self.packages = dict()
    self.deps = dict([('global', dict())])
    self.provides = dict([('global', dict())])
    self.filemap = dict([(x, dict())
                         for x in ['global'] + self.arches])
    self.needed_paths = set()
    self.file_to_arch = dict()

def view(request, repo_id=None):
    repoapi = RepositoryAPI()
    repo = repo_utils.get_repo(repo_id)
    packages = repoapi.packages(repo_id)
    for pkg in packages:
        pkg['nevra'] = repo_utils.get_nevra(pkg, repo['arch'])
    editform = RepoEditForm(request.POST or None, repo=repo)
    diffform = DiffSelectForm(request.POST or None, repo=repo)
    if request.method == 'POST' and "repoedit" in request.POST:
        if editform.is_valid():
            success = True
            if editform.cleaned_data['name'] != repo['name']:
                try:
                    repoapi.update(
                        repo['id'],
                        dict(name=editform.cleaned_data['name'],
                             checksum_type=editform.cleaned_data['cksum']))
                    messages.debug(request,
                                   "Updated repository name for %s" %
                                   repo['id'])
                except ServerRequestError, err:
                    success = False
                    messages.error(request,
                                   "Could not update repository info for "
                                   "%s: %s" % (repo['id'], err[1]))
            groups = filter(lambda s: s != '',
                            editform.cleaned_data['groups'] +
                            re.split(r'\s*,\s*',
                                     editform.cleaned_data['newgroups']))
            success &= repo_utils.set_groups(repo, groups, request=request)
            success &= repo_utils.set_gpgkeys(
                repo,
                editform.cleaned_data['gpgkeys'].splitlines(),
                request=request)
            success &= repo_utils.set_filters(
                repo,
                editform.cleaned_data['filters'],
                request=request)
            if success:
                messages.success(request,
                                 "Updated repository %s" % repo['id'])
            else:
                messages.warning(request,
                                 "Errors encountered while updating "
                                 "repository %s" % repo['id'])
            repo = repo_utils.reload_repo(repo['id'])

def demote_ok(request, pid=None):
    pset = PackageSet.objects.get(pk=pid)
    form = DemoteOkayForm(request.POST or None, pset=pset)
    repos = PackageSetRepo.objects.filter(packageset=pset.pk)
    if request.method == 'POST':
        repoapi = RepositoryAPI()
        packages = [cPickle.loads(str(p.pkgobj))
                    for p in
                    PackageSetPackage.objects.filter(packageset=pset.pk)]
        success = True
        for repo in repos:
            logger.info("Deleting %s from repo %s" %
                        ([repo_utils.get_nevra(p) for p in packages],
                         repo.repoid))
            for package in packages:
                try:
                    if not repoapi.remove_package(repo.repoid,
                                                  pkgobj=[package]):
                        success = False
                        messages.warning(request,
                                         "Failed to remove package %s "
                                         "from %s" %
                                         (repo_utils.get_nevra(package),
                                          repo.name))
                except ServerRequestError, err:
                    success = False
                    messages.error(request,
                                   "Failed to remove package %s from %s: "
                                   "%s" % (repo_utils.get_nevra(package),
                                           repo.name, err[1]))
        if success:
            messages.success(request,
                             "Successfully removed %s from %s" %
                             (", ".join([repo_utils.get_nevra(p)
                                         for p in packages]),
                              ", ".join([r.name for r in repos])))
            pset.delete()
            if len(repos) == 1:
                nexturl = reverse("sponge.views.repos.view",
                                  kwargs=dict(repo_id=repos[0].repoid))
            else:
                nexturl = reverse("sponge.views.repos.list")
            return HttpResponseRedirect(nexturl)

def promote_ok(request, pid=None):
    pset = PackageSet.objects.get(pk=pid)
    repos = PackageSetRepo.objects.filter(packageset=pset.pk)
    form = PromoteOkayForm(request.POST or None, pset=pset)
    if request.POST:
        repoapi = RepositoryAPI()
        packages = \
            [cPickle.loads(str(p.pkgobj))
             for p in PackageSetPackage.objects.filter(packageset=pset.pk)]
        success = True
        logger.info("Promoting %s to repo(s) %s" %
                    ([p['id'] for p in packages],
                     [r.repoid for r in repos]))
        for repo in repos:
            try:
                errors = repoapi.add_package(repo.repoid,
                                             [p['id'] for p in packages])
                for error in errors:
                    if error[4]:
                        success = False
                        messages.warning(request,
                                         "Failed to add package %s to %s: "
                                         "%s" % (error[2], repo.repoid,
                                                 error[4]))
            except ServerRequestError, err:
                success = False
                messages.error(request,
                               "Failed to add packages to %s (%s): %s" %
                               (repo.repoid,
                                ", ".join([repo_utils.get_nevra(p)
                                           for p in packages]),
                                err[1]))
        if success:
            messages.success(request,
                             "Successfully added packages to repo(s) "
                             "%s: %s" %
                             (",".join([r.name for r in repos]),
                              ", ".join([repo_utils.get_nevra(p)
                                         for p in packages])))
            pset.delete()
            if len(repos) == 1:
                nexturl = reverse("sponge.views.repos.view",
                                  kwargs=dict(repo_id=repos[0].repoid))
            else:
                nexturl = reverse("sponge.views.repos.list")
            return HttpResponseRedirect(nexturl)

def __init__(self, basepath, xsource, setup):
    Source.__init__(self, basepath, xsource, setup)
    self.pulp_id = None
    if HAS_PULP and xsource.get("pulp_id"):
        self.pulp_id = xsource.get("pulp_id")

        _setup_pulp(self.setup)

        repoapi = RepositoryAPI()
        try:
            self.repo = repoapi.repository(self.pulp_id)
            self.gpgkeys = [
                os.path.join(PULPCONFIG.cds['keyurl'], key)
                for key in repoapi.listkeys(self.pulp_id)]
        except server.ServerRequestError:
            err = sys.exc_info()[1]
            if err[0] == 401:
                msg = "Packages: Error authenticating to Pulp: %s" % err[1]
            elif err[0] == 404:
                msg = "Packages: Pulp repo id %s not found: %s" % \
                    (self.pulp_id, err[1])
            else:
                msg = "Packages: Error %d fetching pulp repo %s: %s" % \
                    (err[0], self.pulp_id, err[1])
            raise SourceInitError(msg)
        except socket.error:
            err = sys.exc_info()[1]
            raise SourceInitError("Could not contact Pulp server: %s" % err)
        except:
            err = sys.exc_info()[1]
            raise SourceInitError(
                "Unknown error querying Pulp server: %s" % err)
        self.rawurl = "%s/%s" % (PULPCONFIG.cds['baseurl'],
                                 self.repo['relative_path'])
        self.arches = [self.repo['arch']]

    self.packages = dict()
    self.deps = dict([('global', dict())])
    self.provides = dict([('global', dict())])
    self.filemap = dict([(x, dict())
                         for x in ['global'] + self.arches])
    self.needed_paths = set()
    self.file_to_arch = dict()

def sync_foreground(repo_id):
    taskapi = TaskAPI()
    repoapi = RepositoryAPI()
    running = repoapi.running_task(repoapi.sync_list(repo_id))
    if running is not None:
        raise Exception("Sync for repository %s already in progress" %
                        repo_id)
    task = repoapi.sync(repo_id)
    while not task_end(task):
        time.sleep(1)
        task = taskapi.info(task["id"])
    if not task_succeeded(task):
        if task["exception"] and task["traceback"]:
            raise Exception(task["traceback"][-1])
        elif task["exception"]:
            raise Exception("Unknown sync error: %s" % task["exception"])
        else:
            raise Exception("Unknown sync error")
    return task
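
# A hypothetical usage sketch: sync_foreground() blocks until the sync
# task finishes, so it suits CLI scripts or background workers rather
# than request handlers. The repo id is an assumption.
def _example_sync():
    try:
        task = sync_foreground("example-repo")
        print "sync finished, task %s" % task["id"]
    except Exception, err:
        print "sync failed: %s" % err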

def promote_select(request, repo_id=None):
    repo = repo_utils.get_repo(repo_id)
    form = PromotePackageSelectionForm(request.POST or None, repo=repo)
    if request.method == 'POST' and form.is_valid():
        repoapi = RepositoryAPI()
        pset = PackageSet.objects.create(stype="promote")
        pset.save()
        prepo = PackageSetRepo.objects.create(packageset=pset,
                                              repoid=repo['id'],
                                              name=repo['name'])
        prepo.save()
        packages = []
        for pkg in repoapi.packages(repo['parent']['id']):
            if pkg['id'] in form.cleaned_data['packages']:
                packages.append(pkg)
                pspkg = PackageSetPackage.objects.create(
                    packageset=pset,
                    packageid=pkg['id'],
                    pkgobj=cPickle.dumps(pkg))
                pspkg.save()
        deps = repo_utils.resolve_deps(packages,
                                       [repo['parent']['id']],
                                       pkgfilter=repoapi.packages(repo['id']))
        for pkg in deps:
            pspkg = PackageSetPackage.objects.create(
                packageset=pset,
                packageid=pkg['id'],
                pkgobj=cPickle.dumps(pkg))
            pspkg.save()
        return HttpResponseRedirect(reverse('sponge.views.repos.promote_ok',
                                            kwargs=dict(pid=pset.pk)))
    packages = repo_utils.get_updates(repo)
    if packages:
        return dict(repo=repo, form=form)
    else:
        messages.info(request,
                      "No packages available to be promoted from %s to %s" %
                      (repo['parent']['name'], repo['name']))
        return HttpResponseRedirect(reverse('sponge.views.repos.list'))

def run(self, repo_id):
    taskapi = TaskAPI()
    repoapi = RepositoryAPI()
    running = repoapi.running_task(repoapi.sync_list(repo_id))
    if running is not None:
        raise TaskExecutionError("Metadata rebuild for repository %s "
                                 "already in progress" % repo_id)
    task = repoapi.sync(repo_id)
    while not task_end(task):
        time.sleep(1)
        task = taskapi.info(task['id'])
    if not task_succeeded(task):
        if task['exception'] and task['traceback']:
            raise TaskExecutionError(task['traceback'][-1])
        elif task['exception']:
            raise TaskExecutionError("Unknown metadata rebuild error: %s" %
                                     task['exception'])
        else:
            raise TaskExecutionError("Unknown metadata rebuild error")
    return "Metadata rebuilt for %s" % repo_id

def demote_select(request, repo_id=None):
    repo = repo_utils.get_repo(repo_id)
    form = DemotePackageSelectionForm(request.POST or None, repo=repo)
    if request.method == 'POST' and form.is_valid():
        pset = PackageSet.objects.create(stype="demote")
        pset.save()
        prepo = PackageSetRepo.objects.create(packageset=pset,
                                              repoid=repo['id'],
                                              name=repo['name'])
        prepo.save()
        repoapi = RepositoryAPI()
        for pkg in repoapi.packages(repo['id']):
            if pkg['id'] in form.cleaned_data['packages']:
                pspkg = PackageSetPackage.objects.create(
                    packageset=pset,
                    packageid=pkg['id'],
                    pkgobj=cPickle.dumps(pkg))
                pspkg.save()
        return HttpResponseRedirect(reverse('sponge.views.repos.demote_ok',
                                            kwargs=dict(pid=pset.pk)))
    else:
        packages = repo_utils.get_updates(repo)
        return dict(repo=repo, form=form)

def __init__(self, basepath, xsource):
    Source.__init__(self, basepath, xsource)
    self.pulp_id = None
    if HAS_PULP and xsource.get("pulp_id"):
        self.pulp_id = xsource.get("pulp_id")

        _setup_pulp()

        repoapi = RepositoryAPI()
        try:
            self.repo = repoapi.repository(self.pulp_id)
            self.gpgkeys = [
                os.path.join(PULPCONFIG.cds["keyurl"], key)
                for key in repoapi.listkeys(self.pulp_id)]
        except server.ServerRequestError:
            err = sys.exc_info()[1]
            if err[0] == 401:
                msg = "Packages: Error authenticating to Pulp: %s" % err[1]
            elif err[0] == 404:
                msg = "Packages: Pulp repo id %s not found: %s" % \
                    (self.pulp_id, err[1])
            else:
                msg = "Packages: Error %d fetching pulp repo %s: %s" % \
                    (err[0], self.pulp_id, err[1])
            raise SourceInitError(msg)
        except socket.error:
            err = sys.exc_info()[1]
            raise SourceInitError("Could not contact Pulp server: %s" % err)
        except:
            err = sys.exc_info()[1]
            raise SourceInitError(
                "Unknown error querying Pulp server: %s" % err)
        self.rawurl = "%s/%s" % (PULPCONFIG.cds["baseurl"],
                                 self.repo["relative_path"])
        self.arches = [self.repo["arch"]]

    self.packages = dict()
    self.deps = dict([("global", dict())])
    self.provides = dict([("global", dict())])
    self.filemap = dict([(x, dict())
                         for x in ["global"] + self.arches])
    self.needed_paths = set()
    self.file_to_arch = dict()
    self.yumgroups = dict()

def rebalance_sync_schedule(errors=None):
    repoapi = RepositoryAPI()
    repos = get_repos()
    if errors is None:
        errors = []

    # get a list of sync frequencies
    syncgroups = dict()  # dict of sync time -> [groups]
    default = None
    for ckey, sync in config.list(
            filter=dict(name__startswith="sync_frequency_")).items():
        group = ckey.replace("sync_frequency_", "")
        if sync is None:
            logger.error("Sync frequency for %s is None, skipping" % group)
            continue
        synctime = 60 * 60 * int(sync)
        if group == "default":
            default = synctime
        else:
            try:
                syncgroups[synctime].append(group)
            except KeyError:
                syncgroups[synctime] = [group]

    # divide the repos up by sync time and sort them by inheritance,
    # reversed, to ensure that children get synced before parents and
    # a package doesn't just go straight to the final child
    cycles = dict()  # dict of repo -> sync time
    for repo in repos.values():
        cycles[repo["id"]] = default
        for synctime, groups in syncgroups.items():
            if (set(groups) & set(repo["groupid"]) and
                (cycles[repo["id"]] is None or
                 synctime > cycles[repo["id"]])):
                cycles[repo["id"]] = synctime

    # finally, build a dict of sync time -> [repos]
    syncs = dict()
    for repoid, synctime in cycles.items():
        if synctime is None:
            continue
        try:
            syncs[synctime].append(repos[repoid])
        except KeyError:
            syncs[synctime] = [repos[repoid]]

    for synctime, syncrepos in syncs.items():
        syncrepos = sort_repos_by_ancestry(syncrepos)
        syncrepos.reverse()

        # we count the total number of packages in all repos, and
        # divide them evenly amongst the timespan allotted.  It's
        # worth noting that we count clones just the same as we count
        # "regular" repos, because it's createrepo, not the sync, that
        # really takes a lot of time and memory.
        pkgs = 0
        for repo in syncrepos:
            if repo["package_count"] < 10:
                # we still have to run createrepo even if there are
                # very few (or no!) packages, so count very small
                # repos as 10 packages
                pkgs += 10
            else:
                pkgs += repo["package_count"]
        try:
            pkgtime = float(synctime) / pkgs
        except ZeroDivisionError:
            pkgtime = 1
        logger.debug("Allowing %s seconds per package" % pkgtime)

        # find tomorrow morning at 12:00 am
        tomorrow = datetime.datetime.today() + datetime.timedelta(days=1)
        start = datetime.datetime(tomorrow.year, tomorrow.month,
                                  tomorrow.day)

        for repo in syncrepos:
            iso8601_start = format_iso8601_datetime(start)
            iso8601_interval = format_iso8601_interval(
                datetime.timedelta(seconds=synctime))
            logger.debug("Scheduling %s to start at %s, sync every %s" %
                         (repo["id"], iso8601_start, iso8601_interval))
            schedule = parse_interval_schedule(iso8601_interval,
                                               iso8601_start, None)
            try:
                repoapi.change_sync_schedule(repo["id"],
                                             dict(schedule=schedule,
                                                  options=dict()))
                reload_repo(repo["id"])
            except ServerRequestError, err:
                errors.append("Could not set schedule for %s: %s" %
                              (repo["id"], err[1]))
            start += datetime.timedelta(seconds=int(pkgtime *
                                                    repo["package_count"]))
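
# A worked example of the pacing arithmetic above, with hypothetical
# numbers: three repos of 900, 90, and 2 packages on a six-hour cycle
# count as 900 + 90 + 10 = 1000 packages, giving 21.6 seconds per
# package; each repo's start time is then offset by its own package count.
def _example_pacing():
    synctime = 6 * 60 * 60                   # six-hour sync cycle
    counts = [900, 90, 2]                    # per-repo package counts
    pkgs = sum(max(c, 10) for c in counts)   # small repos count as 10
    pkgtime = float(synctime) / pkgs         # 21.6 seconds per package
    return [int(pkgtime * c) for c in counts]  # offsets: 19440, 1944, 43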

def get_packages(self, repo):
    repoapi = RepositoryAPI()
    return repoapi.packages(repo['id'])