def runCommand(self, thisCommand, cfg, argSet, args, debugAll=False):
    """
    Set up logging, flavors, the OpenPGP key cache and (optionally)
    profiling, then dispatch to the generic MainHandler command runner.

    @param thisCommand: command object to execute
    @param cfg: conary configuration object
    @param argSet: dict of parsed command-line options; 'lsprof' is
        consumed here to enable cProfile-based profiling
    @param args: remaining command-line arguments
    @param debugAll: if True, also drop into the debugger on Ctrl-C
    @return: the command's return value; calls sys.exit(2) instead if
        any error was logged during the run
    """
    client = conaryclient.ConaryClient(cfg)
    repos = client.getRepos()
    callback = commit.CheckinCallback(cfg)

    # Default the build label to the first entry of the install label
    # path when it was not configured explicitly.
    if not cfg.buildLabel and cfg.installLabelPath:
        cfg.buildLabel = cfg.installLabelPath[0]

    sys.excepthook = util.genExcepthook(debug=cfg.debugExceptions,
                                        debugCtrlC=debugAll)

    if cfg.installLabelPath:
        cfg.installLabel = cfg.installLabelPath[0]
    else:
        cfg.installLabel = None

    cfg.initializeFlavors()
    log.setMinVerbosity(log.INFO)
    log.resetErrorOccurred()

    # set the build flavor here, just to set architecture information
    # which is used when initializing a recipe class
    use.setBuildFlagsFromFlavor(None, cfg.buildFlavor, error=False)

    profile = False
    # 'in' instead of has_key(): has_key() was removed in Python 3 and
    # 'in' is equivalent (and faster) on Python 2 as well.
    if 'lsprof' in argSet:
        import cProfile
        prof = cProfile.Profile()
        prof.enable()
        profile = 'lsprof'
        del argSet['lsprof']

    keyCache = openpgpkey.getKeyCache()
    keyCache.setPublicPath(cfg.pubRing)
    # Reuse the repository client created above; the original code
    # constructed a second, redundant ConaryClient(cfg) here.
    keyCacheCallback = openpgpkey.KeyCacheCallback(repos, cfg)
    keyCache.setCallback(keyCacheCallback)

    try:
        rv = options.MainHandler.runCommand(self, thisCommand, cfg, argSet,
                                            args, callback=callback,
                                            repos=repos,
                                            profile=profile)
    finally:
        # cProfile is the only profiler this method can start, so the
        # old unreachable "elif profile: prof.stop()" branch (which
        # would have raised NameError had it ever run) is removed.
        if profile == 'lsprof':
            prof.disable()
            prof.dump_stats('cvc.lsprof')
            prof.print_stats()

    if log.errorOccurred():
        sys.exit(2)
    return rv
def testLogMinVerbosity(self):
    """
    setMinVerbosity() must only ever *raise* the effective verbosity,
    never lower it, and must report the previous level only when it
    actually changed something.
    """
    log.resetErrorOccurred()
    log.setVerbosity(log.ERROR)
    # At ERROR verbosity a warning produces no output.
    self.logCheck(log.warning, ("a warning",), [])
    # Requesting a minimum of ERROR changes nothing (we are already
    # there), so setMinVerbosity returns a falsy value and warnings
    # remain suppressed.
    assert(not log.setMinVerbosity(log.ERROR))
    self.logCheck(log.warning, ("a warning",), [])
    # Raising the minimum to WARNING takes effect and returns the old
    # level (ERROR); warnings are now emitted.
    assert(log.setMinVerbosity(log.WARNING) == log.ERROR)
    self.logCheck(log.warning, ("a warning",), ["warning: a warning"])
    # A later, *lower* minimum (ERROR) is a no-op: verbosity stays at
    # WARNING and warnings are still emitted.
    assert(not log.setMinVerbosity(log.ERROR))
    self.logCheck(log.warning, ("a warning",), ["warning: a warning"])
def testLogMinVerbosity(self):
    """
    Exercise setMinVerbosity(): it may only raise the effective log
    level, and it reports the prior level only when a change was made.
    """
    log.resetErrorOccurred()
    log.setVerbosity(log.ERROR)

    warn = log.warning
    warnArgs = ("a warning", )
    emitted = ["warning: a warning"]

    # Warnings are silent at ERROR verbosity.
    self.logCheck(warn, warnArgs, [])

    # Asking for a minimum of ERROR is a no-op -> falsy return.
    assert not log.setMinVerbosity(log.ERROR)
    self.logCheck(warn, warnArgs, [])

    # Raising the floor to WARNING returns the previous level (ERROR)
    # and warnings start showing up.
    assert log.setMinVerbosity(log.WARNING) == log.ERROR
    self.logCheck(warn, warnArgs, emitted)

    # A subsequent lower request must not reduce verbosity again.
    assert not log.setMinVerbosity(log.ERROR)
    self.logCheck(warn, warnArgs, emitted)
def resolve(self, resolveJob):
    """
    Find the set of troves that must be installed for the set of
    buildreqs associated with this trove.

    Searches for build req and runtime req solutions in the following
    order:

    1. Search the group, w/o consideration of order
    2. Search the label of the trove we're building, followed by the
       rest of the labelPath
    3. Search the label path.

    @param resolveJob: job describing the trove whose build/cross
        requirements should be resolved
    @return: a ResolveResult holding the resolved jobs (or the failure
        recorded by _resolve)
    """
    # NOTE(review): forcing DEBUG verbosity on every call looks like a
    # debugging leftover -- confirm it is intentional.
    log.setMinVerbosity(log.DEBUG)
    trv = resolveJob.getTrove()
    cfg = resolveJob.getConfig()
    client = conaryclient.ConaryClient(cfg)
    # Share a single repository client between this resolver and the
    # conary client.
    if not self.repos:
        self.repos = client.repos
    else:
        client.repos = self.repos

    if cfg.resolveTrovesOnly:
        # Only the configured resolveTroves are searched; no label path.
        installLabelPath = None
    else:
        installLabelPath = cfg.installLabelPath
    # Both branches used the same flavor, so the assignment is hoisted.
    searchFlavor = cfg.flavor

    searchSource, resolveSource = self.getSources(resolveJob)
    # The try/finally guarantees the client and sources are closed on
    # every exit path; the original leaked them on the "nothing to
    # resolve" early return below.
    try:
        self.logger.debug('attempting to resolve buildreqs for %s=%s[%s]'
                          % resolveJob.getTrove().getNameVersionFlavor())
        resolveResult = ResolveResult(inCycle=resolveJob.inCycle)

        buildReqs = trv.getBuildRequirementSpecs()
        crossReqs = trv.getCrossRequirementSpecs()
        if not (buildReqs or crossReqs):
            resolveResult.troveResolved([], [], [])
            return resolveResult

        self.logger.debug(' finding buildreqs for %s....' % trv.getName())
        self.logger.debug(' resolving deps for %s...' % trv.getName())
        start = time.time()

        # Distinct empty sets: the original bound all three names to one
        # shared set object, a latent aliasing hazard.
        buildReqJobs = set()
        crossReqJobs = set()
        bootstrapJobs = set()

        if buildReqs:
            success, results = self._resolve(cfg, resolveResult, trv,
                                             searchSource, resolveSource,
                                             installLabelPath, searchFlavor,
                                             buildReqs)
            if not success:
                return resolveResult
            buildReqJobs = results
        if crossReqs:
            # Cross requirements resolve against the cross-target sources
            # and flavor.  NOTE(review): as in the original, the previous
            # sources are replaced here without being closed.
            searchSource, resolveSource = self.getSources(resolveJob,
                                                          cross=True)
            searchFlavor = resolveSource.flavor
            success, results = self._resolve(cfg, resolveResult, trv,
                                             searchSource, resolveSource,
                                             installLabelPath, searchFlavor,
                                             crossReqs, isCross=True)
            if not success:
                return resolveResult
            crossReqJobs = results
        if cfg.bootstrapTroves:
            success, results = self._resolve(cfg, resolveResult, trv,
                                             searchSource, resolveSource,
                                             installLabelPath, searchFlavor,
                                             cfg.bootstrapTroves)
            if not success:
                return resolveResult
            bootstrapJobs = results

        allJobs = bootstrapJobs | buildReqJobs | crossReqJobs
        if (searchSource.mainSource
            and False not in searchSource.mainSource.hasTroves(
                [(x[0], x[2][0], x[2][1]) for x in allJobs])):
            # All troves came from resolveTroves therefore the result is
            # cacheable.
            resolveResult.jobHash = resolveJob.getJobHash()
    finally:
        client.close()
        searchSource.close()
        resolveSource.close()

    self.logger.debug(' took %s seconds' % (time.time() - start))
    self.logger.info(' Resolved troves:')
    if crossReqJobs:
        self.logger.info(' Cross Requirements:')
        self.logger.info('\n '.join(['%s=%s[%s]' % (x[0], x[2][0], x[2][1])
                                     for x in sorted(crossReqJobs)]))
    if bootstrapJobs:
        self.logger.info(' Chroot Init Requirements:')
        self.logger.info('\n '.join(['%s=%s[%s]' % (x[0], x[2][0], x[2][1])
                                     for x in sorted(bootstrapJobs)]))
    if buildReqJobs:
        self.logger.info(' Build Requirements:')
        self.logger.info('\n '.join(['%s=%s[%s]' % (x[0], x[2][0], x[2][1])
                                     for x in sorted(buildReqJobs)]))
    if resolveResult.jobHash:
        self.logger.info("Resolve result can be cached, hash key is %s",
                         resolveResult.jobHash)
    resolveResult.troveResolved(buildReqJobs, crossReqJobs, bootstrapJobs)
    return resolveResult
def resolve(self, resolveJob):
    """
    Find the set of troves that must be installed for the set of
    buildreqs associated with this trove.

    Searches for build req and runtime req solutions in the following
    order:

    1. Search the group, w/o consideration of order
    2. Search the label of the trove we're building, followed by the
       rest of the labelPath
    3. Search the label path.
    """
    # NOTE(review): forcing DEBUG verbosity on every call looks like a
    # debugging leftover -- confirm it is intentional.
    log.setMinVerbosity(log.DEBUG)
    trv = resolveJob.getTrove()
    cfg = resolveJob.getConfig()
    client = conaryclient.ConaryClient(cfg)
    # Share one repository client between this resolver and the client.
    if not self.repos:
        self.repos = client.repos
    else:
        client.repos = self.repos
    if cfg.resolveTrovesOnly:
        # Only the configured resolveTroves are searched; no label path.
        installLabelPath = None
        searchFlavor = cfg.flavor
    else:
        installLabelPath = cfg.installLabelPath
        searchFlavor = cfg.flavor
    searchSource, resolveSource = self.getSources(resolveJob)
    self.logger.debug('attempting to resolve buildreqs for %s=%s[%s]'
                      % resolveJob.getTrove().getNameVersionFlavor())
    resolveResult = ResolveResult(inCycle=resolveJob.inCycle)
    buildReqs = trv.getBuildRequirementSpecs()
    crossReqs = trv.getCrossRequirementSpecs()
    if not (buildReqs or crossReqs):
        # Nothing to resolve.  NOTE(review): this early return skips the
        # client/searchSource/resolveSource close() calls that every
        # other exit path makes -- possible resource leak.
        resolveResult.troveResolved([], [], [])
        return resolveResult
    self.logger.debug(' finding buildreqs for %s....' % trv.getName())
    self.logger.debug(' resolving deps for %s...' % trv.getName())
    start = time.time()
    # NOTE: all three names are bound to the *same* empty set object;
    # safe only because they are reassigned (never mutated) below.
    buildReqJobs = crossReqJobs = bootstrapJobs = set()
    if buildReqs:
        success, results = self._resolve(cfg, resolveResult, trv,
                                         searchSource, resolveSource,
                                         installLabelPath, searchFlavor,
                                         buildReqs)
        if success:
            buildReqJobs = results
        else:
            # _resolve recorded the failure on resolveResult; clean up
            # and report it.
            client.close()
            searchSource.close()
            resolveSource.close()
            return resolveResult
    if crossReqs:
        # Cross requirements resolve against the cross-target sources
        # and flavor.  NOTE(review): the previous searchSource and
        # resolveSource are replaced here without being closed.
        searchSource, resolveSource = self.getSources(resolveJob,
                                                      cross=True)
        searchFlavor = resolveSource.flavor
        success, results = self._resolve(cfg, resolveResult, trv,
                                         searchSource, resolveSource,
                                         installLabelPath, searchFlavor,
                                         crossReqs, isCross=True)
        if success:
            crossReqJobs = results
        else:
            client.close()
            searchSource.close()
            resolveSource.close()
            return resolveResult
    if cfg.bootstrapTroves:
        success, results = self._resolve(cfg, resolveResult, trv,
                                         searchSource, resolveSource,
                                         installLabelPath, searchFlavor,
                                         cfg.bootstrapTroves)
        if success:
            bootstrapJobs = results
        else:
            client.close()
            searchSource.close()
            resolveSource.close()
            return resolveResult
    if (searchSource.mainSource
        and False not in searchSource.mainSource.hasTroves(
            [(x[0], x[2][0], x[2][1])
             for x in bootstrapJobs | buildReqJobs | crossReqJobs])):
        # All troves came from resolveTroves therefore the result is
        # cacheable.
        resolveResult.jobHash = resolveJob.getJobHash()
    client.close()
    searchSource.close()
    resolveSource.close()
    self.logger.debug(' took %s seconds' % (time.time() - start))
    self.logger.info(' Resolved troves:')
    if crossReqJobs:
        self.logger.info(' Cross Requirements:')
        self.logger.info('\n '.join(['%s=%s[%s]' % (x[0], x[2][0], x[2][1])
                                     for x in sorted(crossReqJobs)]))
    if bootstrapJobs:
        self.logger.info(' Chroot Init Requirements:')
        self.logger.info('\n '.join(['%s=%s[%s]' % (x[0], x[2][0], x[2][1])
                                     for x in sorted(bootstrapJobs)]))
    if buildReqJobs:
        self.logger.info(' Build Requirements:')
        self.logger.info('\n '.join(['%s=%s[%s]' % (x[0], x[2][0], x[2][1])
                                     for x in sorted(buildReqJobs)]))
    if resolveResult.jobHash:
        self.logger.info("Resolve result can be cached, hash key is %s",
                         resolveResult.jobHash)
    resolveResult.troveResolved(buildReqJobs, crossReqJobs, bootstrapJobs)
    return resolveResult
class RmakeMain(options.MainHandler):
    """
    Top-level command handler for the rmake command-line front end.

    Note: this is Python 2 code (print statements, tuple-unpacking
    parameter in runCommand).
    """
    name = 'rmake'
    version = constants.version
    abstractCommand = command.rMakeCommand
    configClass = buildcfg.BuildConfiguration
    # rmake manages its own conary configuration; don't mix in conary's
    # own command-line options.
    useConaryOptions = False
    commandList = command._commands

    def usage(self, rc=1, showAll=False):
        """Print a usage banner, then delegate to the base handler."""
        print 'rmake: front end to rMake build tool'
        if not showAll:
            print
            print 'Common Commands (use "rmake help" for the full list)'
        return options.MainHandler.usage(self, rc, showAll=showAll)

    def initializePlugins(self, argv):
        """Load plugins and fire the client_preInit hook; returns the
        plugin manager."""
        p = plugins.getPluginManager(argv, buildcfg.BuildConfiguration)
        p.callClientHook('client_preInit', self, argv)
        return p

    def getConfigFile(self, argv):
        """
        Build the (buildConfig, conaryConfig, pluginManager) triple that
        runCommand() unpacks.  Plugins are initialized first so they can
        see argv before config files are read.  --skip-default-config is
        consumed here (removed from argv) and suppresses reading the
        default config files.
        """
        pluginManager = self.initializePlugins(argv)
        if '--skip-default-config' in argv:
            argv.remove('--skip-default-config')
            read = False
        else:
            read = True
        buildConfig = buildcfg.BuildConfiguration(readConfigFiles=read)
        conaryConfig = conarycfg.ConaryConfiguration(readConfigFiles=read)
        return buildConfig, conaryConfig, pluginManager

    def runCommand(self, thisCommand, (buildConfig, conaryConfig,
                                       pluginManager), argSet, args):
        """
        Run one rmake command: fire pre-command plugin hooks, build the
        rMakeHelper client, then dispatch to the base handler.  On
        BadParameters the command's usage is printed before re-raising.
        """
        pluginManager.callClientHook('client_preCommand', self, thisCommand,
                                     (buildConfig, conaryConfig), argSet,
                                     args)
        compat.checkRequiredVersions()
        # A command counts as "verbose" unless verbosity was lowered
        # below INFO on the command line.
        thisCommand.verbose = (log.getVerbosity() <= log.INFO)
        if args[1] != 'help':
            # NOTE: the help system assumes that the base level of output
            # you want is "warning", but rmake is more verbose than that.
            # Due to limitations in how configurable the help system is,
            # I can't easily fix that.  Someday I should though.  For now,
            # if we're running help, we make log.WARNING the default level,
            # and otherwise log.INFO is the default.
            log.setMinVerbosity(log.INFO)
        # don't let the buildFlavor be overridden yet
        client = helper.rMakeHelper(buildConfig=buildConfig,
                                    promptPassword=True)
        pluginManager.callClientHook('client_preCommand2', self, client,
                                     thisCommand)
        try:
            return options.MainHandler.runCommand(self, thisCommand, client,
                                                  buildConfig, argSet, args)
        except errors.BadParameters:
            # Show usage for the failed command, quieting the log first
            # if the command wasn't verbose, then propagate the error.
            if not thisCommand.verbose:
                log.setVerbosity(log.WARNING)
            thisCommand.usage()
            raise