    # NOTE(review): this chunk begins inside a class body (presumably
    # ImageNode, given the register() call below); the class header is
    # outside this view.

    def find_job(self, uuid):
        """Return the tracked job with the given uuid, or None."""
        for job in self.jobs:
            if job.uuid == uuid:
                return job
        return None

    def is_alive(self):
        """True if last_seen is within the node timeout window."""
        return time.time() - self.last_seen < self.node_timeout

    def has_slots(self):
        """True if the node can accept at least one more job."""
        return len(self.jobs) < self.slots

    def get_score(self):
        """Load factor: fraction of this node's slots currently in use."""
        return len(self.jobs) / float(self.slots)


register(ImageNode)


def main(args):
    """Dispatcher entry point: parse options, load config, set up logging.

    NOTE(review): the function continues past the end of this chunk.
    """
    parser = optparse.OptionParser()
    parser.add_option('-c', '--config-file', default=config.CONFIG_PATH)
    parser.add_option('-n', '--no-daemon', action='store_true')
    parser.add_option('-p', '--pid-file',
            default='/var/run/mcp-dispatcher.pid')
    options, args = parser.parse_args(args)

    cfg = config.MCPConfig()
    # Read the config when a non-default path was given, or when the
    # default path actually exists (don't fail on a missing default file).
    if (options.config_file != config.CONFIG_PATH
            or os.path.exists(config.CONFIG_PATH)):
        cfg.read(options.config_file)

    setupLogging(cfg.logLevel, toFile=cfg.logPath,
            toStderr=options.no_daemon)
    If you create an error in rMake, it should derive from this class,
    and have a str() that is acceptable output for the command line,
    with an "error: " prompt before it.

    Any relevant data for this error should be stored outside of the
    string so it can be accessed from non-command-line interfaces.
    """
    # NOTE(review): this chunk opens inside RmakeError's class docstring;
    # the class header is outside this view.

    @classmethod
    def __thaw__(class_, data):
        # Rebuild the exception from its frozen positional args.
        return class_(*data)

    def __freeze__(self):
        # The positional args are the full wire representation.
        return self.args
apiutils.register(RmakeError)


class BadParameters(RmakeError):
    """
    Raised when a command is given bad parameters at the command line.
    """
    pass


class JobNotFound(RmakeError):
    def __str__(self):
        return "JobNotFound: Could not find job with jobId %s" % self.args[0]

    @classmethod
    def __thaw__(class_, data):
        # NOTE(review): chunk truncated here; the method body is outside
        # this view.
        # NOTE(review): chunk opens mid-method (the tail of a __freeze__,
        # judging by the matching __thaw__ below); its def is not visible.
        return d

    @classmethod
    def __thaw__(class_, d):
        """Rebuild a ResolveResult from its frozen dict representation."""
        self = class_()
        self.__dict__.update(d)
        # Re-inflate the members the freezer serialized.
        self.buildReqs = thaw('installJobList', self.buildReqs)
        self.crossReqs = thaw('installJobList', self.crossReqs)
        self.bootstrapReqs = thaw('installJobList', self.bootstrapReqs)
        self.missingDeps = thaw('dependencyMissingList', self.missingDeps)
        self.missingBuildReqs = [(x[0], thaw('troveSpec', x[1]))
                                 for x in self.missingBuildReqs]
        return self
register(ResolveResult)


class DependencyResolver(object):
    """
    Resolves dependencies for one trove.
    """
    def __init__(self, logger, repos=None):
        self.logger = logger
        self.repos = repos

    def getSources(self, resolveJob, cross=False):
        cfg = resolveJob.getConfig()
        if cross:
            buildFlavor = deps.overrideFlavor(
            # NOTE(review): chunk truncated mid-call; the arguments
            # continue outside this view.
        # NOTE(review): chunk opens inside ResolveJob.__freeze__; the def
        # line is outside this view.
        d = dict(trove=freeze('BuildTrove', self.trove),
                 buildCfg=freeze('BuildConfiguration', self.buildCfg),
                 builtTroves=freeze('troveTupleList', self.builtTroves),
                 crossTroves=freeze('troveTupleList', self.crossTroves),
                 inCycle=self.inCycle)
        return d

    @classmethod
    def __thaw__(class_, d):
        """Rebuild a ResolveJob from its frozen dict representation."""
        self = class_(**d)
        self.trove = thaw('BuildTrove', self.trove)
        self.buildCfg = thaw('BuildConfiguration', self.buildCfg)
        self.builtTroves = thaw('troveTupleList', self.builtTroves)
        self.crossTroves = thaw('troveTupleList', self.crossTroves)
        return self
register(ResolveJob)


class DependencyGraph(graph.DirectedGraph):
    # FIXME: remove with next release of conary
    def __contains__(self, trove):
        return trove in self.data.hashedData

    def generateDotFile(self, out, filterFn=None):
        def formatNode(node):
            # Collapse the trove to a short human-readable label.
            name, version, flavor, context = node.getNameVersionFlavor(True)
            name = name.split(':')[0]
            versionStr = '%s' % (version.trailingRevision())
            archFlavor = flavorutil.getArchFlags(flavor, withFlags=False)
            restFlavor = flavorutil.removeInstructionSetFlavor(flavor)
            archFlavor.union(restFlavor)
            if context:
                # NOTE(review): chunk truncated here; the body continues
                # outside this view.
    # NOTE(review): chunk opens inside the LoadSpecs class (per the
    # register() call below); the class header is outside this view.
    @staticmethod
    def __thaw__(frzLoaded):
        """Rebuild the nested loadSpec mapping, thawing each trove tuple.

        Walks the frozen structure with an explicit stack to handle
        arbitrarily deep nesting without recursion.
        """
        d = {}
        stack = [(d, frzLoaded)]
        while stack:
            loadDict, frozenDict = stack.pop()
            for spec, (frzTroveTup, newFrzDict) in frozenDict.iteritems():
                subLoadDict = {}
                loadDict[spec] = (thaw("troveTuple", frzTroveTup),
                                  subLoadDict)
                if newFrzDict:
                    stack.append((subLoadDict, newFrzDict))
        return d
apiutils.register(LoadSpecs)


class LoadSpecsList(object):
    """Freezer for a list of LoadSpecs objects."""

    @staticmethod
    def __freeze__(loadSpecsList):
        # Delegate each element to the registered LoadSpecs freezer.
        return [apiutils.freeze("LoadSpecs", x) for x in loadSpecsList]

    @staticmethod
    def __thaw__(frzLoadSpecsList):
        return [apiutils.thaw("LoadSpecs", x) for x in frzLoadSpecsList]
apiutils.register(LoadSpecsList)
    # NOTE(review): chunk opens inside _EventListFreezer (per the
    # register() call below); the class header is outside this view.
    @classmethod
    def __freeze__(class_, eventList):
        """Freeze an (apiVer, events) pair for transport.

        Event data whose first element is not an int carries a
        (jobId, troveContextTuple) pair that must be frozen; events may
        also provide a per-event freeze_<event> hook on this class.
        """
        apiVer, eventList = eventList
        newEventList = []
        for ((event, subevent), data) in eventList:
            if not isinstance(data[0], int):
                data = [(data[0][0],
                         freeze('troveContextTuple', data[0][1]))] + data[1:]
            fn = getattr(class_, 'freeze_' + event, None)
            if fn is not None:
                data = fn(apiVer, data)
            newEventList.append(((event, subevent), data))
        return apiVer, newEventList

    @classmethod
    def __thaw__(class_, eventList):
        """Inverse of __freeze__: thaw trove tuples and apply thaw hooks."""
        apiVer, eventList = eventList
        newEventList = []
        for ((event, subevent), data) in eventList:
            if not isinstance(data[0], int):
                data = [(data[0][0],
                         thaw('troveContextTuple', data[0][1]))] + data[1:]
            fn = getattr(class_, 'thaw_' + event, None)
            if fn is not None:
                data = fn(apiVer, data)
            newEventList.append(((event, subevent), data))
        return apiVer, newEventList
apiutils.register(_EventListFreezer)
        # NOTE(review): chunk opens inside ResolveResult.__freeze__; the
        # def line is outside this view.
        d.update(missingDeps=freeze('dependencyMissingList',
                                    self.missingDeps))
        return d

    @classmethod
    def __thaw__(class_, d):
        """Rebuild a ResolveResult from its frozen dict representation."""
        self = class_()
        self.__dict__.update(d)
        # Re-inflate the members the freezer serialized.
        self.buildReqs = thaw('installJobList', self.buildReqs)
        self.crossReqs = thaw('installJobList', self.crossReqs)
        self.bootstrapReqs = thaw('installJobList', self.bootstrapReqs)
        self.missingDeps = thaw('dependencyMissingList', self.missingDeps)
        self.missingBuildReqs = [(x[0], thaw('troveSpec', x[1]))
                                 for x in self.missingBuildReqs]
        return self
register(ResolveResult)


class DependencyResolver(object):
    """
    Resolves dependencies for one trove.
    """
    def __init__(self, logger, repos=None):
        self.logger = logger
        self.repos = repos

    def getSources(self, resolveJob, cross=False):
        cfg = resolveJob.getConfig()
        if cross:
            buildFlavor = deps.overrideFlavor(resolveJob.buildCfg.buildFlavor,
            # NOTE(review): chunk truncated mid-call; the arguments
            # continue outside this view.
    RmakeError - superclass for all well-defined rMake errors.

    If you create an error in rMake, it should derive from this class,
    and have a str() that is acceptable output for the command line,
    with an "error: " prompt before it.

    Any relevant data for this error should be stored outside of the
    string so it can be accessed from non-command-line interfaces.
    """
    # NOTE(review): this chunk opens inside RmakeError's class docstring;
    # the class header is outside this view.

    @classmethod
    def __thaw__(class_, data):
        # Rebuild the exception from its frozen positional args.
        return class_(*data)

    def __freeze__(self):
        # The positional args are the full wire representation.
        return self.args
apiutils.register(RmakeError)


class BadParameters(RmakeError):
    """
    Raised when a command is given bad parameters at the command line.
    """
    pass


class JobNotFound(RmakeError):
    def __str__(self):
        return "JobNotFound: Could not find job with jobId %s" % self.args[0]

    @classmethod
    def __thaw__(class_, data):
        # Rebuild the exception from its frozen positional args.
        return class_(*data)
return new class StatusSubscriber(_AbstractStatusSubscriber, FreezableStatusSubscriberMixin): def parse(self, field, data): if field not in self.fields: getattr(self, 'parse_' + field)(data) else: self[field] = data def parse_apiVersion(self, data): self.apiVersion = data def parse_event(self, data): event = data.split(None) for event in data.split(): fields = event.split('+', 1) if len(fields) == 1: self.watchEvent(event) else: self.watchEvent(fields[0], fields[1].split(',')) def __deepcopy__(self, memo): s = self.__class__(self.subscriberId, self.uri) [s.parse(*x.split(None, 1)) for x in self.freezeData()[1:]] return s apiutils.register(apiutils.api_freezable(StatusSubscriber), name='Subscriber')
        # NOTE(review): chunk opens inside BuildJob (per the register()
        # call below); this first statement is the tail of a
        # commit-failure handler.
        self._setState(JOB_STATE_BUILT, 'Commit failed: %s' % message)

    def jobCommitted(self, troveMap):
        # Mark the job committed and notify subscribers with the mapping
        # of built troves.
        self._setState(JOB_STATE_COMMITTED, '')
        publisher = self.getPublisher()
        publisher.jobCommitted(self, troveMap)

    def exceptionOccurred(self, err, tb):
        # Wrap an unexpected exception as an internal-error failure.
        self.jobFailed(failure.InternalError(str(err), tb))

    def _setState(self, state, status='', *args):
        """Set job state/status and publish the state change."""
        self.state = state
        self.status = status
        self._publisher.jobStateUpdated(self, state, status, *args)
apiutils.register(apiutils.api_freezable(BuildJob))


def NewBuildJob(db, troveTups, jobConfig=None, state=JOB_STATE_INIT, uuid=''):
    """
    Create a new build job that is attached to the database - i.e. that
    will send notifications to the database when it is updated.

    Note this is the preferred way to create a BuildJob, since it gives
    the job a jobId.
    """
    job = BuildJob(None, troveTups, state=state, uuid=uuid)
    if jobConfig:
        job.setMainConfig(jobConfig)
    db.addJob(job)
    return job
new.parse(field, val) return new class StatusSubscriber(_AbstractStatusSubscriber, FreezableStatusSubscriberMixin): def parse(self, field, data): if field not in self.fields: getattr(self, "parse_" + field)(data) else: self[field] = data def parse_apiVersion(self, data): self.apiVersion = data def parse_event(self, data): event = data.split(None) for event in data.split(): fields = event.split("+", 1) if len(fields) == 1: self.watchEvent(event) else: self.watchEvent(fields[0], fields[1].split(",")) def __deepcopy__(self, memo): s = self.__class__(self.subscriberId, self.uri) [s.parse(*x.split(None, 1)) for x in self.freezeData()[1:]] return s apiutils.register(apiutils.api_freezable(StatusSubscriber), name="Subscriber")
    # NOTE(review): chunk opens inside BuildJob (per the register() call
    # below); the class header is outside this view.
    def jobCommitted(self, troveMap):
        # Mark the job committed and notify subscribers with the mapping
        # of built troves.
        self._setState(JOB_STATE_COMMITTED, '')
        publisher = self.getPublisher()
        publisher.jobCommitted(self, troveMap)

    def exceptionOccurred(self, err, tb):
        # Wrap an unexpected exception as an internal-error failure.
        self.jobFailed(failure.InternalError(str(err), tb))

    def _setState(self, state, status='', *args):
        """Set job state/status and publish the state change."""
        self.state = state
        self.status = status
        self._publisher.jobStateUpdated(self, state, status, *args)
apiutils.register(apiutils.api_freezable(BuildJob))


def NewBuildJob(db, troveTups, jobConfig=None, state=JOB_STATE_INIT, uuid=''):
    """
    Create a new build job that is attached to the database - i.e. that
    will send notifications to the database when it is updated.

    Note this is the preferred way to create a BuildJob, since it gives
    the job a jobId.
    """
    job = BuildJob(None, troveTups, state=state, uuid=uuid)
    if jobConfig:
        job.setMainConfig(jobConfig)
    db.addJob(job)
    return job
        # NOTE(review): chunk opens inside a method (the find_job lookup
        # loop); its def line and the class header are outside this view.
        for job in self.jobs:
            if job.uuid == uuid:
                return job
        return None

    def is_alive(self):
        """True if last_seen is within the node timeout window."""
        return time.time() - self.last_seen < self.node_timeout

    def has_slots(self):
        """True if the node can accept at least one more job."""
        return len(self.jobs) < self.slots

    def get_score(self):
        """Load factor: fraction of this node's slots currently in use."""
        return len(self.jobs) / float(self.slots)


register(ImageNode)


def main(args):
    """Dispatcher entry point: parse options and load the config.

    NOTE(review): the function continues past the end of this chunk.
    """
    parser = optparse.OptionParser()
    parser.add_option('-c', '--config-file', default=config.CONFIG_PATH)
    parser.add_option('-n', '--no-daemon', action='store_true')
    parser.add_option('-p', '--pid-file',
            default='/var/run/mcp-dispatcher.pid')
    options, args = parser.parse_args(args)

    cfg = config.MCPConfig()
    # Read the config when a non-default path was given, or when the
    # default path actually exists (don't fail on a missing default file).
    if (options.config_file != config.CONFIG_PATH
            or os.path.exists(config.CONFIG_PATH)):
        cfg.read(options.config_file)
# any macros separately (RMK-996) if 'macros' in self: macros = sorted(x for x in self.macros.iteritems()) if macros: return '\n'.join('%s = %r' % x for x in macros) + '\n' return '' def _writeKey(self, out, cfgItem, value, options): if cfgItem.name in self._cfg_hidden: if not options.get('displayHidden', False): return conarycfg.ConaryConfiguration._writeKey(self, out, cfgItem, self[cfgItem.name], options) apiutils.register(apiutils.api_freezable(BuildConfiguration), 'BuildConfiguration') class SanitizedBuildConfiguration(object): @staticmethod def __freeze__(cfg): cfg = apiutils.freeze('BuildConfiguration', cfg) cfg['user'] = [] cfg['entitlement'] = [] return cfg @staticmethod def __thaw__(cfg): return apiutils.thaw('BuildConfiguration', cfg) apiutils.register(SanitizedBuildConfiguration)
        # NOTE(review): chunk opens inside a param-thawing generator; the
        # enclosing def and matching if-branches are outside this view.
        else:
            yield paramType.__thaw__(param)


def _thawReturn(api, val, version):
    """Thaw a return value using the API's per-version return type."""
    r = api.returnType[version]
    if r is not None:
        val = r.__thaw__(val)
    return val


class ApiError(errors.RmakeError):
    pass
apiutils.register(ApiError)


class NoSuchMethodError(ApiError):
    def __init__(self, method):
        self.method = method
        ApiError.__init__(self, 'No such method: %s' % method)


class CallData(object):
    # __slots__ keeps this frequently-created call record lightweight.
    __slots__ = [
        'auth', 'apiMajorVersion', 'apiMinorVersion', 'methodVersion',
        'logger', 'method', 'responseHandler', 'debug', 'authMethod'
        ]

    def __init__(self,
        # NOTE(review): chunk truncated mid-signature; the parameters
        # continue outside this view.
    # NOTE(review): chunk opens mid-function (a URL builder); its def is
    # outside this view.
    # NOTE(review): "'******' % (self.rmakeUser)" would raise TypeError at
    # runtime (no conversion specifier in the format string); this looks
    # like a format string mangled by credential scrubbing -- confirm
    # against the upstream source.
    user = '******' % (self.rmakeUser)
    url = '%s://%s%s:%s%s' % (type, user, host, port, path)
    return url


def updateConfig():
    """Graft rMake's BuildContext settings onto Conary's config classes."""
    if hasattr(buildcfg.RmakeBuildContext, 'extend'):
        # Conary >= 2.5
        buildcfg.RmakeBuildContext.extend(BuildContext)
        buildcfg.BuildConfiguration.extend(BuildContext)
    else:
        # Conary < 2.5
        buildcfg.RmakeBuildContext.rmakeUrl = BuildContext.rmakeUrl
        buildcfg.RmakeBuildContext.rmakeUser = BuildContext.rmakeUser
        buildcfg.RmakeBuildContext.clientCert = BuildContext.clientCert
    # NOTE(review): the original one-line source did not show whether this
    # assignment sat inside the else branch; placed unconditionally here
    # since both branches need the method patch -- confirm.
    buildcfg.BuildConfiguration.getServerUri = getServerUri


class SanitizedBuildConfiguration(buildcfg.SanitizedBuildConfiguration):
    """Sanitizer that additionally strips the rMake credentials."""

    @staticmethod
    def __freeze__(cfg):
        cfg = buildcfg.SanitizedBuildConfiguration.__freeze__(cfg)
        if 'rmakeUser' in cfg:
            del cfg['rmakeUser']
        return cfg

    @staticmethod
    def __thaw__(cfg):
        return apiutils.thaw('BuildConfiguration', cfg)
apiutils.register(SanitizedBuildConfiguration)
class Chroot(object):
    """A chroot on a build node, freezable for the rMake API layer."""

    def __init__(self, host, path, jobId, troveTuple, active):
        self.host = host
        self.path = path
        if not jobId:
            jobId = 0
        assert(path is not None)
        self.jobId = jobId
        self.troveTuple = troveTuple
        self.active = active

    def __freeze__(self):
        # Serialize all attributes; an empty string stands in for "no
        # trove" on the wire.
        frozen = dict(self.__dict__)
        frozen['troveTuple'] = (freeze('troveTuple', self.troveTuple)
                                if self.troveTuple else '')
        return frozen

    @classmethod
    def __thaw__(class_, d):
        # Rebuild from the frozen dict, re-inflating the trove tuple.
        chroot = class_(**d)
        chroot.troveTuple = (thaw('troveTuple', chroot.troveTuple)
                             if chroot.troveTuple else None)
        return chroot
apiutils.register(Chroot)
            # NOTE(review): chunk opens inside a param-thawing loop; the
            # enclosing def/for are outside this view.
            if paramType is None:
                yield param
            elif isinstance(paramType, tuple):
                yield paramType[1](param)
            else:
                yield paramType.__thaw__(param)


def _thawReturn(api, val, version):
    """Thaw a return value using the API's per-version return type."""
    r = api.returnType[version]
    if r is not None:
        val = r.__thaw__(val)
    return val


class ApiError(errors.RmakeError):
    pass
apiutils.register(ApiError)


class NoSuchMethodError(ApiError):
    def __init__(self, method):
        self.method = method
        ApiError.__init__(self, 'No such method: %s' % method)


class CallData(object):
    # __slots__ keeps this frequently-created call record lightweight.
    __slots__ = ['auth', 'apiMajorVersion', 'apiMinorVersion',
                 'methodVersion', 'logger', 'method', 'responseHandler',
                 'debug', 'authMethod']

    def __init__(self, auth, callData, logger, method, responseHandler,
                 debug=False, authMethod=None):
        self.apiMajorVersion = callData['apiMajorVersion']
        self.apiMinorVersion = callData['apiMinorVersion']
        self.methodVersion = callData['methodVersion']
        # NOTE(review): chunk truncated mid-__init__; the body continues
        # outside this view.
    # NOTE(review): chunk opens at the tail of a URL-building function;
    # its def is outside this view.
    return url


def updateConfig():
    """Graft rMake's BuildContext settings onto Conary's config classes."""
    if hasattr(buildcfg.RmakeBuildContext, 'extend'):
        # Conary >= 2.5
        buildcfg.RmakeBuildContext.extend(BuildContext)
        buildcfg.BuildConfiguration.extend(BuildContext)
    else:
        # Conary < 2.5
        buildcfg.RmakeBuildContext.rmakeUrl = BuildContext.rmakeUrl
        buildcfg.RmakeBuildContext.rmakeUser = BuildContext.rmakeUser
        buildcfg.RmakeBuildContext.clientCert = BuildContext.clientCert
    # NOTE(review): the original one-line source did not show whether this
    # assignment sat inside the else branch; placed unconditionally here
    # since both branches need the method patch -- confirm.
    buildcfg.BuildConfiguration.getServerUri = getServerUri


class SanitizedBuildConfiguration(buildcfg.SanitizedBuildConfiguration):
    """Sanitizer that additionally strips the rMake credentials."""

    @staticmethod
    def __freeze__(cfg):
        cfg = buildcfg.SanitizedBuildConfiguration.__freeze__(cfg)
        if 'rmakeUser' in cfg:
            del cfg['rmakeUser']
        return cfg

    @staticmethod
    def __thaw__(cfg):
        return apiutils.thaw('BuildConfiguration', cfg)
apiutils.register(SanitizedBuildConfiguration)
class Node(object):
    """A build node advertised to clients, freezable for the API layer.

    chroots is an optional initial chroot list; when omitted, each
    instance gets its own fresh empty list.
    """

    def __init__(self, name, hostname, slots, flavors, active, chroots=None):
        self.name = name
        self.hostname = hostname
        self.slots = slots
        self.flavors = flavors
        self.active = active
        # Fix: the original signature used a mutable default (chroots=[])
        # that is evaluated once and shared by every Node constructed
        # without an explicit list, so addChroot() on one such node leaked
        # the chroot into all of them.
        if chroots is None:
            chroots = []
        self.chroots = chroots

    @staticmethod
    def __freeze__(self):
        # NOTE: a staticmethod receiving the instance is this freezer
        # API's convention (matching the other registered freezers).
        return dict(hostname=self.hostname,
                    flavors=[freeze('flavor', x) for x in self.flavors],
                    name=self.name,
                    active=self.active,
                    slots=self.slots,
                    chroots=[freeze('Chroot', x) for x in self.chroots])

    @classmethod
    def __thaw__(class_, d):
        # Rebuild from the frozen dict, re-inflating nested freezables.
        self = class_(**d)
        self.chroots = [thaw('Chroot', x) for x in self.chroots]
        self.flavors = [thaw('flavor', x) for x in self.flavors]
        return self

    def addChroot(self, chroot):
        self.chroots.append(chroot)
apiutils.register(Node)
#
# NOTE(review): the mangled source ran these imports together on one line
# with a leading "#"; reconstructed as active imports because the class
# body uses freeze/thaw and apiutils unqualified -- confirm upstream.
from rmake.lib.apiutils import thaw, freeze
from rmake.lib import apiutils


class Node(object):
    """A build node advertised to clients, freezable for the API layer.

    chroots is an optional initial chroot list; when omitted, each
    instance gets its own fresh empty list.
    """

    def __init__(self, name, hostname, slots, flavors, active, chroots=None):
        self.name = name
        self.hostname = hostname
        self.slots = slots
        self.flavors = flavors
        self.active = active
        # Fix: the original signature used a mutable default (chroots=[])
        # that is evaluated once and shared by every Node constructed
        # without an explicit list, so any append to one such node's
        # chroots leaked into all of them.
        if chroots is None:
            chroots = []
        self.chroots = chroots

    @staticmethod
    def __freeze__(self):
        # NOTE: a staticmethod receiving the instance is this freezer
        # API's convention (matching the other registered freezers).
        return dict(hostname=self.hostname,
                    flavors=[freeze('flavor', x) for x in self.flavors],
                    name=self.name,
                    active=self.active,
                    slots=self.slots,
                    chroots=[freeze('Chroot', x) for x in self.chroots])

    @classmethod
    def __thaw__(class_, d):
        # Rebuild from the frozen dict, re-inflating nested freezables.
        self = class_(**d)
        self.chroots = [thaw('Chroot', x) for x in self.chroots]
        self.flavors = [thaw('flavor', x) for x in self.flavors]
        return self
apiutils.register(Node)
# any macros separately (RMK-996) if 'macros' in self: macros = sorted(x for x in self.macros.iteritems()) if macros: return '\n'.join('%s = %r' % x for x in macros) + '\n' return '' def _writeKey(self, out, cfgItem, value, options): if cfgItem.name in self._cfg_hidden: if not options.get('displayHidden', False): return conarycfg.ConaryConfiguration._writeKey(self, out, cfgItem, self[cfgItem.name], options) apiutils.register(apiutils.api_freezable(BuildConfiguration), 'BuildConfiguration') class SanitizedBuildConfiguration(object): @staticmethod def __freeze__(cfg): cfg = apiutils.freeze('BuildConfiguration', cfg) cfg['user'] = [] cfg['entitlement'] = [] return cfg @staticmethod def __thaw__(cfg): return apiutils.thaw('BuildConfiguration', cfg)
    # NOTE(review): chunk opens inside the LoadSpecs class (per the
    # register() call below); the class header is outside this view.
    @staticmethod
    def __thaw__(frzLoaded):
        """Rebuild the nested loadSpec mapping, thawing each trove tuple.

        Walks the frozen structure with an explicit stack to handle
        arbitrarily deep nesting without recursion.
        """
        d = {}
        stack = [(d, frzLoaded)]
        while stack:
            loadDict, frozenDict = stack.pop()
            for spec, (frzTroveTup, newFrzDict) in frozenDict.iteritems():
                subLoadDict = {}
                loadDict[spec] = (thaw('troveTuple', frzTroveTup),
                                  subLoadDict)
                if newFrzDict:
                    stack.append((subLoadDict, newFrzDict))
        return d
apiutils.register(LoadSpecs)


class LoadSpecsList(object):
    """Freezer for a list of LoadSpecs objects."""

    @staticmethod
    def __freeze__(loadSpecsList):
        # Delegate each element to the registered LoadSpecs freezer.
        return [apiutils.freeze('LoadSpecs', x) for x in loadSpecsList]

    @staticmethod
    def __thaw__(frzLoadSpecsList):
        return [apiutils.thaw('LoadSpecs', x) for x in frzLoadSpecsList]
apiutils.register(LoadSpecsList)
    # NOTE(review): chunk opens inside the ImageJob class (per the
    # register() call below); the class header is outside this view.
    def __repr__(self):
        return '<ImageJob %s>' % (self.uuid,)

    def __freeze__(self):
        # Only the wire-relevant fields are serialized.
        return dict(rbuilder_url=self.rbuilder_url,
                job_data=self.job_data,
                uuid=self.uuid)

    @classmethod
    def __thaw__(cls, d):
        return cls(**d)

    # Scheduler helpers
    def assign_uuid(self):
        # 16 random bytes, hex-encoded (Python 2 str.encode('hex')).
        self.uuid = os.urandom(16).encode('hex')
register(ImageJob)


class _ImageJobs(object):
    """List-of-ImageJob freezer registered under the name 'ImageJobs'."""
    name = 'ImageJobs'

    @staticmethod
    def __freeze__(jobList):
        return [freeze('ImageJob', job) for job in jobList]

    @staticmethod
    def __thaw__(jobList):
        return [thaw('ImageJob', job) for job in jobList]
register(_ImageJobs)