Example #1
def getJobsToDisplay(dcfg, client, jobId=None, troveSpecs=None,
                     activeOnly=False, jobLimit=None):
    if troveSpecs:
        troveSpecs = ( cmdutil.parseTroveSpec(x) for x in troveSpecs )
        troveSpecs = [ (x[0].split(':')[0] + ':source', x[1], x[2], x[3]) for x in troveSpecs ]

    # Only retrieve the configuration if we have to show it
    withConfigs = dcfg.showConfig
    if not jobId:
        jobIds = client.client.listJobs(activeOnly=activeOnly,
                                        jobLimit=jobLimit)
        jobList = client.client.getJobs(jobIds,
                                        withTroves=dcfg.needTroves)
    else:
        jobList = [ client.client.getJob(jobId, withConfigs = withConfigs) ]

    newJobList = []
    if troveSpecs:
        for job in jobList:
            results = job.findTrovesWithContext(None, troveSpecs, None,
                                                allowMissing=True)
            allTups = list(itertools.chain(*results.itervalues()))
            if allTups:
                newJobList.append((job, allTups))
        return newJobList
    else:
        return [ (x, None) for x in jobList ]
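Both branches of the troveSpecs handling above rely on the same normalization: the spec name is cut back to its package name and ':source' is appended so that lookups match source troves. A self-contained sketch of that pattern, using hand-written 4-tuples in place of cmdutil.parseTroveSpec() output:

# Minimal sketch of the ':source' normalization; the tuples are stand-ins
# for parsed trove specs (name, version, flavor, context).
specs = [('foo:runtime', None, None, ''), ('bar', '1.0', None, 'ctx')]
sourceSpecs = [(name.split(':')[0] + ':source', version, flavor, context)
               for (name, version, flavor, context) in specs]
print sourceSpecs
# [('foo:source', None, None, ''), ('bar:source', '1.0', None, 'ctx')]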
Example #2
    def startChrootSession(self, jobId, troveSpec, command, 
                           superUser=False, chrootHost=None, chrootPath=None):
        job = self.client.getJob(jobId, withTroves=False)
        if not troveSpec:
            troveTups = list(job.iterTroveList(True))
            if len(troveTups) > 1:
                raise errors.RmakeError('job has more than one trove in it, must specify trovespec to chroot into')

        else:
            newTroveSpec = cmdutil.parseTroveSpec(troveSpec)
            newTroveSpec = (newTroveSpec[0].split(':')[0] + ':source',) + newTroveSpec[1:]
            troveTups = job.findTrovesWithContext(None, [newTroveSpec])[newTroveSpec]
            if len(troveTups) > 1:
                err = ['%s matches more than one trove:' % troveSpec]
                for troveTup in troveTups:
                    err.append('  %s=%s[%s]{%s}' % troveTup)
                raise errors.RmakeError('\n'.join(err))
        troveTup = troveTups[0]
        chrootConnection = self.client.connectToChroot(jobId, troveTup,
                                                       command,
                                                       superUser=superUser,
                                                       chrootHost=chrootHost, 
                                                       chrootPath=chrootPath)
        chrootConnection.interact()
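A hedged sketch of how startChrootSession might be called on a helper instance; the helper object, the job id, the trove spec, and the command argument are all placeholders, not values taken from the source above:

# Hypothetical call: open an interactive shell in the chroot used to build
# 'foo:source' in job 42; superUser requests a root shell inside the chroot.
helper.startChrootSession(42, 'foo:source', ['/bin/sh'], superUser=True)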
Example #3
def getBuildJob(buildConfig, conaryclient, troveSpecList,
                message=None, recurseGroups=BUILD_RECURSE_GROUPS_NONE, 
                configDict=None, oldTroveDict=None, updateSpecs=None,
                rebuild=False):
    trovesByContext = {}

    for troveSpec in list(troveSpecList):
        if not isinstance(troveSpec, tuple):
            troveSpec = cmdutil.parseTroveSpec(troveSpec)

        if len(troveSpec) == 3:
            context = ''
        else:
            context = troveSpec[3]
            troveSpec = troveSpec[:3]

        if troveSpec[2] is None:
            troveSpec = (troveSpec[0], troveSpec[1], deps.parseFlavor(''))
        trovesByContext.setdefault(context, []).append(troveSpec)

    job = buildjob.BuildJob()

    # don't store all the contexts with this job - they're useless past the
    # initialization step.
    if configDict:
        mainConfig = configDict['']
        job.setMainConfig(configDict[''])
    else:
        cfg = copy.deepcopy(buildConfig)
        cfg.dropContexts()
        mainConfig = cfg
    mainConfig.recurseGroups = int(recurseGroups)
    job.setMainConfig(mainConfig)

    baseMatchRules = mainConfig.matchTroveRule
    for contextStr, troveSpecList in trovesByContext.iteritems():
        contextBaseMatchRules = baseMatchRules
        if configDict and contextStr in configDict:
            cfg = configDict[contextStr]
        elif contextStr:
            # making this a copy is critical
            cfg = copy.deepcopy(buildConfig)
            for context in contextStr.split(','):
                cfg.setContext(context)
            cfg.dropContexts()
        else:
            # don't bother with baseMatchRules in the base config.
            contextBaseMatchRules = []
            cfg = copy.deepcopy(buildConfig)
            cfg.dropContexts()
            contextStr = ''
            job.setMainConfig(cfg)
        cfg.initializeFlavors()
        use.setBuildFlagsFromFlavor(None, cfg.buildFlavor, error=False)
        if not cfg.buildLabel and cfg.installLabelPath:
            cfg.buildLabel = cfg.installLabelPath[0]
        troveSpecList = list(set(troveSpecList))
        troveList = getTrovesToBuild(cfg, conaryclient, troveSpecList,
                         message=None,
                         recurseGroups=recurseGroups,
                         matchSpecs=contextBaseMatchRules + cfg.matchTroveRule,
                         reposName=mainConfig.reposName,
                         updateSpecs=updateSpecs)
        if updateSpecs and oldTroveDict and contextStr in oldTroveDict:
            troveList = _matchUpdateRestrictions(mainConfig.reposName,
                                                 oldTroveDict[contextStr],
                                                 troveList,
                                                 updateSpecs)
        if rebuild:
            prebuiltBinaries = _findLatestBinariesForTroves(conaryclient,
                                                        mainConfig.reposName,
                                                        troveList)
            if not job.getMainConfig().prebuiltBinaries:
                job.getMainConfig().prebuiltBinaries = prebuiltBinaries
            else:
                job.getMainConfig().prebuiltBinaries.extend(prebuiltBinaries)
        if mainConfig.prepOnly:
            buildType = buildtrove.TROVE_BUILD_TYPE_PREP
        else:
            buildType = buildtrove.TROVE_BUILD_TYPE_NORMAL

        for name, version, flavor in troveList:
            if flavor is None:
                flavor = deps.parseFlavor('')
            bt = buildtrove.BuildTrove(None, name, version, flavor,
                                       context=contextStr,
                                       buildType=buildType)
            job.addTrove(name, version, flavor, contextStr, bt)
            job.setTroveConfig(bt, cfg)
    return job
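getBuildJob begins by bucketing the parsed specs by build context so that each context later gets its own configuration copy. A self-contained sketch of that grouping step, with hand-written 4-tuples standing in for parseTroveSpec() output:

# Sketch of the context-bucketing step from the top of getBuildJob().
parsed = [('foo', None, None, ''), ('bar', None, None, 'x86'),
          ('baz', None, None, 'x86')]
trovesByContext = {}
for (name, version, flavor, context) in parsed:
    trovesByContext.setdefault(context, []).append((name, version, flavor))
print trovesByContext
# {'': [('foo', None, None)], 'x86': [('bar', None, None), ('baz', None, None)]}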
Example #4
def _matchUpdateRestrictions(reposName, oldTroveList,
                             newTroveList, updateSpecs, 
                             binaries=False):
    troveMap = {}
    for troveTup in itertools.chain(oldTroveList, newTroveList):
        if binaries:
            key = (troveTup[0].split(':')[0], troveTup[1], troveTup[2])
        else: 
            key = (troveTup[0].split(':')[0] + ':source', 
                   troveTup[1], troveTup[2])
        troveMap.setdefault(key, []).append(troveTup)

    updateDict = {}
    newUpdateSpecs = []
    if not updateSpecs:
        return newTroveList
    firstMatch = True
    for troveSpec in updateSpecs:
        if not isinstance(troveSpec, tuple):
            troveSpec = cmdutil.parseTroveSpec(troveSpec)

        if binaries:
            troveSpec = (troveSpec[0].split(':')[0], troveSpec[1], troveSpec[2])
        else:
            troveSpec = (troveSpec[0].split(':')[0] + ':source', 
                         troveSpec[1], troveSpec[2])
        if troveSpec[0] and troveSpec[0][0] == '-':
            sense = False
            troveSpec = (troveSpec[0][1:], troveSpec[1], troveSpec[2])
        else:
            sense = True

        name = troveSpec[0]
        if not name:
            filterFn = lambda x: True
        else:
            filterFn = lambda x: fnmatch.fnmatchcase(x[0], name)

        # add all packages that match the glob (it could be empty, in which
        # case all packages are added).
        specs = set([(x[0], troveSpec[1], troveSpec[2]) for x in troveMap
                      if filterFn(x)])
        if not specs:
            newUpdateSpecs.append(troveSpec)
            updateDict[troveSpec] = sense
        updateDict.update(dict.fromkeys(specs, sense))
        for spec in specs:
            if spec in newUpdateSpecs:
                newUpdateSpecs.remove(spec)
        newUpdateSpecs.extend(specs)

    allNewNames = set([ x[0] for x in newTroveList ])
    allOldNames = set([ x[0] for x in oldTroveList ])
    oldTroveList = [ x for x in oldTroveList if x[0] in allNewNames ]

    oldTroves = trovesource.SimpleTroveSource(oldTroveList)
    oldTroves = recipeutil.RemoveHostSource(oldTroves, reposName)
    newTroves = trovesource.SimpleTroveSource(newTroveList)
    newTroves = recipeutil.RemoveHostSource(newTroves, reposName)

    toUse = set()
    firstMatch = True
    for updateSpec in newUpdateSpecs:
        positiveMatch = updateDict[updateSpec]
        oldResults = oldTroves.findTroves(None, [updateSpec], None,
                                          allowMissing=True).get(updateSpec, [])
        newResults = newTroves.findTroves(None, [updateSpec], None,
                                          allowMissing=True).get(updateSpec, [])
        oldNames = set(x[0] for x in oldResults)
        newNames = set(x[0] for x in newResults)
        if positiveMatch:
            if firstMatch:
                # if the user starts with --update info-foo then they want to
                # by default not update anything not mentioned
                toUse = set(oldTroveList)
                toUse.update(x for x in newTroveList 
                             if x[0] not in allOldNames)
                firstMatch = False
            # don't discard any packages for which we don't have a new match
            toKeep = [ x for x in toUse if x[0] not in newNames ]
            toUse.difference_update(oldResults)
            toUse.update(newResults)
            toUse.update(toKeep)
        else:
            if firstMatch:
                # if the user starts with --update -info-foo then they want to
                # update everything _except_ info-foo
                toUse = set(newTroveList)
                firstMatch = False
            toKeep = [ x for x in toUse if x[0] not in oldNames ]
            toUse.difference_update(newResults)
            toUse.update(oldResults)
            toUse.update(toKeep)
    return list(toUse)
Example #5
def getTrovesToBuild(cfg, conaryclient, troveSpecList, message=None, 
                     recurseGroups=BUILD_RECURSE_GROUPS_NONE, matchSpecs=None, 
                     reposName=None, updateSpecs=None):
    toBuild = []
    toFind = {}
    groupsToFind = []
    if not matchSpecs:
        matchSpecs = []
    if reposName is None:
        reposName = cfg.reposName


    repos = conaryclient.getRepos()
    cfg.resolveTroveTups = _getResolveTroveTups(cfg, repos)
    cfg.recurseGroups = int(recurseGroups)

    cfg.buildTroveSpecs = []
    newTroveSpecs = []
    recipesToCook = []
    for troveSpec in list(troveSpecList):
        if not isinstance(troveSpec, tuple):
            troveSpec = cmdutil.parseTroveSpec(troveSpec)
        if len(troveSpec) == 3:
            context = ''
        else:
            context = troveSpec[3]
            troveSpec = troveSpec[:3]

        if (troveSpec[0].startswith('group-') and not recurseGroups
            and not compat.ConaryVersion().supportsCloneNonRecursive()):
            log.warning('You will not be able to commit this group build'
                        ' without upgrading conary.')
        if troveSpec[2] is None:
            troveSpec = (troveSpec[0], troveSpec[1], deps.parseFlavor(''))

        if (not troveSpec[1] and not os.path.isdir(troveSpec[0])
            and os.access(troveSpec[0], os.R_OK)
            and troveSpec[0].endswith('.recipe')):
            # don't rely on cwd, but do allow for symlinks to change
            # when restarting.  Is that sane?  Or should I just do realpath?
            troveSpec = (os.path.abspath(troveSpec[0]),) + troveSpec[1:]
            cfg.buildTroveSpecs.append(troveSpec)
            recipesToCook.append((os.path.realpath(troveSpec[0]), troveSpec[2]))
            continue
        cfg.buildTroveSpecs.append(troveSpec)

        if troveSpec[0].startswith('group-') and recurseGroups:
            groupsToFind.append(troveSpec)
            if recurseGroups == BUILD_RECURSE_GROUPS_SOURCE:
                newTroveSpecs.append(troveSpec)
        else:
            newTroveSpecs.append(troveSpec)

    localTroves = [(_getLocalCook(conaryclient, cfg, x[0], message), x[1])
                     for x in recipesToCook ]
    localTroves = [(x[0][0], x[0][1], x[1]) for x in localTroves]
    if recurseGroups == BUILD_RECURSE_GROUPS_SOURCE:
        compat.ConaryVersion().requireFindGroupSources()
        localGroupTroves = [ x for x in localTroves 
                             if x[0].startswith('group-') ]
        toBuild.extend(_findSourcesForSourceGroup(repos, reposName, cfg,
                                                  groupsToFind,
                                                  localGroupTroves,
                                                  updateSpecs))
    elif recurseGroups == BUILD_RECURSE_GROUPS_BINARY:
        newTroveSpecs.extend(_findSpecsForBinaryGroup(repos, reposName, cfg,
                                                      groupsToFind,
                                                      updateSpecs))

    for troveSpec in newTroveSpecs:
        sourceName = troveSpec[0].split(':')[0] + ':source'

        s = toFind.setdefault((sourceName, troveSpec[1], None), [])
        if troveSpec[2] not in s:
            s.append(troveSpec[2])


    results = repos.findTroves(cfg.buildLabel, toFind, None)

    for troveSpec, troveTups in results.iteritems():
        flavorList = toFind[troveSpec]
        for troveTup in troveTups:
            for flavor in flavorList:
                toBuild.append((troveTup[0], troveTup[1], flavor))

    toBuild.extend(localTroves)

    if matchSpecs:
        toBuild = _filterListByMatchSpecs(reposName, matchSpecs, toBuild)
    return toBuild
Example #6
def parseString(self, val):
    (name, version, flavor, context) = cmdutil.parseTroveSpec(val)
    return (name, versions.VersionFromString(version), flavor, context)
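parseString relies on cmdutil.parseTroveSpec() returning a (name, version, flavor, context) 4-tuple with the version still as a string, which versions.VersionFromString() then promotes to a Version object. A hedged sketch of that round trip; the import paths and the example spec string are assumptions, not taken from the source above:

from rmake.cmdline import cmdutil   # assumed location of cmdutil
from conary import versions

spec = 'foo:source=/example.org@rpl:1/1.0-1'   # illustrative trove spec
name, version, flavor, context = cmdutil.parseTroveSpec(spec)
print name, versions.VersionFromString(version), flavor, context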
Example #7
class rMakeHelper(object):
    """
    Client that contains most of the behavior available from the command line.

    This client wraps around the low-level rMake Server client to provide
    functionality that crosses client/server boundaries.

    example:
        > h = rMakeHelper();
        > jobId = h.buildTroves('foo.recipe')
        > h.waitForJob(jobId)
        > if h.getJob(jobId).isPassed(): print "Foo recipe built!"
        > h.commitJob(jobId, message='Updated foo source component')

    @param uri: location to rmake server or rMake Server instance object.
    @type uri: string that starts with http, https, or unix://, or rMakeServer
    instance.
    @param rmakeConfig: Server Configuration (now deprecated)
    @type rmakeConfig: Unused parameter kept for bw compatibility
    @param buildConfig: rMake Build Configuration
    @type buildConfig: rmake.build.buildcfg.BuildConfiguration instance
    (or None to read from filesystem)
    @param root: Root directory to search for configuration files under.
    @type root: string
    @param guiPassword: If True, pop up a gui window for password prompts
    needed for accessing conary repositories.
    @type guiPassword: boolean
    """
    BUILD_RECURSE_GROUPS_BINARY = buildcmd.BUILD_RECURSE_GROUPS_BINARY
    BUILD_RECURSE_GROUPS_SOURCE = buildcmd.BUILD_RECURSE_GROUPS_SOURCE

    def __init__(self, uri=None, rmakeConfig=None, buildConfig=None, root='/',
                 plugins=None, configureClient=True,
                 clientCert=None, promptPassword=False):
        if rmakeConfig:
            log.warning('rmakeConfig parameter is now deprecated')
        if not buildConfig:
            buildConfig = buildcfg.BuildConfiguration(True, root)

        if configureClient:
            if clientCert is None:
                clientCert = buildConfig.clientCert
            if uri is None:
                if (promptPassword and buildConfig.rmakeUser
                        and buildConfig.rmakeUser[0]
                        and not buildConfig.rmakeUser[1]
                        and not clientCert):
                    self._promptPassword(buildConfig)
                uri = buildConfig.getServerUri()

            self.client = client.rMakeClient(uri, clientCert)

        self.buildConfig = buildConfig
        self.plugins = plugins

    def getConaryClient(self, buildConfig=None):
        if buildConfig is None:
            buildConfig = self.buildConfig
        self.client.addRepositoryInfo(buildConfig)
        return conaryclient.ConaryClient(buildConfig)

    def updateBuildConfig(self, buildConfig=None):
        if buildConfig is None:
            buildConfig = self.buildConfig
        self.client.addRepositoryInfo(buildConfig)

    def _promptPassword(self, cfg):
        # Try to share descriptor with rbuild so only one prompt is seen
        user = cfg.rmakeUser[0]
        url = cfg.rmakeUrl.replace(':9999', '')
        keyDesc = 'rbuild:user:%s:%s' % (user, url)
        passwd = keystore.getPassword(keyDesc)
        if passwd and self._setAndCheckPassword(cfg, passwd):
            return
        for x in range(3):
            print "Please enter the password for user %r on %s" % (user,
                    cfg.rmakeUrl)
            passwd = getpass.getpass("Password: ")
            if self._setAndCheckPassword(cfg, passwd):
                keystore.setPassword(keyDesc, passwd)
                return
            print "The specified credentials were not valid."
            print
        raise errors.RmakeError("Could not authenticate to remote rMake server")

    def _setAndCheckPassword(self, cfg, passwd):
        old = cfg.rmakeUser
        cfg.rmakeUser = (old[0], passwd)
        cli = client.rMakeClient(cfg.getServerUri())
        try:
            cli.ping(seconds=0.01)
        except errors.InsufficientPermission:
            cfg.rmakeUser = old
            return False
        else:
            return True


    def getRepos(self, buildConfig=None):
        return self.getConaryClient(buildConfig).getRepos()

    def displayConfig(self, hidePasswords=True, prettyPrint=True):
        """
            Display the current build configuration for this helper.

            @param hidePasswords: If True, display <password> instead of
            the password in the output.
            @param prettyPrint: If True, print output in human-readable format
            that may not be parsable by a config reader.  If False, the
            configuration output should be valid as input.
        """
        self.buildConfig.initializeFlavors()
        if not self.buildConfig.buildLabel:
            self.buildConfig.buildLabel = self.buildConfig.installLabelPath[0]
        self.buildConfig.setDisplayOptions(hidePasswords=hidePasswords,
                                           prettyPrint=prettyPrint)
        self.buildConfig.display()

    def createRestartJob(self, jobId, troveSpecs=None, updateSpecs=None,
                         excludeSpecs=None, updateConfigKeys=None,
                         clearBuildList=False, clearPrebuiltList=False):
        job = self.client.getJob(jobId, withConfigs=True)
        troveSpecList = []
        oldTroveDict = {}
        configDict = {}
        recurseGroups = job.getMainConfig().recurseGroups
        if not excludeSpecs:
            excludeSpecs = []

        self.updateBuildConfig()
        for contextStr, jobConfig in job.getConfigDict().iteritems():
            if not clearBuildList:
                troveSpecList += [ (x[0], x[1], x[2], contextStr)
                                    for x in jobConfig.buildTroveSpecs ]
            oldTroveDict[contextStr] = [ x.getNameVersionFlavor()
                                         for x in job.iterTroves()
                                         if x.context == contextStr ]
            cfg = copy.deepcopy(self.buildConfig)

            for context in contextStr.split(','):
                if context:
                    if cfg.hasSection(context):
                        cfg.setContext(context)
                    else:
                        log.warning('Context %s used in job %s does not exist' % (context, jobId))
            jobConfig.reposName = self.buildConfig.reposName
            # a bug in how jobConfigs are stored + thawed
            # (related to relative paths) causes :memory: not to get
            # transferred correctly over the wire.  We reset the root
            # to :memory: here since the bugfix is conary based.
            jobConfig.root = ':memory:'
            # make sure we have the necessary user information and
            # repositoryMap info to contact the internal repository
            # (in every context).
            jobConfig.user.extend(cfg.user)
            jobConfig.repositoryMap.update(cfg.repositoryMap)
            jobConfig.entitlement.extend(cfg.entitlement)
            if not updateConfigKeys:
                cfg = jobConfig
            elif 'all' in updateConfigKeys:
                pass
            else:
                for key in updateConfigKeys:
                    if key not in cfg:
                        raise errors.ParseError('Unknown value for updateConfigKeys: "%s"' % key)
                    jobConfig[key] = cfg[key]
                cfg = jobConfig

            for spec in excludeSpecs:
                if isinstance(spec, tuple):
                    spec, context = cmdutil.getSpecStringFromTuple(spec)
                else:
                    spec, context = cmdutil.parseTroveSpecContext(spec)
                if context is None or context == contextStr:
                    cfg.addMatchRule('-%s' % spec)
            configDict[contextStr] = cfg

        mainConfig = configDict['']
        if clearPrebuiltList:
            mainConfig.jobContext = []
        else:
            mainConfig.jobContext += [jobId]
        if troveSpecs:
            troveSpecList.extend(troveSpecs)
        return self._createBuildJob(troveSpecList, buildConfig=mainConfig,
                                    configDict=configDict,
                                    recurseGroups=recurseGroups,
                                    updateSpecs=updateSpecs,
                                    oldTroveDict=oldTroveDict)

    def displayJob(self, job, quiet=False):
        verbose = log.getVerbosity() <= log.DEBUG
        return buildcmd.displayBuildInfo(job, verbose=verbose,
                                         quiet=quiet)

    def buildJob(self, job, quiet=False):
        jobId = self.client.buildJob(job)
        if not quiet:
            print 'Added Job %s' % jobId
            for (n,v,f) in sorted(job.iterTroveList()):
                if f is not None and not f.isEmpty():
                    f = '[%s]' % f
                else:
                    f = ''
                print '  %s=%s/%s%s' % (n, v.trailingLabel(),
                                           v.trailingRevision(), f)
        else:
            print jobId
        return jobId

    def createBuildJob(self, troveSpecList, limitToHosts=None,
                       limitToLabels=None, recurseGroups=False,
                       buildConfig=None, matchSpecs=None, rebuild=False):
        # added to limit api for createBuildJob to the bits that should
        # be passed in from the front end.
        return self._createBuildJob(troveSpecList, limitToHosts=limitToHosts,
                                    limitToLabels=limitToLabels,
                                    recurseGroups=recurseGroups,
                                    buildConfig=buildConfig,
                                    matchSpecs=matchSpecs,
                                    rebuild=rebuild)

    def _createBuildJob(self, troveSpecList, limitToHosts=None,
                        limitToLabels=None, recurseGroups=False,
                        buildConfig=None, configDict=None, matchSpecs=None,
                        oldTroveDict=None, updateSpecs=None,
                        rebuild=False):
        if not isinstance(troveSpecList, (list, tuple)):
            troveSpecList = [troveSpecList]
        if configDict:
            buildConfig = configDict['']
        else:
            if buildConfig is None:
                buildConfig = self.buildConfig
            if not recurseGroups:
                # only use default match rules when recursing.
                buildConfig.clearMatchRules()
        if limitToHosts:
            buildConfig.limitToHosts(limitToHosts)
        if limitToLabels:
            buildConfig.limitToLabels(limitToLabels)
        if matchSpecs:
            for matchSpec in matchSpecs:
                buildConfig.addMatchRule(matchSpec)
        self.updateBuildConfig(buildConfig=buildConfig)
        conaryClient = self.getConaryClient(buildConfig)

        job = buildcmd.getBuildJob(buildConfig,
                                   conaryClient,
                                   troveSpecList,
                                   recurseGroups=recurseGroups,
                                   configDict=configDict,
                                   updateSpecs=updateSpecs,
                                   oldTroveDict=oldTroveDict,
                                   rebuild=rebuild)
        conaryClient.close()
        conaryClient.db.close()
        return job

    def loadJobFromFile(self, loadPath):
        job = buildjob.BuildJob.loadFromFile(loadPath)
        for cfg in job.iterConfigList():
            cfg.repositoryMap.update(self.buildConfig.repositoryMap)
            cfg.user.extend(self.buildConfig.user)
        return job

    def stopJob(self, jobId):
        """
            Stops the given job.

            @param jobId: jobId to stop
            @type jobId: int or uuid
            @raise: RmakeError: If job is already stopped.
        """
        stopped = self.client.stopJob(jobId)

    def getJob(self, jobId, withTroves=True):
        return self.client.getJob(jobId, withTroves=withTroves)

    def createChangeSet(self, jobId, troveSpecs=None):
        """
            Creates a changeset object with all the built troves for a job.

            @param jobId: jobId or uuid for a given job.
            @type jobId: int or uuid
            @return: conary changeset object
            @rtype: conary.repository.changeset.ReadOnlyChangeSet
            @raise: JobNotFound: If job does not exist
        """
        job = self.client.getJob(jobId)
        binTroves = []
        for trove in job.iterTroves():
            binTroves.extend(trove.iterBuiltTroves())
        if not binTroves:
            log.error('No built troves associated with this job')
            return None
        if troveSpecs:
            troveSpecs = cmdline.parseTroveSpecs(troveSpecs)
            source = trovesource.SimpleTroveSource(binTroves)
            results = source.findTroves(None, troveSpecs)
            binTroves = itertools.chain(*results.values())
        jobList = [(x[0], (None, None), (x[1], x[2]), True) for x in binTroves]
        primaryTroveList = [ x for x in binTroves if ':' not in x[0]]
        cs = self.getRepos().createChangeSet(jobList, recurse=False,
                                             primaryTroveList=primaryTroveList)
        return cs

    def createChangeSetFile(self, jobId, path, troveSpecs=None):
        """
            Creates a changeset file with all the built troves for a job.

            @param jobId: jobId or uuid for a given job.
            @type jobId: int or uuid
            @return: False if changeset not created, True if it was.
            @raise: JobNotFound: If job does not exist
        """
        job = self.client.getJob(jobId)
        binTroves = []
        for trove in job.iterTroves():
            binTroves.extend(trove.iterBuiltTroves())
        if not binTroves:
            log.error('No built troves associated with this job')
            return False
        if troveSpecs:
            troveSpecs = [ cmdline.parseTroveSpec(x) for x in troveSpecs ]
            source = trovesource.SimpleTroveSource(binTroves)
            results = source.findTroves(None, troveSpecs)
            binTroves = list(itertools.chain(*results.values()))
            primaryTroveList = binTroves
            recurse = True
        else:
            recurse = False
            primaryTroveList = [ x for x in binTroves if ':' not in x[0]]

        jobList = [(x[0], (None, None), (x[1], x[2]), True) for x in binTroves ]
        self.getRepos().createChangeSetFile(jobList, path, recurse=recurse,
                                            primaryTroveList=primaryTroveList)
        return True

    def commitJobs(self, jobIds, message=None, commitOutdatedSources=False,
                   commitWithFailures=True, waitForJob=False,
                   sourceOnly=False, updateRecipes=True, excludeSpecs=None,
                   writeToFile=None):
        """
            Commits a set of jobs.

            Committing in rMake is slightly different from committing in 
            conary.  rMake uses the conary "clone" command to move the binary
            stored in its internal repository out into the repository the
            source component came from.

            @param jobIds: jobId or uuid (or a list of them) for the jobs to commit.
            @type jobIds: int, uuid, or list of ints/uuids
            @param message: Message to use for source commits.
            @type message: str
            @param commitOutdatedSources: if True, allow commit of sources
            even if someone else has changed the source component outside
            of rMake before you.
            @param commitWithFailures: if True, allow commit of this job
            even if parts of the job have failed.
            @param waitForJob: if True, wait for the job to finish if necessary
            before committing.
            @param sourceOnly: if True, only commit the source component.
            @param writeToFile: if set to a path, the changeset is written to
            that path instead of committed to the repository (Advanced)
            @return: False if job failed to commit, True if it succeeded.
            @raise: JobNotFound: If job does not exist
        """
        if not isinstance(jobIds, (list, tuple)):
            jobIds = [jobIds]
        jobs = self.client.getJobs(jobIds, withConfigs=True)
        finalJobs = []
        for job in jobs:
            jobId = job.jobId
            if job.isCommitting():
                raise errors.RmakeError("Job %s is already committing" % job.jobId)
            if not job.isFinished() and waitForJob:
                print "Waiting for job %s to complete before committing" % jobId
                try:
                    self.waitForJob(jobId)
                except Exception, err:
                    print "Wait interrupted, not committing"
                    print "You can restart commit by running 'rmake commit %s'" % jobId
                    raise
                job = self.client.getJob(jobId)
            if not job.isFinished():
                log.error('Job %s is not yet finished' % jobId)
                return False
            if job.isFailed() and not commitWithFailures:
                log.error('Job %s has failures, not committing' % jobId)
                return False
            if not list(job.iterBuiltTroves()):
                log.error('Job %s has no built troves to commit' % jobId)
                return True
            finalJobs.append(job)

        jobs = [ x for x in finalJobs if not x.isCommitted() ]
        jobIds = [ x.jobId for x in finalJobs ]

        if not jobs:
            log.error('Job(s) already committed')
            return False
        if excludeSpecs:
            excludeSpecs = [ cmdutil.parseTroveSpec(x) for x in excludeSpecs ]

        self.client.startCommit(jobIds)
        try:
            succeeded, data = commit.commitJobs(self.getConaryClient(), jobs,
                                   self.buildConfig.reposName, message,
                                   commitOutdatedSources=commitOutdatedSources,
                                   sourceOnly=sourceOnly,
                                   excludeSpecs=excludeSpecs,
                                   writeToFile=writeToFile)
            if succeeded:
                def _sortCommitted(tup1, tup2):
                    return cmp((tup1[0].endswith(':source'), tup1),
                               (tup2[0].endswith(':source'), tup2))
                def _formatTup(tup):
                    args = [tup[0], tup[1]]
                    if tup[2].isEmpty():
                        args.append('')
                    else:
                        args.append('[%s]' % tup[2])
                    if not tup[3]:
                        args.append('')
                    else:
                        args.append('{%s}' % tup[3])
                    return '%s=%s%s%s' % tuple(args)

                self.client.commitSucceeded(data)

            else:
                self.client.commitFailed(jobIds, data)
                log.error(data)
                return False
        except errors.uncatchableExceptions, err:
            self.client.commitFailed(jobIds, str(err))
            raise
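A hedged end-to-end sketch of the helper above, built only from methods shown in this example (createBuildJob, buildJob, commitJobs, plus waitForJob, which is referenced but not shown); the recipe path and commit message are placeholders:

# Hypothetical session: build a local recipe, wait for it, then commit.
h = rMakeHelper()
job = h.createBuildJob('foo.recipe')
jobId = h.buildJob(job)
h.waitForJob(jobId)          # referenced above, not defined in this snippet
h.commitJobs(jobId, message='rebuilt foo', commitWithFailures=False)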