Example #1
    def testFileUpdateMissingKey(self):
        fingerprint = '95B457D16843B21EA3FC73BBC7C32FC1F94E405E'

        # make a changeset with useful stuff that could be installed
        cs = changeset.ChangeSet()
        flavor = deps.parseFlavor('')
        v = versions.VersionFromString('/%s/1.0-1-1' %
                                       self.cfg.buildLabel.asString()).copy()
        v.resetTimeStamps()
        t = trove.Trove('test:test', v, flavor, None)

        path = self.workDir + '/blah'
        f = open(path, 'w')
        f.write('hello, world!\n')
        f.close()
        pathId = sha1helper.md5String('/blah')
        f = files.FileFromFilesystem(path, pathId)

        fileId = f.fileId()
        t.addFile(pathId, '/blah', v, fileId)
        cs.addFile(None, fileId, f.freeze())
        cs.addFileContents(pathId, fileId, changeset.ChangedFileTypes.file,
                           filecontents.FromFilesystem(path),
                           f.flags.isConfig())
        diff = t.diff(None, absolute=1)[0]
        cs.newTrove(diff)
        cs.addPrimaryTrove('test:test', v, flavor)

        # sign the changeset
        csPath = os.path.join(self.workDir, 'test-1.0-1.ccs')
        cs = cook.signAbsoluteChangeset(cs, fingerprint)
        cs.writeToFile(csPath)

        tmpPath = mkdtemp()

        keyCache = openpgpkey.getKeyCache()
        newKeyCache = openpgpkey.OpenPGPKeyFileCache()

        pubRing = self.cfg.pubRing

        self.cfg.pubRing = [tmpPath + '/pubring.gpg']
        keyCacheCallback = openpgpkey.KeyCacheCallback(None, self.cfg)
        newKeyCache.setCallback(keyCacheCallback)

        openpgpkey.setKeyCache(newKeyCache)

        try:
            self.updatePkg(self.rootDir, csPath)
        finally:
            self.cfg.pubRing = pubRing
            openpgpkey.setKeyCache(keyCache)
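
The calling pattern this test exercises reduces to a short sketch; the import path and the helper name below are assumptions for illustration, not part of the test:

from conary.build import cook   # assumed import path for signAbsoluteChangeset

def signAndWrite(cs, fingerprint, csPath):
    # signAbsoluteChangeset() returns the signed changeset; write that one to disk
    signedCs = cook.signAbsoluteChangeset(cs, fingerprint)
    signedCs.writeToFile(csPath)
    return signedCs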
Example #2
def _old_commit(helper, job):
    '''
    Commit a job to the target repository.

    @param helper: conary client helper used for repository access
    @param job: rMake job
    '''
    log.info('Starting commit')
    _start_time = time.time()

    okay, changeset, nbf_map = clone_job(helper, job)

    # Sanity check the resulting changeset and produce a mapping of
    # committed versions
    mapping = {job.jobId: {}}
    if okay:
        for trove in iter_new_troves(changeset, helper):
            # Make sure there are no references to the internal repos.
            for _, child_version, _ in trove.iterTroveList(
              strongRefs=True, weakRefs=True):
                assert child_version.getHost() \
                    != helper.cfg.reposName, \
                    "Trove %s references repository" % trove
                #assert not child_name.endswith(':testinfo'), \
                #    "Trove %s references :testinfo component" % trove

            trove_name, trove_version, trove_flavor = \
                trove.getNameVersionFlavor()
            trove_branch = trove_version.branch()
            trove, _ = nbf_map[(trove_name, trove_branch, trove_flavor)]
            trove_nvfc = trove.getNameVersionFlavor(withContext=True)
            # map jobId -> trove -> binaries
            mapping[trove.jobId].setdefault(trove_nvfc, []).append(
                (trove_name, trove_version, trove_flavor))
    else:
        raise RuntimeError('failed to clone finished build')

    if compat.ConaryVersion().signAfterPromote():
        changeset = cook.signAbsoluteChangeset(changeset)
    helper.getRepos().commitChangeSet(changeset)

    _finish_time = time.time()
    log.info('Commit took %.03f seconds', _finish_time - _start_time)
    return mapping
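
The guard around signing in _old_commit() is worth isolating: the changeset is only signed here when the installed Conary does not already sign during the promote itself. A minimal sketch, assuming the conary.build.cook and rmake.lib.compat import paths:

from conary.build import cook     # assumed import path
from rmake.lib import compat      # assumed import path

def signAndCommit(helper, changeset):
    # older Conary releases expect the changeset to be signed after the promote/clone
    if compat.ConaryVersion().signAfterPromote():
        changeset = cook.signAbsoluteChangeset(changeset)
    helper.getRepos().commitChangeSet(changeset)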
Example #3
def commitJobs(conaryclient, jobList, reposName, message=None,
               commitOutdatedSources=False, sourceOnly = False,
               excludeSpecs=None, writeToFile=None):
    jobsToCommit = {}
    alreadyCommitted = []
    finalCs = changeset.ReadOnlyChangeSet()
    mapping = {}
    for job in jobList:
        if job.isCommitted():
            alreadyCommitted.append(job)
        else:
            jobsToCommit[job.jobId] = job
    jobsToCommit = jobsToCommit.values() # dedup job list

    if not jobsToCommit:
        err = 'Job(s) already committed'
        return False, err

    allTroves = []
    trovesByBranch = {}
    alreadyCommitted = False
    for job in jobsToCommit:
        mapping[job.jobId] = {}
        for trove in job.iterTroves():
            allTroves.append(trove)
            troveVersion = trove.getVersion()
            if troveVersion.getHost() == reposName:
                if not troveVersion.branch().hasParentBranch():
                    message = ('Cannot commit filesystem cook %s - '
                               ' nowhere to commit to!' % trove.getName())
                    return False, message
    assert(allTroves)
    source = trovesource.SimpleTroveSource()
    if excludeSpecs:
        excludeSpecsWithContext = {}
        troveMap = {}
        for excludeSpec in excludeSpecs:
            if len(excludeSpec) == 4:
                context = excludeSpec[3]
            else:
                context = None

            excludeSpecsWithContext.setdefault(
                                        excludeSpec[:3], []).append(context)
        excludeSpecs = [ x[:3] for x in excludeSpecs ]

        for trove in allTroves:
            troveTup = (trove.getName().split(':')[0],
                        trove.getVersion(),
                        trove.getFlavor())
            source.addTrove(*troveTup)
            troveMap.setdefault(troveTup, []).append(trove)

        source.searchAsDatabase()
        matches = source.findTroves(None, excludeSpecs, None, allowMissing=True)
        trvMatches = []
        for excludeSpec, matchList in matches.iteritems():
            contexts = excludeSpecsWithContext[excludeSpec]
            for match in matchList:
                for trv in troveMap[match]:
                    if trv.context in contexts or None in contexts:
                        trvMatches.append(trv)

        allTroves = [ x for x in allTroves if x not in trvMatches ]
        if not allTroves:
            message = ('All troves excluded - not committing')
            return False, message

    repos = conaryclient.getRepos()

    trovesByNBF = {}
    sourcesToCheck = []
    branchMap = {}
    trovesToClone = []
    for trove in allTroves:
        builtTroves = list(trove.iterBuiltTroves())
        if not builtTroves:
            continue
        if builtTroves[0][1].getHost() != reposName:
            alreadyCommitted = True
            for n,v,f in builtTroves:
                trovesByNBF[n, v.branch(), f] = (trove, v)
            continue

        troveVersion = trove.getVersion()
        if troveVersion.getHost() == reposName:
            sourceTup = (trove.getName(), troveVersion, Flavor())
            targetBranch = troveVersion.branch().parentBranch()
            branchMap[troveVersion.branch()] = targetBranch
            nbf = trove.getName(), targetBranch, Flavor()
            if nbf in trovesByNBF:
                if trovesByNBF[nbf][1] != troveVersion:
                    badVersion = trovesByNBF[nbf][1]
                    return False, ("Cannot commit two different versions of source component %s:"
                                   " %s and %s" % (trove.getName(), troveVersion, badVersion))
            trovesByNBF[nbf] = trove, troveVersion
            sourcesToCheck.append(sourceTup)
        if sourceOnly:
            continue

        for troveTup in builtTroves:
            branch = troveTup[1].branch()
            targetBranch = branch.parentBranch()
            # add mapping so that when the cloning is done
            # we can tell what commit resulted in what binaries.
            nbf = (troveTup[0], targetBranch, troveTup[2])
            if nbf in trovesByNBF:
                otherBinary = trovesByNBF[nbf][0].getBinaryTroves()[0]
                if otherBinary[1].branch() == targetBranch:
                    # this one's already committed.
                    break
                # discard the later of the two commits.
                if trovesByNBF[nbf][0].getVersion() > trove.getVersion():
                    # we're the earlier one
                    badTrove, badVersion = trovesByNBF[nbf]
                    newTrove = trove
                    newVersion = troveTup[1]
                else:
                    badTrove = trove
                    badVersion = troveTup[1]
                    newTrove, newVersion = trovesByNBF[nbf]
                name = nbf[0]
                flavor = nbf[2]

                skipped = []
                for badTroveTup in badTrove.iterBuiltTroves():
                    badNbf = (badTroveTup[0], targetBranch, badTroveTup[2])
                    if not ':' in badTroveTup[0]:
                        skipped.append(badTroveTup[0])

                    if badNbf in trovesByNBF and badTrove is trovesByNBF[badNbf][0]:
                        del trovesByNBF[badNbf]

                skipped = '%s' % (', '.join(skipped))
                log.warning("Not committing %s on %s[%s]%s - overridden by"
                            " %s[%s]%s" % (skipped, badTroveTup[1],
                             badTroveTup[2],
                             badTrove.getContextStr(), newVersion, flavor,
                             newTrove.getContextStr()))
                if trove is badTrove:
                    break

            trovesByNBF[nbf] = trove, troveTup[1]
            branchMap[branch] = targetBranch

    for nbf, (trove, tupVersion) in trovesByNBF.items():
        if tupVersion.branch() != nbf[1]:
            trovesToClone.append((nbf[0], tupVersion, nbf[2]))

    if not trovesToClone:
        if sourceOnly:
            err = 'Could not find sources to commit'
        elif alreadyCommitted:
            log.warning('All built troves have already been committed')
            return True, {}
        else:
            err = 'Can only commit built troves, none found'
        return False, err
    if sourcesToCheck and not commitOutdatedSources:
        outdated = _checkOutdatedSources(repos, sourcesToCheck)
        if outdated:
            outdated = ( '%s=%s (replaced by newer %s)' \
                         % (name, builtVer, newVer.trailingRevision())
                         for (name, builtVer, newVer) in outdated)
            err = ('The following source troves are out of date:\n%s\n\n'
                   'Use --commit-outdated-sources to commit anyway' %
                   '\n'.join(outdated))
            return False, err

    # only update build info if we'll be okay if some buildreqs are not 
    # updated
    updateBuildInfo = compat.ConaryVersion().acceptsPartialBuildReqCloning()
    callback = callbacks.CloneCallback(conaryclient.cfg, message)
    passed, cs = conaryclient.createTargetedCloneChangeSet(
                                        branchMap, trovesToClone,
                                        updateBuildInfo=updateBuildInfo,
                                        cloneSources=False,
                                        trackClone=False,
                                        callback=callback, fullRecurse=False)
    if passed:
        oldTroves = []
        for troveCs in cs.iterNewTroveList():
            if troveCs.getOldVersion():
                oldTroves.append(troveCs.getOldNameVersionFlavor())
        if oldTroves:
            oldDict = {}
            for oldTrove in repos.getTroves(oldTroves):
                oldDict.setdefault(oldTrove.getNameVersionFlavor(),
                                    []).append(oldTrove)
        for troveCs in cs.iterNewTroveList():
            if troveCs.getOldVersion():
                trv = oldDict[troveCs.getOldNameVersionFlavor()].pop()
                trv.applyChangeSet(troveCs)
            else:
                trv = Trove(troveCs)
            for _, childVersion, _ in trv.iterTroveList(strongRefs=True,
                                                        weakRefs=True):
                # make sure there are not any references to the internal
                # rmake repository - that would be a bad bug - easy to
                # do with the way we do cooking of groups.
                onRepos = childVersion.getHost() == reposName
                assert not onRepos, "Trove %s references repository" % trv
            n,v,f = troveCs.getNewNameVersionFlavor()
            trove, troveVersion = trovesByNBF[n, v.branch(), f]
            troveNVFC = trove.getNameVersionFlavor(withContext=True)
            # map jobId -> trove -> binaries
            mapping[trove.jobId].setdefault(troveNVFC, []).append((n,v,f))
    else:
        return False, 'Creating clone failed'

    signatureKey = conaryclient.cfg.signatureKey
    if signatureKey and compat.ConaryVersion().signAfterPromote():
        finalCs = signAbsoluteChangeset(cs, signatureKey)
    if writeToFile:
        cs.writeToFile(writeToFile)
    else:
        repos.commitChangeSet(cs, callback=callback)
    return True, mapping
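
On success commitJobs() returns (True, mapping), where mapping maps a jobId to a per-trove dictionary keyed by the trove's name/version/flavor/context tuple, with the committed binaries as values. A hypothetical consumer of that structure, for illustration only:

def printCommitMapping(mapping):
    # mapping[jobId][(name, version, flavor, context)] -> [(n, v, f), ...]
    for jobId, byTrove in mapping.items():
        for troveNVFC, binaries in byTrove.items():
            for n, v, f in binaries:
                print('job %s: %s -> %s=%s[%s]' % (jobId, troveNVFC[0], n, v, f))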
Example #4
def _shadowAndCommit(conaryclient, cfg, recipeDir, stateFile, message):
    repos = conaryclient.getRepos()
    conaryCompat = compat.ConaryVersion()

    oldSourceVersion = stateFile.getVersion()
    targetLabel = cfg.getTargetLabel(oldSourceVersion)
    if not targetLabel: 
        raise errors.RmakeError(
                    'Cannot cook local recipes unless a target label is set')
    skipped, cs = conaryclient.createShadowChangeSet(str(targetLabel),
                                           [stateFile.getNameVersionFlavor()])
    recipePath = recipeDir + '/' + stateFile.getName().split(':')[0] + '.recipe'
    recipeClass, pathList = _getPathList(repos, cfg, recipePath, relative=True)

    troveName = stateFile.getName()
    troveVersion = stateFile.getVersion()

    if not skipped:
        signAbsoluteChangeset(cs, None)
        repos.commitChangeSet(cs)

    log.info("Shadowing %s to internal repository..." % troveName)
    shadowBranch = troveVersion.createShadow(targetLabel).branch()
    shadowVersion = repos.findTrove(None,
                                    (troveName, str(shadowBranch), 
                                    None), None)[0][1]


    cwd = os.getcwd()
    prefix = 'rmake-shadow-%s-' % troveName.split(':')[0]
    shadowSourceDir = tempfile.mkdtemp(prefix=prefix)
    try:
        log.info("Committing local changes to %s to the"
                  " internal repository..." % troveName)
        log.resetErrorOccurred()
        checkin.checkout(repos, cfg, shadowSourceDir,
                        ['%s=%s' % (troveName, shadowVersion)])

        if compat.ConaryVersion().stateFileVersion() > 0:
            kw = dict(repos=repos)
        else:
            kw = {}
        # grab new and old state and make any modifications due to adding
        # or deleting of files (we assume files that don't exist are 
        # autosource and can be ignored)
        oldState = conaryCompat.ConaryStateFromFile(recipeDir + '/CONARY',
                                                    repos=repos).getSourceState()
        newConaryState = conaryCompat.ConaryStateFromFile(
                                                shadowSourceDir + '/CONARY',
                                                repos=repos)
        newState = newConaryState.getSourceState()

        neededFiles = set(x[1] for x in oldState.iterFileList()
                          if os.path.exists(os.path.join(recipeDir, x[1])))
        neededFiles.update(pathList)
        autoSourceFiles = set(x[1] for x in oldState.iterFileList()
                          if oldState.fileIsAutoSource(x[0]))

        existingFiles = set(x[1] for x in newState.iterFileList()
                        if os.path.exists(os.path.join(shadowSourceDir, x[1])))
        toCopy = neededFiles & existingFiles
        toDel = existingFiles - neededFiles
        toAdd = neededFiles - existingFiles
        for sourceFile in (toCopy | toAdd):
            newPath = os.path.join(shadowSourceDir, sourceFile)
            if os.path.dirname(sourceFile):
                util.mkdirChain(os.path.dirname(newPath))
            if os.path.isdir(sourceFile):
                util.mkdirChain(newPath)
            else:
                shutil.copyfile(os.path.join(recipeDir, sourceFile), newPath)

        os.chdir(shadowSourceDir)
        if hasattr(cfg.sourceSearchDir, '_getUnexpanded'):
            cfg.configKey('sourceSearchDir',
                          cfg.sourceSearchDir._getUnexpanded())

        for f in toDel:
            checkin.removeFile(f)
        if toDel:
            # toDel modifies the CONARY file on disk, so reload with the
            # changes made there.
            newState = conaryCompat.ConaryStateFromFile(
                                            shadowSourceDir + '/CONARY',
                                            repos=repos).getSourceState()

        if conaryCompat.stateFileVersion() == 0:
            checkin.addFiles(toAdd)
        else:
            oldPathIds = dict((x[1], x[0]) for x in oldState.iterFileList())
            for path in toAdd:
                if path in oldPathIds:
                    isConfig = oldState.fileIsConfig(oldPathIds[path])
                else:
                    isConfig = _getConfigInfo(path)
                checkin.addFiles([path], binary=not isConfig, text=isConfig)
            if toAdd:
                # get the new pathIDs for all the added troves, 
                # since we can't set the refresh setting without the 
                # needed pathIds
                newState = conaryCompat.ConaryStateFromFile(
                                                shadowSourceDir + '/CONARY',
                                                repos=repos).getSourceState()
            newPathIds = dict((x[1], x[0]) for x in newState.iterFileList())

            for path in (toCopy | toAdd):
                if path in oldPathIds:
                    isConfig = oldState.fileIsConfig(oldPathIds[path])
                else:
                    isConfig = _getConfigInfo(path)

                newState.fileIsConfig(newPathIds[path], isConfig)

            for path in autoSourceFiles:
                if path in newPathIds:
                    needsRefresh = oldState.fileNeedsRefresh(oldPathIds[path])
                    newState.fileNeedsRefresh(newPathIds[path], needsRefresh)

            # if the factory changed, update it
            if newState.getFactory() != oldState.getFactory():
                newState.setFactory(oldState.getFactory())

            # we may have modified the state file. Write it back out to 
            # disk so it will be picked up by the commit.
            newConaryState.setSourceState(newState)
            newConaryState.write(shadowSourceDir + '/CONARY')

        if message is None and compat.ConaryVersion().supportsCloneCallback():
            message = 'Automated rMake commit'

        _doCommit('%s/%s' % (recipeDir, troveName), repos, cfg, message)

        newState = state.ConaryStateFromFile(shadowSourceDir + '/CONARY', **kw)
        return newState.getSourceState().getNameVersionFlavor()
    finally:
        os.chdir(cwd)
        if hasattr(cfg.sourceSearchDir, '_getUnexpanded'):
            cfg.configKey('sourceSearchDir',
                          cfg.sourceSearchDir._getUnexpanded())
        shutil.rmtree(shadowSourceDir)
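
The file-reconciliation step in _shadowAndCommit() boils down to set arithmetic over relative paths; the file names below are purely illustrative:

neededFiles = set(['foo.recipe', 'foo.tar.gz'])     # files wanted in the shadow checkout
existingFiles = set(['foo.recipe', 'stale.patch'])  # files already in the shadow checkout
toCopy = neededFiles & existingFiles   # overwrite in place: foo.recipe
toDel = existingFiles - neededFiles    # remove from the shadow: stale.patch
toAdd = neededFiles - existingFiles    # newly added sources: foo.tar.gz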
Example #5
def commitJobs(conaryclient,
               jobList,
               reposName,
               message=None,
               commitOutdatedSources=False,
               sourceOnly=False,
               excludeSpecs=None,
               writeToFile=None):
    jobsToCommit = {}
    alreadyCommitted = []
    finalCs = changeset.ReadOnlyChangeSet()
    mapping = {}
    for job in jobList:
        if job.isCommitted():
            alreadyCommitted.append(job)
        else:
            jobsToCommit[job.jobId] = job
    jobsToCommit = jobsToCommit.values()  # dedup job list

    if not jobsToCommit:
        err = 'Job(s) already committed'
        return False, err

    allTroves = []
    trovesByBranch = {}
    alreadyCommitted = False
    for job in jobsToCommit:
        mapping[job.jobId] = {}
        for trove in job.iterTroves():
            allTroves.append(trove)
            troveVersion = trove.getVersion()
            if troveVersion.getHost() == reposName:
                if not troveVersion.branch().hasParentBranch():
                    message = ('Cannot commit filesystem cook %s - '
                               ' nowhere to commit to!' % trove.getName())
                    return False, message
    assert (allTroves)
    source = trovesource.SimpleTroveSource()
    if excludeSpecs:
        excludeSpecsWithContext = {}
        troveMap = {}
        for excludeSpec in excludeSpecs:
            if len(excludeSpec) == 4:
                context = excludeSpec[3]
            else:
                context = None

            excludeSpecsWithContext.setdefault(excludeSpec[:3],
                                               []).append(context)
        excludeSpecs = [x[:3] for x in excludeSpecs]

        for trove in allTroves:
            troveTup = (trove.getName().split(':')[0], trove.getVersion(),
                        trove.getFlavor())
            source.addTrove(*troveTup)
            troveMap.setdefault(troveTup, []).append(trove)

        source.searchAsDatabase()
        matches = source.findTroves(None,
                                    excludeSpecs,
                                    None,
                                    allowMissing=True)
        trvMatches = []
        for excludeSpec, matchList in matches.iteritems():
            contexts = excludeSpecsWithContext[excludeSpec]
            for match in matchList:
                for trv in troveMap[match]:
                    if trv.context in contexts or None in contexts:
                        trvMatches.append(trv)

        allTroves = [x for x in allTroves if x not in trvMatches]
        if not allTroves:
            message = ('All troves excluded - not committing')
            return False, message

    repos = conaryclient.getRepos()

    trovesByNBF = {}
    sourcesToCheck = []
    branchMap = {}
    trovesToClone = []
    for trove in allTroves:
        builtTroves = list(trove.iterBuiltTroves())
        if not builtTroves:
            continue
        if builtTroves[0][1].getHost() != reposName:
            alreadyCommitted = True
            for n, v, f in builtTroves:
                trovesByNBF[n, v.branch(), f] = (trove, v)
            continue

        troveVersion = trove.getVersion()
        if troveVersion.getHost() == reposName:
            sourceTup = (trove.getName(), troveVersion, Flavor())
            targetBranch = troveVersion.branch().parentBranch()
            branchMap[troveVersion.branch()] = targetBranch
            nbf = trove.getName(), targetBranch, Flavor()
            if nbf in trovesByNBF:
                if trovesByNBF[nbf][1] != troveVersion:
                    badVersion = trovesByNBF[nbf][1]
                    return False, (
                        "Cannot commit two different versions of source component %s:"
                        " %s and %s" %
                        (trove.getName(), troveVersion, badVersion))
            trovesByNBF[nbf] = trove, troveVersion
            sourcesToCheck.append(sourceTup)
        if sourceOnly:
            continue

        for troveTup in builtTroves:
            branch = troveTup[1].branch()
            targetBranch = branch.parentBranch()
            # add mapping so that when the cloning is done
            # we can tell what commit resulted in what binaries.
            nbf = (troveTup[0], targetBranch, troveTup[2])
            if nbf in trovesByNBF:
                otherBinary = trovesByNBF[nbf][0].getBinaryTroves()[0]
                if otherBinary[1].branch() == targetBranch:
                    # this one's already committed.
                    break
                # discard the later of the two commits.
                if trovesByNBF[nbf][0].getVersion() > trove.getVersion():
                    # we're the earlier one
                    badTrove, badVersion = trovesByNBF[nbf]
                    newTrove = trove
                    newVersion = troveTup[1]
                else:
                    badTrove = trove
                    badVersion = troveTup[1]
                    newTrove, newVersion = trovesByNBF[nbf]
                name = nbf[0]
                flavor = nbf[2]

                skipped = []
                for badTroveTup in badTrove.iterBuiltTroves():
                    badNbf = (badTroveTup[0], targetBranch, badTroveTup[2])
                    if not ':' in badTroveTup[0]:
                        skipped.append(badTroveTup[0])

                    if badNbf in trovesByNBF and badTrove is trovesByNBF[
                            badNbf][0]:
                        del trovesByNBF[badNbf]

                skipped = '%s' % (', '.join(skipped))
                log.warning("Not committing %s on %s[%s]%s - overridden by"
                            " %s[%s]%s" %
                            (skipped, badTroveTup[1], badTroveTup[2],
                             badTrove.getContextStr(), newVersion, flavor,
                             newTrove.getContextStr()))
                if trove is badTrove:
                    break

            trovesByNBF[nbf] = trove, troveTup[1]
            branchMap[branch] = targetBranch

    for nbf, (trove, tupVersion) in trovesByNBF.items():
        if tupVersion.branch() != nbf[1]:
            trovesToClone.append((nbf[0], tupVersion, nbf[2]))

    if not trovesToClone:
        if sourceOnly:
            err = 'Could not find sources to commit'
        elif alreadyCommitted:
            log.warning('All built troves have already been committed')
            return True, {}
        else:
            err = 'Can only commit built troves, none found'
        return False, err
    if sourcesToCheck and not commitOutdatedSources:
        outdated = _checkOutdatedSources(repos, sourcesToCheck)
        if outdated:
            outdated = ( '%s=%s (replaced by newer %s)' \
                         % (name, builtVer, newVer.trailingRevision())
                         for (name, builtVer, newVer) in outdated)
            err = ('The following source troves are out of date:\n%s\n\n'
                   'Use --commit-outdated-sources to commit anyway' %
                   '\n'.join(outdated))
            return False, err

    # only update build info if we'll be okay if some buildreqs are not
    # updated
    updateBuildInfo = compat.ConaryVersion().acceptsPartialBuildReqCloning()
    callback = callbacks.CloneCallback(conaryclient.cfg, message)
    passed, cs = conaryclient.createTargetedCloneChangeSet(
        branchMap,
        trovesToClone,
        updateBuildInfo=updateBuildInfo,
        cloneSources=False,
        trackClone=False,
        callback=callback,
        fullRecurse=False)
    if passed:
        oldTroves = []
        for troveCs in cs.iterNewTroveList():
            if troveCs.getOldVersion():
                oldTroves.append(troveCs.getOldNameVersionFlavor())
        if oldTroves:
            oldDict = {}
            for oldTrove in repos.getTroves(oldTroves):
                oldDict.setdefault(oldTrove.getNameVersionFlavor(),
                                   []).append(oldTrove)
        for troveCs in cs.iterNewTroveList():
            if troveCs.getOldVersion():
                trv = oldDict[troveCs.getOldNameVersionFlavor()].pop()
                trv.applyChangeSet(troveCs)
            else:
                trv = Trove(troveCs)
            for _, childVersion, _ in trv.iterTroveList(strongRefs=True,
                                                        weakRefs=True):
                # make sure there are not any references to the internal
                # rmake repository - that would be a bad bug - easy to
                # do with the way we do cooking of groups.
                onRepos = childVersion.getHost() == reposName
                assert not onRepos, "Trove %s references repository" % trv
            n, v, f = troveCs.getNewNameVersionFlavor()
            trove, troveVersion = trovesByNBF[n, v.branch(), f]
            troveNVFC = trove.getNameVersionFlavor(withContext=True)
            # map jobId -> trove -> binaries
            mapping[trove.jobId].setdefault(troveNVFC, []).append((n, v, f))
    else:
        return False, 'Creating clone failed'

    signatureKey = conaryclient.cfg.signatureKey
    if signatureKey and compat.ConaryVersion().signAfterPromote():
        finalCs = signAbsoluteChangeset(cs, signatureKey)
    if writeToFile:
        cs.writeToFile(writeToFile)
    else:
        repos.commitChangeSet(cs, callback=callback)
    return True, mapping