def test(self):
    """Compute requirements that no component of this package provides,
    then look them up in the local system database.

    Returns False (nothing to check) when every requirement is satisfied
    within the package itself; otherwise primes the state used by do()
    and returns True.
    """
    autoComponents = self.recipe.autopkg.components
    allRequires = deps.DependencySet()
    allProvides = deps.DependencySet()
    for component in autoComponents.values():
        allRequires.union(component.requires)
        allProvides.union(component.provides)
    # deps required by some component but provided by none of them
    self.depSet = deps.DependencySet()
    self.depSet.union(allRequires - allProvides)
    # split into single-dep sets, restricted to the class we police
    depSetList = []
    for dep in self.depSet.iterDepsByClass(self.depClass):
        single = deps.DependencySet()
        single.addDep(self.depClass, dep)
        depSetList.append(single)
    if not depSetList:
        return False
    self._initComponentExceptions()
    cfg = self.recipe.cfg
    self.db = database.Database(cfg.root, cfg.dbPath)
    self.systemProvides = self.db.getTrovesWithProvides(depSetList)
    self.unprovided = [ds for ds in depSetList
                       if ds not in self.systemProvides]
    self.transitiveBuildRequires = \
        self.recipe._getTransitiveBuildRequiresNames()
    # For compatibility with older external policy that derives from this
    self.truncatedBuildRequires = self.transitiveBuildRequires
    self.setTalk()
    self.missingBuildRequires = set()
    return True
def testDependencies(self):
    """Exercise DependenciesStream: set/get round-trips, freeze/thaw,
    diff generation, and three-way merge (twm), including transitions
    to and from the empty dependency set."""
    depSet = deps.DependencySet()
    depSet.addDep(deps.FileDependencies, deps.Dependency('/foo/bar', []))
    d = DependenciesStream()
    d.set(depSet)
    assert (d() == depSet)
    # freeze/thaw must preserve equality and the printable form
    d2 = DependenciesStream(d.freeze())
    assert (d == d2)
    assert (str(d2.deps) == "file: /foo/bar")
    # currently there should be no diff between d and d2
    assert (d.diff(d2) is None)
    # add another dep to d (by modifying its dependency set)
    # now the diff should contain all of the dependencies in d
    # (when the diff method is used, d2 would be the "old" dependency
    # set)
    depSet.addDep(deps.FileDependencies, deps.Dependency('/foo/baz', []))
    d.set(depSet)
    diff = d.diff(d2)
    d3 = DependenciesStream(diff)
    assert (d == d3)
    # now change d's depset again, and do a three way merge back to
    # the d3 state
    depSet.addDep(deps.FileDependencies, deps.Dependency('/foo/fred', []))
    base = deps.DependencySet()
    d.twm(diff, base)
    assert (d == d3)
    # verify that going from an empty depSet to one with deps in it
    # works
    depSet = deps.DependencySet()
    d = DependenciesStream()
    d.set(depSet)
    diff = d2.diff(d)
    d.twm(diff, d)
    assert (d == d2)
    # verify that going from a depSet with deps in it to an empty one
    # works
    depSet = deps.DependencySet()
    d = DependenciesStream()
    d.set(depSet)
    assert (d2.freeze() != '')
    assert (d.freeze() == '')
    diff = d.diff(d2)
    assert (diff == '')
    d2.twm(diff, d2)
    assert (d == d2)
    assert (d2.freeze() == '')
def __init__(self, name, recipe):
    """Set up an empty component named *name*; the component always
    provides its own trove name and starts with the recipe's Use flavor."""
    dict.__init__(self)
    self.name = name
    self.recipe = recipe
    self.requires = deps.DependencySet()
    self.provides = deps.DependencySet()
    # a component implicitly provides itself as a trove
    self.provides.addDep(deps.TroveDependencies, deps.Dependency(name))
    self.flavor = _getUseFlavor(recipe)
    # per-path bookkeeping filled in as files are added
    self.linkGroups = {}
    self.requiresMap = {}
    self.providesMap = {}
    self.hardlinkMap = {}
    self.badhardlinks = []
def check(self):
    """Return a list of ((name, version, flavor), failedDeps) pairs for
    every job whose requirements are not satisfied by masterProvides,
    with ignored dependency classes stripped first."""
    unsatisfied = []
    for job, requires in self.jobs:
        remaining = requires.copy()
        for ignoredClass in self.ignoreDepClasses:
            remaining.removeDepsByClass(ignoredClass)
        if self.masterProvides.satisfies(remaining):
            continue
        # narrow the failure down to the individual deps that miss
        failedDep = deps.DependencySet()
        for depClass, dep in remaining.iterDeps():
            probe = deps.DependencySet()
            probe.addDeps(depClass, [dep])
            if not self.masterProvides.satisfies(probe):
                failedDep.union(probe)
        unsatisfied.append(((job[0], job[2][0], job[2][1]), failedDep))
    return unsatisfied
def testRegularFileBasics(self):
    """Check RegularFile defaults (flags, deps, flavor, perms, owner,
    group, link group, fileId) and that every constructor keyword is
    reflected in the resulting file stream."""
    foo = filetypes.RegularFile(contents='foo1')
    fileObj = foo.get(pathId)
    f = foo.getContents()
    self.assertEquals(f.read(), 'foo1')
    self.assertEquals(fileObj.flags(), 0)
    self.assertEquals(fileObj.flavor(), deps.Flavor())
    self.assertEquals(fileObj.provides(), deps.DependencySet())
    self.assertEquals(fileObj.requires(), deps.DependencySet())
    self.assertEquals(fileObj.inode.perms(), 0644)
    self.assertEquals(fileObj.inode.owner(), 'root')
    self.assertEquals(fileObj.inode.group(), 'root')
    self.assertEquals(fileObj.lsTag, '-')
    self.assertEquals(fileObj.linkGroup(), None)
    # fileId is deterministic for identical metadata + contents
    self.assertEquals(fileObj.fileId(),
        '(\x01\x9a\xcbz\xbb\x93\x15\x01c\xcf\xd5\x14\xef\xf7,S\xbb\xf8p')
    requires = deps.ThawDependencySet('4#foo::runtime')
    provides = deps.ThawDependencySet('11#foo')
    flv = deps.parseFlavor('xen,domU is: x86')
    # exercise every supported keyword at once
    bar = filetypes.RegularFile(contents=StringIO('bar'),
                                config=True,
                                provides=provides,
                                requires=requires,
                                flavor=flv,
                                owner='foo',
                                group='bar',
                                perms=0700,
                                mtime=12345,
                                tags=['tag1', 'tag2'])
    fileObj = bar.get(pathId)
    self.assertEquals(bool(fileObj.flags.isInitialContents()), False)
    self.assertEquals(bool(fileObj.flags.isTransient()), False)
    self.assertEquals(bool(fileObj.flags.isConfig()), True)
    self.assertEquals(fileObj.requires(), requires)
    self.assertEquals(fileObj.provides(), provides)
    self.assertEquals(fileObj.flavor(), flv)
    self.assertEquals(fileObj.inode.perms(), 0700)
    self.assertEquals(fileObj.inode.owner(), 'foo')
    self.assertEquals(fileObj.inode.group(), 'bar')
    self.assertEquals(fileObj.inode.mtime(), 12345)
    self.assertEquals(fileObj.tags(), ['tag1', 'tag2'])
def hashGroupDeps(self, groupTroves, depClass, dependency):
    """Return a stable SHA1 hex digest identifying *dependency* resolved
    against the given group troves.

    The digest covers the frozen single-dep set plus the sorted hashes of
    all group troves, so a changed group membership changes the key.
    """
    depSet = deps.DependencySet()
    depSet.addDep(depClass, dependency)
    frz = depSet.freeze()
    troveList = sorted(
        self.hashTrove(withFiles=False,
                       withFileContents=False,
                       *x.getNameVersionFlavor()) for x in groupTroves)
    # renamed from 'str': do not shadow the builtin
    data = '[1]%s%s%s' % (len(frz), frz, ''.join(troveList))
    return sha1helper.sha1ToString(sha1helper.sha1String(data))
def updateArgs(self, *args, **keywords):
    """Record a manually specified RPM provision.

    Positional form: (provisionString, componentName). The provision
    string must parse as 'rpm:' or 'rpmlib:' via provisionRe; anything
    else raises PolicyError. Also consumes the exceptDeps and (internal)
    _mergeKmodSymbols keywords before delegating to Policy.updateArgs.
    """
    # '==' rather than 'is': identity comparison of ints only works by
    # accident of CPython small-int caching
    if len(args) == 2:
        name = args[1]
        if ':' not in name:
            name = name + ':rpm'
        # ensure an entry exists for the component even if parsing
        # fails below (preserves historical side effect)
        if not self.provisions.get(name):
            self.provisions[name] = deps.DependencySet()
        reMatch = self.provisionRe.match(args[0])
        if not reMatch or len(reMatch.groups()) != 3:
            return
        depClass = reMatch.group(1).strip().lower()
        if depClass != 'rpm' and depClass != 'rpmlib':
            # parenthesized raise is valid on both python 2 and 3
            raise policy.PolicyError(
                "RPMProvides cannot be used to "
                "provide the non-rpm dependency: '%s'" % args[0])
        dep = reMatch.group(2).strip()
        flags = reMatch.group(3).strip().split()
        flags = [(x, deps.FLAG_SENSE_REQUIRED) for x in flags if x]
        # the entry was guaranteed above; the original re-checked here
        # redundantly
        self.provisions[name].addDep(
            deps.dependencyClassesByName[depClass],
            deps.Dependency(dep, flags))
    exceptDeps = keywords.pop('exceptDeps', None)
    if exceptDeps:
        if type(exceptDeps) is str:
            exceptDeps = ('.*', exceptDeps)
        assert (type(exceptDeps) == tuple)
        if type(exceptDeps[0]) is tuple:
            self.exceptDeps.extend(exceptDeps)
        else:
            self.exceptDeps.append(exceptDeps)
    # CNY-3518: set the default for whether to merge modules --
    # this should be passed in only from RPMRequires
    if '_mergeKmodSymbols' in keywords:
        self.mergeKmodSymbols = keywords.pop('_mergeKmodSymbols')
    policy.Policy.updateArgs(self, **keywords)
def updateArgs(self, *args, **keywords):
    """Record a manually specified RPM requirement.

    Positional form: (requirementString, componentName). The requirement
    must parse as 'rpm:' or 'rpmlib:' via requirementRe; anything else
    raises PolicyError. Also consumes exceptions, exceptDeps and
    mergeKmodSymbols keywords before delegating to Policy.updateArgs.
    """
    # '==' rather than 'is': identity comparison of ints only works by
    # accident of CPython small-int caching
    if len(args) == 2:
        name = args[1]
        if ':' not in name:
            name = name + ':rpm'
        reMatch = self.requirementRe.match(args[0])
        if not reMatch or len(reMatch.groups()) != 3:
            return
        depClass = reMatch.group(1).strip().lower()
        if depClass != 'rpm' and depClass != 'rpmlib':
            # parenthesized raise is valid on both python 2 and 3
            raise policy.PolicyError(
                "RPMRequires cannot be used to "
                "provide the non-rpm dependency: '%s'" % args[0])
        dep = reMatch.group(2).strip()
        flags = reMatch.group(3).strip().split()
        flags = [(x, deps.FLAG_SENSE_REQUIRED) for x in flags if x]
        if not self.requirements.get(name):
            self.requirements[name] = deps.DependencySet()
        self.requirements[name].addDep(
            deps.dependencyClassesByName[depClass],
            deps.Dependency(dep, flags))
    allowUnusedFilters = keywords.pop('allowUnusedFilters', False) or \
        self.allowUnusedFilters
    exceptions = keywords.pop('exceptions', None)
    if exceptions:
        if type(exceptions) is str:
            self.excepts.add(exceptions)
            if not allowUnusedFilters:
                self.unusedFilters['exceptions'].add(exceptions)
        elif type(exceptions) in (tuple, list):
            self.excepts.update(exceptions)
            if not allowUnusedFilters:
                self.unusedFilters['exceptions'].update(exceptions)
    exceptDeps = keywords.pop('exceptDeps', None)
    if exceptDeps:
        if type(exceptDeps) is str:
            exceptDeps = ('.*', exceptDeps)
        assert (type(exceptDeps) == tuple)
        if type(exceptDeps[0]) is tuple:
            self.exceptDeps.extend(exceptDeps)
        else:
            self.exceptDeps.append(exceptDeps)
    # CNY-3518: set the default for whether to merge modules
    if 'mergeKmodSymbols' in keywords:
        self.mergeKmodSymbols = keywords.pop('mergeKmodSymbols')
        # keep RPMProvides in sync with our kmod-symbol merging choice
        self.recipe.RPMProvides(_mergeKmodSymbols=self.mergeKmodSymbols)
    policy.Policy.updateArgs(self, **keywords)
def _filterProvides(self, compName, provides):
    """Return *provides* with every dependency removed that matches an
    exceptDeps (component-regex, dep-regex) filter for *compName*."""
    toRemove = deps.DependencySet()
    for depClass, dep in provides.iterDeps():
        for compRe, depRe in self.exceptDeps:
            if compRe.match(compName):
                # filters match the 'class: dep' display form
                label = '%s: %s' % (depClass.tagName, str(dep))
                if depRe.match(label):
                    toRemove.addDep(depClass, dep)
                    break
    return provides - toRemove
def __init__(self, **kwargs):
    """Validate keyword arguments against the class's declared kwargs and
    aliases, then normalize dependency, flavor, mtime and tag values.

    Raises ParameterError for unknown keywords or when both an alias and
    its canonical name are given.
    """
    # _File itself is abstract; only subclasses may be instantiated
    assert type(self) != _File
    for arg in kwargs:
        aliasedArg = self.aliasedArgs.get(arg, arg)
        if aliasedArg not in self.kwargs:
            raise ParameterError("'%s' is not allowed for this class" % arg)
    for key, val in self.aliasedArgs.iteritems():
        if key in kwargs and val in kwargs:
            raise ParameterError( \
                "'%s' and '%s' cannot be specified together" % \
                (key, val))
        elif key in kwargs:
            # copy the aliased spelling over to the canonical name
            kwargs[val] = kwargs[key]
    for key, val in self.__class__.kwargs.iteritems():
        setattr(self, key, kwargs.get(key, val))
    self.mtime = int(self.mtime or time.time())
    # normalize deps/flavor: accept strings or ready-made objects.
    # isinstance() used consistently (the original mixed type() == str
    # with isinstance for no reason)
    if isinstance(self.requires, str):
        self.requires = deps.parseDep(self.requires)
    elif self.requires is None:
        self.requires = deps.DependencySet()
    if isinstance(self.provides, str):
        self.provides = deps.parseDep(self.provides)
    elif self.provides is None:
        self.provides = deps.DependencySet()
    if isinstance(self.flavor, str):
        self.flavor = deps.parseFlavor(self.flavor)
    if self.flavor is None:
        self.flavor = deps.Flavor()
    if self.tags is None:
        self.tags = []
def getRpmLibProvidesSet(rpm):
    """
    Retrieve a dependency set that represents the rpmlib provides
    from the loaded rpm module.
    @param rpm: the rpm module
    @type rpm: module
    @return: A dependency containing the virtual items that rpmlib provides
    @rtype: conary.deps.deps.DependencySet()
    """
    provSet = deps.DependencySet()
    for prov in rpm.ds.Rpmlib():
        # prov.N() looks like 'rpmlib(Feature)'; keep only 'Feature'
        feature = prov.N().split('(')[1].split(')')[0]
        provSet.union(deps.parseDep('rpmlib: ' + feature))
    return provSet
def getDepsForTroveList(self, troveTupList, provides = True, requires = True):
    """Return a list, parallel to troveTupList, of (provides, requires)
    pairs, consulting the dep cache and trove cache first and falling
    back to the trove source for anything still missing."""
    def missingNeeded(depTuple):
        # True when the cached entry doesn't cover everything requested
        if depTuple is None: return True
        if provides and depTuple[0] is None: return True
        if requires and depTuple[1] is None: return True

        return False

    def mergeCacheEntry(troveTup, depTuple):
        # NOTE(review): this helper looks buggy and appears unused in
        # the visible span: 'existing' is looked up by depTuple (not
        # troveTup), and 'depInfo' is not defined in this scope --
        # presumably both should reference depTuple/troveTup. Confirm
        # against the full file before relying on it.
        existing = self.depCache.get(depTuple)
        if existing is None:
            self.depCache[troveTup] = depInfo
        else:
            self.depCache[troveTup] = (depTuple[0] or existing[0],
                                       depTuple[1] or existing[1])

    # look in the dep cache and trove cache
    result = [ None ] * len(troveTupList)
    for i, tup in enumerate(troveTupList):
        result[i] = self.getDepCacheEntry(tup)
        if result[i] is None and self.troveIsCached(tup):
            trv = self.cache[tup]
            result[i] = (trv.getProvides(), trv.getRequires())
        elif result[i] is None and trove.troveIsPackage(tup[0]):
            # packages provide only themselves; querying the repository
            # to figure that out seems unnecessarily complicated
            result[i] = (deps.parseDep('trove: %s' % tup[0]),
                         deps.DependencySet())

    # indexes (and tuples) still lacking the requested dep information
    needed = [ (i, troveTup) for i, (troveTup, depSets) in
                    enumerate(izip(troveTupList, result))
                    if missingNeeded(depSets) ]
    if not needed:
        return result

    # use the getDepsForTroveList call; it raises an error if it needs
    # to access some repositories which don't support it
    log.info("Getting deps for %d troves" % len(needed))
    try:
        depList = self.troveSource.getDepsForTroveList(
                [ x[1] for x in needed ],
                provides = provides, requires = requires)
    except netclient.PartialResultsError, e:
        # we can't use this call everywhere; handle what we can and we'll
        # deal with the None's later
        depList = e.partialResults
    # NOTE(review): the function continues past this visible chunk;
    # depList is consumed in code not shown here.
def postProcess(self):
    """After PackageSpec post-processing, merge dependencies inherited
    from the parent (derived-from) package into each component, skipping
    anything already expressed by this build's file dependencies."""
    packagepolicy.PackageSpec.postProcess(self)
    # aggregate everything the files themselves provide/require
    fileProvides = deps.DependencySet()
    fileRequires = deps.DependencySet()
    for fileObj in self.pathObjs.values():
        fileProvides.union(fileObj.provides())
        fileRequires.union(fileObj.requires())
    for comp in self.recipe.autopkg.components.values():
        if comp.name in self.recipe._componentReqs:
            # copy component dependencies for components which came
            # from derived packages, only for dependencies that are
            # not expressed in the file dependencies
            comp.requires.union(self.recipe._componentReqs[comp.name]
                                - fileRequires)
            # copy only the provisions that won't be handled through
            # ComponentProvides, which may remove capability flags
            depSet = deps.DependencySet()
            for dep in self.recipe._componentProvs[comp.name].iterDeps():
                # skip trove deps naming components of this package
                if (dep[0] is deps.TroveDependencies and
                        dep[1].getName()[0] in self.recipe._componentReqs):
                    continue
                depSet.addDep(*dep)
            comp.provides.union(depSet - fileProvides)
def resolveDependenciesByGroups(self, repos, groupTroves, depList):
    """Resolve each DependencySet in depList against groupTroves,
    serving individual dependencies from the on-disk store when a cached
    result exists and asking the repository only for the misses.

    Returns {depSet: [resultList, ...]} with one result list per
    dependency in iterDeps(sort=True) order. Newly fetched results are
    written back to the store unless self.readOnly is set.
    """
    allToFind = []
    allFound = []
    allMissing = []
    for depSet in depList:
        toFind = deps.DependencySet()
        found = []
        missingIdx = []
        allToFind.append(toFind)
        allFound.append(found)
        allMissing.append(missingIdx)
        for idx, (depClass, dependency) in \
                enumerate(depSet.iterDeps(sort=True)):
            depHash = str(
                self.hashGroupDeps(groupTroves, depClass, dependency))
            if self.store.hasFile(depHash):
                # cache hit: thaw the stored result list
                outFile = self.store.openFile(depHash)
                results = DependencyResultList(outFile.read()).get()
                found.append(results)
            else:
                # cache miss: remember the slot to fill in later
                toFind.addDep(depClass, dependency)
                found.append(None)
                missingIdx.append((idx, depHash))
    if [x for x in allToFind if x]:
        allResults = repos.resolveDependenciesByGroups(groupTroves,
                                                       allToFind)
        for found, toFind, missingIdx in itertools.izip(allFound,
                                                        allToFind,
                                                        allMissing):
            if toFind.isEmpty():
                continue
            # renamed from 'iter' to avoid shadowing the builtin
            resultIter = itertools.izip(missingIdx,
                                        toFind.iterDeps(sort=True),
                                        allResults[toFind])
            for (idx, depHash), (depClass, dependency), resultList \
                    in resultIter:
                found[idx] = resultList
                if self.readOnly:
                    continue
                # freeze the result and add it to the cache store
                # (plain loop instead of a side-effect list comp)
                depResultList = DependencyResultList()
                for result in resultList:
                    depResultList.add(*result)
                s = StringIO()
                s.write(depResultList.freeze())
                s.seek(0)
                self.store.addFile(s, depHash, integrityCheck=False)
    allResults = {}
    for result, depSet in itertools.izip(allFound, depList):
        allResults[depSet] = result
    return allResults
def do(self):
    """For bootstrap-flavored builds, drop trove requirements that point
    at troves outside this package's own components."""
    components = self.recipe.autopkg.getComponents()
    componentNames = set(x.getName() for x in components)
    for cmp in components:
        removed = False
        depSet = deps.DependencySet()
        for depClass, dep in cmp.requires.iterDeps():
            depName = depClass.tagName
            if depName == 'trove' and str(dep) not in componentNames:
                self.info("removing 'trove: %s' for bootstrap flavor",
                          dep)
                removed = True
                # record bootstrap flavor
                # NOTE(review): this no-op 'if' presumably exists so
                # that reading use.Use.bootstrap records the flag in
                # the package flavor -- confirm against Use-flag
                # tracking semantics.
                if use.Use.bootstrap:
                    pass
            else:
                # keep every other dependency unchanged
                depSet.addDep(depClass, dep)
        if removed:
            cmp.requires = depSet
def do(self):
    """Run the base ComponentRequires policy, then drop intercomponent
    trove requirements that point at components which are now empty."""
    packagepolicy.ComponentRequires.do(self)
    # Remove any intercomponent dependencies which point to troves which
    # are now empty. We wouldn't have created any, but we could have
    # inherited some during PackageSpec
    components = self.recipe.autopkg.components
    packageMap = self.recipe.autopkg.packageMap
    mainSet = set(main.name for main in packageMap)
    for comp in components.values():
        staleDeps = deps.DependencySet()
        for dep in comp.requires.iterDepsByClass(deps.TroveDependencies):
            depName = dep.getName()[0]
            if ':' not in depName:
                continue
            mainName = depName.split(':', 1)[0]
            if mainName not in mainSet:
                continue
            # the target is one of ours but missing or empty: stale
            if depName not in components or not components[depName]:
                staleDeps.addDep(deps.TroveDependencies, dep)
        comp.requires -= staleDeps
def __init__(self, readConfigFiles=False, ignoreErrors=False):
    """Initialize node configuration, filling in hostName, name and a
    default buildFlavors entry (one merged instruction-set dep set built
    from all current architectures) when they are not configured."""
    self.setIgnoreErrors(ignoreErrors)
    servercfg.rMakeBuilderConfiguration.__init__(self)

    if readConfigFiles:
        self.readFiles()
    if not self.hostName:
        self.hostName = socket.getfqdn()
    if not self.name:
        self.name = self.hostName.split('.')[0]
    if not self.buildFlavors:
        # one DependencySet accumulates deps from every current arch;
        # a single merged entry is added to buildFlavors
        insSet = deps.DependencySet()
        for depList in arch.currentArch:
            for dep in depList:
                flags = dep.getFlags()[0]
                # don't include "prefers" flags.
                flags = [(x[0], x[1]) for x in flags
                         if x[1] in (deps.FLAG_SENSE_REQUIRED,
                                     deps.FLAG_SENSE_DISALLOWED)]
                newDep = deps.Dependency(dep.name, flags)
                insSet.addDep(deps.InstructionSetDependency, newDep)
        self.buildFlavors.add(insSet)
def _getRecursiveRequirements(self, db, troveList, flavorPath):
    # gets the recursive requirements for the listed packages
    #
    # Iterates to a fixpoint: each pass resolves the (non-ABI)
    # requirements of the current frontier against the database and
    # the newly found providers become the next frontier. Returns the
    # set of all troves visited.
    seen = set()
    while troveList:
        depSetList = []
        for trv in db.getTroves(list(troveList), withFiles=False):
            required = deps.DependencySet()
            oldRequired = trv.getRequires()
            # copy everything except ABI deps (always satisfied locally)
            [ required.addDep(*x) for x in oldRequired.iterDeps()
              if x[0] != deps.AbiDependency ]
            depSetList.append(required)
        seen.update(troveList)
        sols = db.getTrovesWithProvides(depSetList, splitByDep=True)
        troveList = set()
        for depSetSols in sols.itervalues():
            for depSols in depSetSols:
                bestChoices = []
                # if any solution for a dep is satisfied by the installFlavor
                # path, then choose the solutions that are satisfied as
                # early as possible on the flavor path. Otherwise return
                # all solutions.
                for flavor in flavorPath:
                    bestChoices = [ x for x in depSols
                                    if flavor.satisfies(x[2]) ]
                    if bestChoices:
                        break
                if bestChoices:
                    depSols = set(bestChoices)
                else:
                    depSols = set(depSols)
                # don't revisit troves already processed
                depSols.difference_update(seen)
                troveList.update(depSols)
    return seen
def _addPhantomTrove(self, changeSet, rpmlibHeader, callback, num, total):
    """Synthesize a 'phantom' component trove (and, if needed, a
    containing package trove) for an RPM that exists in the system's RPM
    database, and append both to *changeSet*."""
    header = rpmhelper.headerFromBlob(rpmlibHeader.unload())
    callback.capsuleSyncCreate(self.kind, str(header.getNevra()), num,
                               total)
    name, version, flavor = self._getPhantomNVF(header)
    # Fake trove
    trv = trove.Trove(name, version, flavor)
    provides = header.getProvides()
    # every trove provides its own name on top of the RPM provides
    provides.addDep(deps.TroveDependencies, deps.Dependency(name))
    trv.setProvides(provides)
    trv.setRequires(header.getRequires(enableRPMVersionDeps=False))
    # Fake capsule file
    path = str(header.getNevra()) + '.rpm'
    fileHelper = filetypes.RegularFile(contents='')
    fileStream = fileHelper.get(pathId=trove.CAPSULE_PATHID)
    trv.addRpmCapsule(path, version, fileStream.fileId(), header)
    changeSet.addFile(None, fileStream.fileId(), fileStream.freeze())
    # Fake encapsulated files
    self._addPhantomContents(changeSet, trv, header)
    trv.computeDigests()
    changeSet.newTrove(trv.diff(None)[0])
    # Make a fake package to contain the fake component
    pkgName = name.split(':')[0]
    if self.db.hasTrove(pkgName, version, flavor):
        # It's possible to erase just the component and leave the package,
        # so don't try to create it again.
        return
    pkg = trove.Trove(pkgName, version, flavor)
    provides = deps.DependencySet()
    provides.addDep(deps.TroveDependencies, deps.Dependency(pkgName))
    pkg.setProvides(provides)
    pkg.setIsCollection(True)
    pkg.addTrove(name, version, flavor, byDefault=True)
    pkg.computeDigests()
    changeSet.newTrove(pkg.diff(None)[0])
def testMultilib(self):
    """Setting build flags from an x86_64 instruction-set flavor must
    enable Arch.x86_64 while leaving Arch.x86 off."""
    flavor = deps.DependencySet()
    flavor.addDeps(deps.InstructionSetDependency, arch.flags_x86_64()[0])
    use.setBuildFlagsFromFlavor(None, flavor)
    self.assertTrue(use.Arch.x86_64)
    self.assertFalse(use.Arch.x86)
def filterSuggestions(self, depList, sugg, suggMap):
    """
    Given a list of several suggestions for one dependency,
    pick the dep that matches the best.
    """
    troves = set()

    for (troveTup, depSet) in depList:
        choicesBySolution = {}
        seen = set()
        if depSet in sugg:
            suggList = set()
            # pair each per-dep choice list with the dep it solves
            choicesAndDep = itertools.izip(sugg[depSet],
                                           depSet.iterDeps(sort=True))
            for choiceList, (depClass, dep) in choicesAndDep:
                troveNames = set(x[0] for x in choiceList)
                if self.db:
                    # affinity: what's already installed under each name
                    affTroveDict = \
                        dict((x, self.db.trovesByName(x))
                             for x in troveNames)
                else:
                    affTroveDict = dict.fromkeys(troveNames, {})
                # iterate over flavorpath -- use suggestions
                # from first flavor on flavorpath that gets a match
                for installFlavor in self.flavor:
                    choice = self.selectResolutionTrove(troveTup, dep,
                                                        depClass,
                                                        choiceList,
                                                        installFlavor,
                                                        affTroveDict)
                    if choice:
                        suggList.add(choice)
                        l = suggMap.setdefault(troveTup, set())
                        l.add(choice)
                        if choice not in seen:
                            # accumulate, per chosen trove, the deps it
                            # was selected to satisfy (for logging)
                            if choice not in choicesBySolution:
                                d = deps.DependencySet()
                                choicesBySolution[choice] = d
                            else:
                                d = choicesBySolution[choice]
                            d.addDep(depClass, dep)
                        break
            if choicesBySolution:
                for choice, depSet in sorted(
                        choicesBySolution.iteritems()):
                    seen.add(choice)
                    # show at most five deps per resolved trove
                    depSet = str(depSet).split('\n')
                    if len(depSet) > 5:
                        depSet = depSet[0:5] + ['...']
                    depSet = '\n '.join(depSet)
                    log.debug('Resolved:\n'
                              ' %s=%s/%s[%s]\n'
                              ' Required: %s\n'
                              ' Adding: %s=%s/%s[%s]',
                              troveTup[0],
                              troveTup[1].trailingLabel(),
                              troveTup[1].trailingRevision(),
                              troveTup[2],
                              depSet,
                              choice[0],
                              choice[1].trailingLabel(),
                              choice[1].trailingRevision(),
                              choice[2])
            # convert chosen troves into install-job tuples
            troves.update([(x[0], (None, None), x[1:], True)
                           for x in suggList])

    return troves
def __init__(self, troveSource, ignoreDepClasses=None):
    """Set up the dependency checker.

    @param troveSource: source used to look up troves
    @param ignoreDepClasses: iterable of dependency classes to skip when
        checking (defaults to none). The original signature used a
        mutable default (set()), which is shared between calls; None is
        used as the sentinel instead.
    """
    if ignoreDepClasses is None:
        ignoreDepClasses = set()
    self.ignoreDepClasses = ignoreDepClasses
    self.masterProvides = deps.DependencySet()
    self.troveSource = troveSource
    self.jobs = []
def testDatabase1(self):
    """End-to-end exercise of the local sqldb Database: add/get troves
    with files and requires, pinning, containers/references queries,
    erase, file removal with pristine vs. non-pristine views, and table
    cleanup after commit."""
    db = sqldb.Database(':memory:')

    f1 = files.FileFromFilesystem("/etc/passwd", self.id1)
    f2 = files.FileFromFilesystem("/etc/services", self.id2)
    f3 = files.FileFromFilesystem("/etc/group", self.id3)

    trv = trove.Trove("testcomp", self.v10, self.emptyFlavor, None)
    trv.addFile(self.id1, "/bin/1", self.v10, f1.fileId())
    trv.addFile(self.id2, "/bin/2", self.v10, f2.fileId())
    trv.addFile(self.id3, "/bin/3", self.v10, f3.fileId())
    trv.troveInfo.size.set(1234)
    trv.troveInfo.sourceName.set('thesource')

    # one requirement from each of three dep classes
    req = deps.DependencySet()
    req.addDep(deps.FileDependencies, deps.Dependency("/bin/bash"))
    req.addDep(deps.TroveDependencies, deps.Dependency("foo:runtime"))
    req.addDep(deps.SonameDependencies, deps.Dependency("libtest.so.1"))
    trv.setRequires(req)

    trvInfo = db.addTrove(trv)
    db.addFile(trvInfo, f1.pathId(), "/bin/1", f1.fileId(), self.v10,
               fileStream=f1.freeze())
    db.addFile(trvInfo, f2.pathId(), "/bin/2", f2.fileId(), self.v10,
               fileStream=f2.freeze())
    db.addFile(trvInfo, f3.pathId(), "/bin/3", f3.fileId(), self.v10,
               fileStream=f3.freeze())
    db.addTroveDone(trvInfo)

    dbTrv = db.getTroves([("testcomp", self.v10, self.emptyFlavor)])[0]
    assert (dbTrv == trv)
    assert (dbTrv.__class__ == trove.Trove)
    assert (db.trovesArePinned([("testcomp", self.v10, self.emptyFlavor)
                                ]) == [False])

    # withFileObjects returns a trove exposing the file streams
    dbTrv = db.getTroves([("testcomp", self.v10, self.emptyFlavor)],
                         withFileObjects=True)[0]
    assert (dbTrv == trv)
    assert (dbTrv.__class__ == trove.TroveWithFileObjects)
    for f in (f1, f2, f3):
        assert (dbTrv.getFileObject(f.fileId()) == f)

    # a package containing the component, plus a weak reference
    trv2 = trove.Trove("testpkg", self.v10, self.emptyFlavor, None)
    ti = trv2.addTrove(trv.getName(), self.v10, trv.getFlavor())
    trv2.addTrove("weakref", self.v10, trv.getFlavor(), weakRef=True)
    ti = db.addTrove(trv2, pin=True)
    db.addTroveDone(ti)
    assert (db.trovesArePinned([("testpkg", self.v10, self.emptyFlavor)
                                ]) == [True])

    assert (db.getTroves([("testpkg", self.v10, self.emptyFlavor)
                          ])[0] == trv2)
    assert (db.getTroves([
        ("testpkg", self.v10, self.emptyFlavor)
        ])[0].getVersion().timeStamps() ==
            trv2.getVersion().timeStamps())
    # missing troves come back as None when the flag allows it
    assert (db.getTroves([("testpkg", self.v10, self.emptyFlavor),
                          ("testcomp", self.v10, self.emptyFlavor),
                          ("testitem", self.v10, self.emptyFlavor)],
                         True) == [trv2, trv, None])
    assert (db.getTroves([("testpkg", self.v10, self.emptyFlavor),
                          ("testcomp", self.v10, req)], True)
            == [trv2, None])

    assert (db.findTroveContainers(["testpkg", "testcomp"]) ==
            [[], [("testpkg", self.v10, self.emptyFlavor)]])
    assert (db.getTroveContainers([
                ("testpkg", self.v10, self.emptyFlavor),
                ("testcomp", self.v10, self.emptyFlavor) ]) ==
            [[], [("testpkg", self.v10, self.emptyFlavor)]])
    res = db.findTroveReferences(["testpkg", "testcomp"])
    # NOTE(review): 'res' above is unused; the call is repeated inside
    # the assert below.
    assert (db.findTroveReferences(["testpkg", "testcomp"
                                    ]) == [[], [("testcomp", self.v10,
                                                 self.emptyFlavor)]])

    # equal version spelled differently still matches, timestamps intact
    v10new = VersionFromString("/conary.rpath.com@test:trunk/1.2-10")
    assert (db.getTroves([("testpkg", v10new, self.emptyFlavor)
                          ])[0] == trv2)
    assert (db.getTroves([
        ("testpkg", v10new, self.emptyFlavor)
        ])[0].getVersion().timeStamps() ==
            trv2.getVersion().timeStamps())

    assert (set(db.findByNames(['testpkg', 'testcomp'])) ==
            set([("testpkg", self.v10, self.emptyFlavor),
                 ("testcomp", self.v10, self.emptyFlavor)]))

    db.eraseTrove("testcomp", self.v10, None)
    assert (db.getTroves([("testpkg", self.v10, self.emptyFlavor)
                          ])[0] == trv2)

    # re-add the component, this time with path hashes computed
    trv.computePathHashes()
    trvInfo = db.addTrove(trv)
    db.addFile(trvInfo, f1.pathId(), "/bin/1", f1.fileId(), self.v10,
               fileStream=f1.freeze())
    # NOTE(review): the next two calls pass fileStream=f1.freeze() for
    # f2's and f3's entries -- possibly intentional for this test,
    # possibly a copy/paste slip; confirm upstream.
    db.addFile(trvInfo, f2.pathId(), "/bin/2", f2.fileId(), self.v10,
               fileStream=f1.freeze())
    db.addFile(trvInfo, f3.pathId(), "/bin/3", f3.fileId(), self.v10,
               fileStream=f1.freeze())
    db.addTroveDone(trvInfo)
    assert (db.getTroves([("testcomp", self.v10, self.emptyFlavor)
                          ])[0] == trv)

    # removing a file shows up in the non-pristine view only
    db.removeFileFromTrove(trv, "/bin/1")
    changedTrv = db.getTroves([trv.getNameVersionFlavor()],
                              pristine=False)[0]
    otherChangedTrv = db.getTroves([trv.getNameVersionFlavor()],
                                   withFiles=False, pristine=False)[0]
    assert (len(changedTrv.idMap) + 1 == len(trv.idMap))
    assert (len(changedTrv.troveInfo.pathHashes) + 1 ==
            len(trv.troveInfo.pathHashes))
    assert (changedTrv.troveInfo.pathHashes ==
            otherChangedTrv.troveInfo.pathHashes)
    assert (len(otherChangedTrv.idMap) == 0)
    # adding the file back restores the pristine state
    changedTrv.addFile(self.id1, "/bin/1", self.v10, f1.fileId())
    assert (changedTrv.idMap == trv.idMap)
    changedTrv.computePathHashes()
    assert (changedTrv.troveInfo.pathHashes == trv.troveInfo.pathHashes)
    assert (db.getTroves([("testcomp", self.v10, self.emptyFlavor)],
                         pristine=True)[0] == trv)

    db.eraseTrove("testpkg", self.v10, None)
    assert (db.getTroves([("testpkg", self.v10, self.emptyFlavor)
                          ]) == [None])
    self.assertRaises(KeyError, db.instances.getVersion, 100)

    db.eraseTrove("testcomp", self.v10, None)
    db.commit()

    cu = db.db.cursor()
    # make sure the versions got removed; the None entry is still there
    cu.execute("SELECT count(*) FROM Versions")
    assert (cu.next()[0] == 1)
    # make sure the dependency table got cleaned up
    cu.execute("SELECT count(*) FROM Dependencies")
    assert (cu.next()[0] == 0)
    # make sure the instances table got cleaned up
    cu.execute("SELECT count(*) FROM Instances")
    assert (cu.next()[0] == 0)
    # make sure the troveInfo table got cleaned up
    cu.execute("SELECT count(*) FROM TroveInfo")
    assert (cu.next()[0] == 0)
def doFile(self, path):
    """Inspect one packaged symlink: warn on absolute or suspicious
    cross-component links, allow filtered dangling links (optionally
    adding a trove requirement), and error out (removing the file) on
    truly dangling symlinks."""
    # capsule-managed files are not policed here
    if hasattr(self.recipe, '_getCapsulePathsForFile'):
        if self.recipe._getCapsulePathsForFile(path):
            return
    d = self.macros.destdir
    f = util.joinPaths(d, path)
    if not os.path.islink(f):
        return

    recipe = self.recipe
    contents = os.readlink(f)
    if contents[0] == '/':
        self.warn('Absolute symlink %s points to %s,'
                  ' should probably be relative', path, contents)
        return
    abscontents = util.joinPaths(os.path.dirname(path), contents)
    # now resolve any intermediate symlinks
    dl = len(os.path.realpath(d))
    abscontents = os.path.realpath(d + abscontents)[dl:]
    ap = recipe.autopkg
    if abscontents in ap.pathMap:
        if ap.findComponent(abscontents) != ap.findComponent(path) and \
           not path.endswith('.so') and \
           not ap.findComponent(path).getName().endswith(':test'):
            # warn about suspicious cross-component symlink
            fromPkg = ap.findComponent(path)
            targetPkg = ap.findComponent(abscontents)

            found = False
            # check whether an existing requirement already covers the
            # target component's provides
            for depClass, dep in fromPkg.requires.iterDeps():
                d = deps.DependencySet()
                d.addDep(depClass, dep)
                if targetPkg.provides.satisfies(d):
                    found = True
                    break

            if not found:
                self.warn('symlink %s points from package %s to %s',
                          path, ap.findComponent(path).getName(),
                          ap.findComponent(abscontents).getName())
    else:
        # target is not packaged: check the allow-list of filters
        for targetFilter, requirement in self.targetFilters:
            if targetFilter.match(abscontents):
                # contents are an exception
                self.info('allowing special dangling symlink %s -> %s',
                          path, contents)
                if requirement:
                    self.info('automatically adding requirement'
                              ' %s for symlink %s', requirement, path)
                    # Requires has already run, touch this up
                    pkg = ap.findComponent(path)
                    if path not in pkg.requiresMap:
                        pkg.requiresMap[path] = deps.DependencySet()
                    pkg.requiresMap[path].addDep(
                        deps.TroveDependencies,
                        deps.Dependency(requirement, []))
                    f = pkg.getFile(path)
                    f.requires.set(pkg.requiresMap[path])
                    pkg.requires.union(f.requires())
                return
        for pathName in recipe.autopkg.pathMap:
            if pathName.startswith(abscontents):
                # a link to a subdirectory of a file that is
                # packaged is still OK; this test is expensive
                # and almost never needed, so put off till last
                return
        self.error("Dangling symlink: %s points to non-existant %s (%s)"
                   % (path, contents, abscontents))
        # now that an error has been logged, we need to get rid of the file
        # so the rest of policy won't barf trying to access a file which
        # doesn't *really* exist (CNP-59)
        os.unlink(self.recipe.macros.destdir + path)
def resolveDependencies(self, label, depList, leavesOnly=False):
    """Resolve depList against the troves in this set, using the trove
    cache for per-dep solutions (positive and negative), a lazily built
    provides index, and the in-memory dependency DB for the misses.
    Returns {depSet: [[troveTup, ...], ...]}."""
    def _depClassAndName(oneDep):
        # flatten a DependencySet into hashable (class, name) pairs
        # used as index keys
        s = set()

        for depClass, depName, flags in oneDep.iterRawDeps():
            s.add((depClass, depName))

        return s

    reqNames = set()
    finalDepList = []
    cachedSuggMap = {}
    for dep in depList:
        cachedResult = self.troveCache.getDepSolution(self.troveTupSig,
                                                      dep)
        if cachedResult is None:
            # Cache miss
            reqNames.update(_depClassAndName(dep))
            finalDepList.append(dep)
        else:
            # Cache hit ...
            if cachedResult:
                # ... and a trove matched
                cachedSuggMap[dep] = cachedResult
    if not finalDepList:
        return cachedSuggMap

    # Retrieve provides for all troves in the set
    emptyDep = deps.DependencySet()
    troveDeps = self.troveCache.getDepsForTroveList(self.troveTupList,
                                                    provides=True,
                                                    requires=False)

    # Build (once) a reverse index from (class, name) to the indexes of
    # troves whose provides mention it
    if self.providesIndex is None:
        index = {}
        self.providesIndex = index
        for i, (troveTup, (p, r)) in enumerate(
                itertools.izip(self.troveTupList, troveDeps)):
            classAndNameSet = _depClassAndName(p)
            for classAndName in classAndNameSet:
                val = index.get(classAndName)
                if val is None:
                    index[classAndName] = [i]
                else:
                    val.append(i)

    # For each requirement to be resolved, load any matching provides into
    # the resolver DB
    depLoader = self.depDb.bulkLoader()
    for classAndName in reqNames:
        val = self.providesIndex.get(classAndName)
        if val is None:
            continue
        for i in val:
            if self.inDepDb[i]:
                # already loaded on a previous call
                continue
            depTroveId = depLoader.addRaw(troveDeps[i][0], emptyDep)
            self.depTroveIdMap[depTroveId] = i
            self.inDepDb[i] = True
    depLoader.done()
    self.depDb.commit()

    if not self.depTroveIdMap:
        # No requirements were matched by troves in this set, so add
        # negative cache entries for all of the requirements checked.
        for depSet in depList:
            self.troveCache.addDepSolution(self.troveTupSig, depSet, [])
        return cachedSuggMap

    suggMap = self.depDb.resolve(label, finalDepList,
                                 leavesOnly=leavesOnly,
                                 troveIdList=self.depTroveIdMap.keys())
    # Convert resolver results back to trove tuples and insert into the
    # suggestion map
    for depSet, solListList in suggMap.iteritems():
        newSolListList = []
        for solList in solListList:
            newSolListList.append(
                [ self.troveTupList[self.depTroveIdMap[x]]
                  for x in solList ])
        if newSolListList:
            suggMap[depSet] = newSolListList
            self.troveCache.addDepSolution(self.troveTupSig, depSet,
                                           newSolListList)
    # Add negative cache entries for any remaining requirements that
    # weren't solved
    for depSet in finalDepList:
        if depSet not in suggMap:
            self.troveCache.addDepSolution(self.troveTupSig, depSet, [])
    self.depDb.db.rollback()
    suggMap.update(cachedSuggMap)
    return suggMap
def _getDepsetFromHeader(self, tags, mergeKmodSymbols=False):
    """Build a Conary DependencySet from one RPM header dep tag (or a
    (nameTag, versionTag) pair), translating file, rpmlib, soname,
    perl/config, locale and kernel-module dependency spellings."""
    # a tuple means (names tag, versions tag); lengths must agree or
    # versions are ignored
    if isinstance(tags, tuple):
        assert len(tags) == 2
        rpmdeps = self.get(tags[0], [])
        rpmvers = self.get(tags[1], [])
        if len(rpmdeps) != len(rpmvers):
            rpmvers = itertools.repeat(None, len(rpmdeps))
    else:
        rpmdeps = self.get(tags, [])
        rpmvers = itertools.repeat(None, len(rpmdeps))
    depset = deps.DependencySet()
    for dep, ver in itertools.izip(rpmdeps, rpmvers):
        if dep.startswith('/'):
            # absolute path: a file dependency
            depset.addDep(deps.FileDependencies, deps.Dependency(dep))
        elif dep.startswith('rpmlib'):
            # this is of the form rpmlib(Something). We just want the
            # Something
            depset.addDep(deps.RpmLibDependencies,
                          deps.Dependency(dep.split('(')[1].split(')')[0]))
        elif '(' in dep:
            if '.so' in dep.split('(')[0] and not (
                    dep.startswith('perl(') or dep.startswith('config(')):
                # assume it is a shlib or package name;
                # convert anything inside () to a flag
                flags = self.flagre.findall(dep)
                if flags:
                    # the dependency name is everything until the first (
                    dep = self.depnamere.match(dep).group(1)
                    if len(flags) == 2:
                        # if we have (flags)(64bit), we need to pop
                        # the 64bit marking off the end and namespace the
                        # dependency name.
                        dep += '[%s]' % flags.pop()
                    flags = [(x, deps.FLAG_SENSE_REQUIRED)
                             for x in flags if x]
                else:
                    flags = []
                depset.addDep(deps.RpmDependencies,
                              deps.Dependency(dep, flags))
            elif self.localere.match(dep):
                # locale RPM flags get translated to conary dep flags
                m = self.localere.match(dep)
                nf = m.group(1).split(':')
                if len(nf) == 1:
                    name = ''
                    flags = nf[0].split(';')
                else:
                    name = ':' + ':'.join(nf[0:-1])
                    flags = nf[-1].split(';')
                flags = [(x, deps.FLAG_SENSE_REQUIRED) for x in flags if x]
                depset.addDep(deps.RpmDependencies,
                              deps.Dependency('locale%s' % name, flags))
            elif self.kmodre.match(dep):
                # kernel module symbol dependency
                m = self.kmodre.match(dep)
                modname = m.group(2)
                # add the version if it is a hex string with at least
                # 8 chars
                l = None
                if ver and len(ver) >= 8:
                    try:
                        l = long(ver, 16)
                    except ValueError:
                        pass
                if l:
                    modname = "%s:%s" % (modname, ver)
                else:
                    log.warning("dependency '%s' is expected to have "
                                "a hexadecimal hash >= 8 characters "
                                "for a version. Instead it has a "
                                "version of '%s' which will be "
                                "ignored." % (dep, ver))
                if mergeKmodSymbols:
                    # one dep per module, symbols merged as flags
                    flags = [ (modname, deps.FLAG_SENSE_REQUIRED), ]
                    depset.addDep(deps.RpmDependencies,
                                  deps.Dependency(m.group(1), flags))
                else:
                    # one dep per symbol, namespaced with []
                    modname = '%s[%s]' % (m.group(1), modname)
                    flags = []
                    depset.addDep(deps.RpmDependencies,
                                  deps.Dependency(modname, flags))
            else:
                # replace any () with [] because () are special to Conary
                dep = dep.replace('(', '[').replace(')', ']')
                depset.addDep(deps.RpmDependencies,
                              deps.Dependency(dep, []))
        else:
            # plain package-name dependency
            depset.addDep(deps.RpmDependencies, deps.Dependency(dep, []))
    return depset
def toDepSet(self, dep, depClass): ds = deps.DependencySet() ds.addDep(depClass, dep) return ds
def do(self):
    """
    For each component backed by an RPM capsule, import the RPM header's
    requirements into the component's conary requirements: merge in any
    user-specified requirements, drop requirements the RPM itself
    provides, cull RPM requirements that duplicate an existing conary
    representation (perl and soname deps), apply dep exceptions, and
    union the remainder into the component's requires dep set.
    """
    for comp in self.recipe.autopkg.components.items():
        capsule = self.recipe._getCapsule(comp[0])
        # only RPM capsules are handled here
        if capsule and capsule[0] == 'rpm':
            if not self.filters:
                # compile path exception filters lazily, once
                self.filters = [(x, filter.Filter(x, self.macros))
                                for x in self.excepts]
            path = capsule[1]
            matchFound = False
            for regexp, f in self.filters:
                if f.match(path):
                    # this path is excepted; note the filter was used
                    self.unusedFilters['exceptions'].discard(regexp)
                    matchFound = True
            if matchFound:
                continue
            h = rpmhelper.readHeader(file(path))
            rReqs, rProv = h.getDeps(
                mergeKmodSymbols=self.mergeKmodSymbols)
            # integrate user specified requirements
            if self.requirements:
                userReqs = self.requirements.get(comp[0])
                if userReqs:
                    rReqs.union(userReqs)
            # remove rpm provisions from the requirements
            rReqs = rReqs.difference(rProv)
            # cull duplicate rpm reqs that have a standard conary
            # representations
            # currently we only handle perl and sonames
            culledReqs = deps.DependencySet()
            cnyReqs = comp[1].requires
            cnyProv = comp[1].provides
            if rReqs.hasDepClass(deps.RpmDependencies):
                # collect all conary soname deps (both required and
                # provided) so RPM soname requirements already covered
                # by them can be culled below
                soDeps = deps.DependencySet()
                soDeps.addDeps(deps.SonameDependencies, \
                    list(cnyReqs.iterDepsByClass(deps.SonameDependencies))+\
                    list(cnyProv.iterDepsByClass(deps.SonameDependencies)))
                for r in list(rReqs.iterDepsByClass(deps.RpmDependencies)):
                    # split "name(flags)" form into file part and flags
                    reMatch = self.rpmStringRe.match(r.name)
                    if reMatch and reMatch.groups():
                        rpmFile = reMatch.group(1)
                        rpmFlags = reMatch.group(2).strip()
                    else:
                        rpmFile = r.name
                        rpmFlags = ''
                    if rpmFile == 'perl' and rpmFlags:
                        # perl(Module) duplicates a conary perl dep
                        ds = deps.DependencySet()
                        dep = deps.Dependency(rpmFlags)
                        dep.flags = r.flags
                        ds.addDep(deps.PerlDependencies, dep)
                        if cnyReqs.satisfies(ds) or \
                           cnyProv.satisfies(ds):
                            culledReqs.addDep(deps.RpmDependencies, r)
                    elif '.so' in rpmFile:
                        # soname req duplicates a conary soname dep;
                        # the (64bit) flag selects the ELF64 namespace
                        ds = deps.DependencySet()
                        if rpmFlags == '64bit':
                            elfPrefix = 'ELF64/'
                        else:
                            elfPrefix = 'ELF32/'
                        dep = deps.Dependency(elfPrefix + rpmFile)
                        dep.flags = r.flags
                        ds.addDep(deps.SonameDependencies, dep)
                        if soDeps.satisfies(ds):
                            culledReqs.addDep(deps.RpmDependencies, r)
                rReqs = rReqs.difference(culledReqs)
            # remove any excepted deps
            for filt, exceptRe in self.exceptDeps:
                if filt.match(path):
                    for depClass, dep in list(rReqs.iterDeps()):
                        matchName = '%s: %s' % (depClass.tagName, str(dep))
                        if exceptRe.match(matchName):
                            rReqs.removeDeps(depClass, [dep])
            # whatever survives becomes a conary requirement
            cnyReqs.union(rReqs)
def do(self):
    """
    Scan the build's link lines (from the subscribed log) for -l flags
    and try to attribute each linked library to a trove that should be
    listed in buildRequires.  Libraries already explained by shared
    library requirements, by files packaged in this trove, or by files
    built in the builddir are skipped.  For the rest, the -L search
    paths on the same link line are probed for lib<name>.a/.so, and the
    owning troves are looked up in the system database.  Exactly one
    match produces a recommendation; multiple matches, matches with no
    owning trove, and no matches at all are each reported separately.
    """
    # For the purposes of this policy, the transitive buildRequires
    # includes suggestions already made for handling shared libraries,
    # since this policy is explicitly a fallback for the unusual
    # case of static linking outside of the package being built.
    transitiveBuildRequires = self.transitiveBuildRequires.union(
        self.warnedSoNames)
    cfg = self.recipe.cfg
    db = database.Database(cfg.root, cfg.dbPath)
    foundLibNames = set()           # -l names already resolved/reported
    allPossibleProviders = set()    # every candidate trove seen
    missingBuildRequires = set()    # confident recommendations
    self.buildDirLibNames = None    # lazily-built builddir library cache
    destdir = self.recipe.macros.destdir
    builddir = self.recipe.macros.builddir
    tooManyChoices = {}             # libName -> [files, trove choices]
    noTroveFound = {}               # libName -> files with no owner
    noLibraryFound = set()          # libName with no file found anywhere
    components = self.recipe.autopkg.components
    pathMap = self.recipe.autopkg.pathMap
    reqDepSet = deps.DependencySet()
    sharedLibraryRequires = set()
    # union all components' requirements, then derive the set of short
    # library names already covered by soname dependencies (with and
    # without the "lib" prefix)
    for pkg in components.values():
        reqDepSet.union(pkg.requires)
    for dep in reqDepSet.iterDepsByClass(deps.SonameDependencies):
        soname = os.path.basename(dep.name).split('.')[0]
        sharedLibraryRequires.add(soname)
        if soname.startswith('lib'):
            sharedLibraryRequires.add(soname[3:])
        else:
            sharedLibraryRequires.add('lib%s' %soname)
    # library names provided by files packaged in this very build
    troveLibraries = set()
    for path in pathMap.iterkeys():
        basename = os.path.basename(path)
        if basename.startswith('lib') and basename.find('.') >= 0:
            troveLibraries.add(basename[3:].split('.')[0])
    self.recipe.synchronizeLogs()
    f = file(self.recipe.getSubscribeLogPath())
    libRe = re.compile('^-l[a-zA-Z]+$')
    libDirRe = re.compile('^-L/..*$')

    def logLineTokens():
        # yield tokenized link lines: first those matching self.r from
        # the subscribe log, then any lines queued in self.logLines
        for logLine in f:
            logLine = logLine.strip()
            if not self.r.match(logLine):
                continue
            yield logLine.split()
        for logLine in self.logLines:
            yield logLine.split()

    def pathSetToTroveSet(pathSet):
        # map found library files to the best trove name owning each,
        # recording every candidate in allPossibleProviders
        troveSet = set()
        for path in pathSet:
            for pathReq in set(trove.getName()
                               for trove in db.iterTrovesByPath(path)):
                pathReqCandidates = _providesNames(pathReq)
                # remove any recursive or non-existing buildreqs
                pathReqCandidates = [x for x in pathReqCandidates
                                     if db.hasTroveByName(x)]
                if not pathReqCandidates:
                    continue
                allPossibleProviders.update(pathReqCandidates)
                # only the best option
                pathReqCandidates = pathReqCandidates[0:1]
                # now apply exceptions
                pathReqCandidates = self._removeExceptionsFromList(
                    pathReqCandidates)
                troveSet.add(pathReqCandidates[0])
        return troveSet

    def buildDirContains(libName):
        # If we can find this library built somewhere in the
        # builddir, chances are that the internal library is
        # what is being linked to in any case.
        if self.buildDirLibNames is None:
            # walk builddir once, the first time this is called
            self.buildDirLibNames = set()
            for dirpath, dirnames, filenames in os.walk(builddir):
                for fileName in filenames:
                    if fileName.startswith('lib') and '.' in fileName:
                        self.buildDirLibNames.add(
                            fileName[3:].split('.')[0])
        return libName in self.buildDirLibNames

    for tokens in logLineTokens():
        libNames = set(x[2:] for x in tokens if libRe.match(x))
        # Add to this set, for this line only, system library dirs,
        # nothing in destdir or builddir
        libDirs = self.libDirs.copy()
        for libDir in set(x[2:].rstrip('/') for x in tokens
                          if libDirRe.match(x)
                             and not x[2:].startswith(destdir)
                             and not x[2:].startswith(builddir)):
            libDir = util.normpath(libDir)
            # probe both under cfg.root and as-is, reporting the
            # unrooted path
            libDirs.setdefault(
                util.normpath('%s%s' %(cfg.root, libDir)), libDir)
            libDirs.setdefault(libDir, libDir)
        for libName in sorted(list(libNames)):
            if libName not in foundLibNames:
                # already satisfied by a shared library dependency
                if libName in sharedLibraryRequires:
                    foundLibNames.add(libName)
                    continue
                # provided by a file packaged in this build
                if libName in troveLibraries:
                    foundLibNames.add(libName)
                    continue
                # built internally in the builddir
                if buildDirContains(libName):
                    foundLibNames.add(libName)
                    continue
                foundLibs = set()
                for libDirRoot, libDir in libDirs.iteritems():
                    for ext in ('a', 'so'):
                        # If there is no .a, look for the .so in case
                        # no shared library dependency is found from
                        # packaged files (CNP-132)
                        if util.exists('%s/lib%s.%s' %(libDirRoot,
                                                       libName, ext)):
                            foundLibs.add('%s/lib%s.%s' %(libDir,
                                                          libName, ext))
                            break
                troveSet = pathSetToTroveSet(foundLibs)
                if len(troveSet) == 1:
                    # found just one, we can confidently recommend it
                    recommended = list(troveSet)[0]
                    if recommended not in transitiveBuildRequires:
                        self.info(
                            "Add '%s' to buildRequires for -l%s (%s)",
                            recommended, libName,
                            ', '.join(sorted(list(foundLibs))))
                        missingBuildRequires.add(recommended)
                        foundLibNames.add(libName)
                elif len(troveSet):
                    # found more, we might need to recommend a choice
                    tooManyChoices.setdefault(libName, [
                        ' '.join(sorted(list(foundLibs))),
                        "', '".join(sorted(list(troveSet)))])
                elif foundLibs:
                    # found files on system, but no troves providing them
                    noTroveFound.setdefault(libName,
                        ' '.join(sorted(list(foundLibs))))
                else:
                    # note that this does not prevent us from
                    # *looking* again, because the next time
                    # there might be a useful -L in the link line
                    noLibraryFound.add(libName)
    if tooManyChoices:
        for libName in sorted(list(tooManyChoices.keys())):
            if libName not in foundLibNames:
                # Found multiple choices for libName, and never came
                # up with a better recommendation, so recommend a choice.
                # Note: perhaps someday this can become an error
                # when we have a better sense of how frequently
                # it is wrong...
                foundLibNames.add(libName)
                foundLibs, troveSet = tooManyChoices[libName]
                self.warn('Multiple troves match files %s for -l%s:'
                          ' choose one of the following entries'
                          " for buildRequires: '%s'",
                          foundLibs, libName, troveSet)
    if noTroveFound:
        for libName in sorted(list(noTroveFound.keys())):
            if libName not in foundLibNames:
                # Never found any trove containing these libraries,
                # not even a file in the builddir
                foundLibNames.add(libName)
                foundLibs = noTroveFound[libName]
                self.info('No trove found matching any of files'
                          ' %s for -l%s:'
                          ' possible missing buildRequires',
                          foundLibs, libName)
    if noLibraryFound:
        for libName in sorted(list(noLibraryFound)):
            if libName not in foundLibNames:
                # Note: perhaps someday this can become an error
                # when we have a better sense of how frequently
                # it is wrong...
                self.info('No files found matching -l%s:'
                          ' possible missing buildRequires',
                          libName)
    if missingBuildRequires:
        self.talk('add to buildRequires: %s',
                  str(sorted(list(missingBuildRequires))))
        reportMissingBuildRequires(self.recipe, missingBuildRequires)
    if allPossibleProviders:
        reportFoundBuildRequires(self.recipe, allPossibleProviders)
    f.close()
def testTroves(self, flavor=None):
    """
    Integration test of trove storage and retrieval: adds a component
    trove (with a huge dependency stream), verifies dependency-table
    deduplication and stored sha1s, round-trips the trove, then adds a
    package trove referencing it and exercises iterTroves ordering,
    missing-trove handling, getTroveFlavors, and getFiles lookups.
    """
    if flavor is None:
        flavor = deps.Flavor()
    store = self._connect()
    dirSet = set(['/etc', '/bin'])
    baseSet = set(['passwd', 'services', 'group', '1', '2', '3',
                   'distributed'])
    v10 = ThawVersion("/conary.rpath.com@test:trunk/10:1.2-10")
    branch = v10.branch()
    store.createTroveBranch("testtrove", branch)
    # build file objects from real filesystem files
    f1 = files.FileFromFilesystem("/etc/passwd", self.id1)
    f2 = files.FileFromFilesystem("/etc/services", self.id2)
    f3 = files.FileFromFilesystem("/etc/group", self.id3)
    # make a really huge dependency, thus a very large file stream
    req = deps.DependencySet()
    for x in xrange(10000):
        req.addDep(deps.SonameDependencies,
                   deps.Dependency("libtest.so.%d" % x))
    f3.requires.set(req)
    # make sure it's way too big for a blob in mysql
    assert (len(f3.freeze()) >= 50000)
    cl = changelog.ChangeLog("test", "*****@*****.**", """\
Some changes are good.
Some changes are bad.
Some changes just are.
""")
    trv = trove.Trove('testcomp', v10, flavor, cl)
    trv.addFile(f1.pathId(), "/bin/1", v10, f1.fileId())
    trv.addFile(f2.pathId(), "/bin/2", v10, f2.fileId())
    trv.addFile(f3.pathId(), "/bin/3", v10, f3.fileId())
    # a file whose contents live in a different repository
    trv.addFile(self.id4, "/bin/distributed", v10, self.fid4)
    trv.troveInfo.size.set(1234)
    trv.troveInfo.sourceName.set('somesource')
    req = deps.DependencySet()
    req.addDep(deps.FileDependencies, deps.Dependency("/bin/bash"))
    req.addDep(deps.TroveDependencies, deps.Dependency("foo:runtime"))
    req.addDep(deps.SonameDependencies, deps.Dependency("libtest.so.1"))
    trv.setRequires(req)
    # this also lets us peek at the database to make sure libtest.so.1
    # is only in the dep table once
    prv = deps.DependencySet()
    prv.addDep(deps.SonameDependencies, deps.Dependency("libtest.so.1"))
    trv.setProvides(prv)
    trv.computeDigests()
    store.db.transaction()
    store.addTroveSetStart([], dirSet, baseSet)
    troveInfo = store.addTrove(trv, trv.diff(None)[0])
    troveInfo.addFile(f1.pathId(), "/bin/1", f1.fileId(), v10,
                      fileStream=f1.freeze())
    troveInfo.addFile(f2.pathId(), "/bin/2", f2.fileId(), v10,
                      fileStream=f2.freeze())
    troveInfo.addFile(f3.pathId(), "/bin/3", f3.fileId(), v10,
                      fileStream=f3.freeze())
    troveInfo.addFile(self.id4, "/bin/distributed", self.fid4, v10)
    store.addTroveDone(troveInfo)
    store.addTroveSetDone()
    store.db.commit()
    cu = store.db.cursor()
    # libtest.so.1 appears as both a requirement and a provision; it
    # must still be stored only once
    cu.execute("SELECT count(*) FROM Dependencies WHERE "
               "name = 'libtest.so.1'")
    self.assertEqual(cu.next(), (1, ))
    # make sure the sha1s were stored
    cu.execute("""
        SELECT dirname, basename, sha1
        FROM TroveFiles
        JOIN FileStreams USING (streamId)
        JOIN FilePaths ON TroveFiles.filePathId = FilePaths.filePathId
        JOIN Dirnames ON FilePaths.dirnameId = Dirnames.dirnameId
        JOIN Basenames ON FilePaths.basenameId = Basenames.basenameId
        ORDER BY dirname,basename""")
    items = [(os.path.join(cu.frombinary(x[0]), cu.frombinary(x[1])),
              cu.frombinary(x[2])) for x in cu.fetchall()]
    self.assertEqual(items, [("/bin/1", f1.contents.sha1()),
                             ("/bin/2", f2.contents.sha1()),
                             ("/bin/3", f3.contents.sha1()),
                             ("/bin/distributed", None)])
    # round-trip the trove and compare against what we stored
    cl = changelog.ChangeLog("test", "*****@*****.**", "another log\n")
    fromRepos = store.getTrove("testcomp", v10, flavor, cl)
    self.assertEqual(fromRepos, trv)
    self.assertEqual(fromRepos.getVersion().timeStamps(),
                     trv.getVersion().timeStamps())
    self.assertEqual(fromRepos.getChangeLog(), trv.getChangeLog())
    # withFiles=False must suppress the file list
    self.assertEqual([x for x in
        store.getTrove("testcomp", v10, flavor,
                       withFiles=False).iterFileList()], [])
    l = store.iterFilesInTrove("testcomp", v10, flavor, sortByPath=True)
    l = [x for x in l]
    self.assertEqual(l, [(f1.pathId(), "/bin/1", f1.fileId(), v10),
                         (f2.pathId(), "/bin/2", f2.fileId(), v10),
                         (f3.pathId(), "/bin/3", f3.fileId(), v10),
                         (self.id4, "/bin/distributed", self.fid4, v10)])
    # add a package trove referencing the component (plus a weak ref)
    cl = changelog.ChangeLog("test", "*****@*****.**",
                             "log for testpkg\n")
    trv2 = trove.Trove("testpkg", v10, flavor, cl)
    trv2.addTrove(trv.getName(), v10, flavor)
    trv2.addTrove("weakref", v10, flavor, weakRef=True)
    trv2.computeDigests()
    store.addTroveSetStart([], dirSet, baseSet)
    troveInfo = store.addTrove(trv2, trv2.diff(None)[0])
    store.addTroveDone(troveInfo)
    store.addTroveSetDone()
    self.assertEqual(store.getTrove("testpkg", v10, flavor), trv2)
    # iterTroves must preserve request order and yield None for
    # troves that do not exist
    self.assertEqual([x for x in
        store.iterTroves([("testcomp", v10, flavor),
                          ("testpkg", v10, flavor)])], [trv, trv2])
    self.assertEqual([x for x in
        store.iterTroves([("testpkg", v10, flavor),
                          ("testcomp", v10, flavor)])], [trv2, trv])
    self.assertEqual([x for x in
        store.iterTroves([("testpkg", v10, flavor),
                          ("testpkg", v10, flavor)])], [trv2, trv2])
    self.assertEqual([x for x in
        store.iterTroves([("testpkg", v10, flavor),
                          ("blah", v10, flavor)])], [trv2, None])
    self.assertEqual([x for x in
        store.iterTroves([("blah", v10, flavor),
                          ("testpkg", v10, flavor)])], [None, trv2])
    self.assertEqual(
        [x for x in store.iterTroves([("blah", v10, flavor)])], [None])
    self.assertEqual([x for x in
        store.iterTroves([("testcomp", v10, flavor),
                          ("blah", v10, flavor),
                          ("testpkg", v10, flavor)])], [trv, None, trv2])
    # erasing doesn't work
    #store.eraseTrove("testcomp", v10, None)
    #store.commit()
    self.assertEqual(store.getTrove("testpkg", v10, flavor), trv2)
    map = {'testpkg': [v10]}
    flavors = store.getTroveFlavors(map)
    if flavor is not None:
        flavorStr = flavor.freeze()
    else:
        flavorStr = ''
    self.assertEqual(flavors, {'testpkg': {v10: [flavorStr]}})
    # unknown trove name yields an empty flavor list
    map = {'testpkg3': [v10]}
    flavors = store.getTroveFlavors(map)
    self.assertEqual(flavors, {'testpkg3': {v10: []}})
    # test getFiles
    fileObjs = store.getFiles([(f1.pathId(), f1.fileId()),
                               (f2.pathId(), f2.fileId())])
    self.assertEqual(fileObjs[(f1.pathId(), f1.fileId())], f1)
    self.assertEqual(fileObjs[(f2.pathId(), f2.fileId())], f2)
    # test that asking for an invalid fileid/pathid pair results
    # in no entry for the (pathid, fileid) in the returned dict
    invalidPathId = md5FromString('9' * 32)
    invalidFileId = sha1FromString('9' * 40)
    fileObjs = store.getFiles([(invalidPathId, invalidFileId)])
    # make sure fileObjs is empty
    assert (not fileObjs)
    # test that asking for contents that have to come from
    # a different repository works - we should get None
    # back
    fileObjs = store.getFiles([(self.id4, self.fid4)])
    self.assertEqual(fileObjs, {(self.id4, self.fid4): None})