Example no. 1
0
    def testURLArguments(self):
        """Verify laUrl.filePath() handling of query strings, mirror
        parents, missing filenames, and colons in the host part."""
        # A plain URL maps to /host/filename.
        plain = lookaside.laUrl('http://foo.example.com/bar.tar')
        self.assertEqual(plain.filePath(), '/foo.example.com/bar.tar')

        # Query arguments are preserved in the cache path.
        withArgs = lookaside.laUrl('http://foo.example.com/bar.tar?arg=value')
        self.assertEqual(withArgs.filePath(),
                         '/foo.example.com/bar.tar?arg=value')

        # With a mirror:// parent, the parent's path (and its arguments)
        # determines the cache path.
        parentUrl = lookaside.laUrl('mirror://testmirror.com/baz.tar?arg=bif')
        child = lookaside.laUrl('http://foo.example.com/bar.tar?arg=value',
                                parent=parentUrl)
        self.assertEqual(child.filePath(), '/testmirror.com/baz.tar?arg=bif')

        # A URL with arguments but no filename component.
        noName = lookaside.laUrl('http://foo.example.com/?arg=value')
        self.assertEqual(noName.filePath(), '/foo.example.com/?arg=value')

        # CNY-3674: a colon in the host portion must survive parsing.
        bzrUrl = lookaside.laUrl(
            'lookaside://lp:lightdm/lp:lightdm--466.tar.bz2')
        self.assertEqual(bzrUrl.host, 'lp:lightdm')
Example no. 2
0
    def testURLArguments(self):
        """laUrl.filePath() must keep query arguments intact."""
        # Parent-less URLs: the cache path is /host/filename plus any
        # query arguments.
        cases = [
            ('http://foo.example.com/bar.tar',
             '/foo.example.com/bar.tar'),
            ('http://foo.example.com/bar.tar?arg=value',
             '/foo.example.com/bar.tar?arg=value'),
            ('http://foo.example.com/?arg=value',
             '/foo.example.com/?arg=value'),
        ]
        for rawUrl, expected in cases:
            self.assertEqual(lookaside.laUrl(rawUrl).filePath(), expected)

        # mirror url with arguments: the parent's path wins
        mirror = lookaside.laUrl('mirror://testmirror.com/baz.tar?arg=bif')
        child = lookaside.laUrl('http://foo.example.com/bar.tar?arg=value',
                                parent=mirror)
        self.assertEqual(child.filePath(), '/testmirror.com/baz.tar?arg=bif')

        # CNY-3674: host may contain a colon
        special = lookaside.laUrl('lookaside://lp:lightdm/lp:lightdm--466.tar.bz2')
        self.assertEqual(special.host, 'lp:lightdm')
Example no. 3
0
    def _merge(self):
        """Build new source troves for every package and commit them.

        For each (package, recipe, old trove) triple: create a new source
        trove, add the upstream recipe files and any auto sources the recipe
        requests (recycling pathIds, fileIds and file versions from the old
        trove when the content is unchanged), and skip packages whose new
        trove is identical to the old one.  All changed troves are committed
        to the repository in a single signed changeset.
        """
        changeSet = ChangeSet()
        deleteDirs = set()  # temporary snapshot dirs removed at the end
        doCommit = False
        # If this is not None then all ephemeral sources will still be fetched
        # but will be placed in this directory instead.
        if self.helper.plan.ephemeralSourceDir:
            ephDir = self.helper.makeEphemeralDir()
        else:
            ephDir = None

        def _addFile(path, contents, isText):
            # Add one upstream file to the trove being built.  Closure over
            # the per-package loop variables (oldFiles, filesToAdd, newTrove)
            # assigned in the loop below.
            if path in oldFiles:
                # Always recycle pathId if available.
                pathId, _, oldFileId, oldFileVersion = oldFiles[path]
            else:
                pathId = hashlib.md5(path).digest()
                oldFileId = oldFileVersion = None

            fileHelper = filetypes.RegularFile(contents=contents,
                    config=isText)
            fileStream = fileHelper.get(pathId)
            fileStream.flags.isSource(set=True)
            fileId = fileStream.fileId()

            # If the fileId matches, recycle the fileVersion too.
            if fileId == oldFileId:
                fileVersion = oldFileVersion
            else:
                fileVersion = newTrove.getVersion()

            filesToAdd[fileId] = (fileStream, fileHelper.contents, isText)
            newTrove.addFile(pathId, path, fileVersion, fileId)

        for package, (recipeText, recipeObj), oldTrove in zip(
                self.packages, self.recipes, self.oldTroves):

            filesToAdd = {}
            oldFiles = {}
            if oldTrove is not None:
                # Index the previous version's files by path so their ids
                # can be recycled.
                for pathId, path, fileId, fileVer in oldTrove.iterFileList():
                    oldFiles[path] = (pathId, path, fileId, fileVer)
            newTrove = Trove(package.name, package.nextVersion, deps.Flavor())
            newTrove.setFactory(package.targetConfig.factory)

            # Add upstream files to new trove. Recycle pathids from the old
            # version.
            # LAZY: assume that everything other than the recipe is binary.
            # Conary has a magic module, but it only accepts filenames!
            for path, contents in package.recipeFiles.iteritems():
                isText = path == package.getRecipeName()
                _addFile(path, contents, isText)

            # Collect requested auto sources from recipe. Unknown recipe types
            # will not be loaded so recipeObj will be the class, so assume
            # these have no sources.
            if not inspect.isclass(recipeObj):
                recipeFiles = dict((os.path.basename(x.getPath()), x)
                    for x in recipeObj.getSourcePathList())
                newFiles = set(x[1] for x in newTrove.iterFileList())

                needFiles = set(recipeFiles) - newFiles
                for autoPath in needFiles:
                    source = recipeFiles[autoPath]
                    if getattr(source, 'contents', None
                            ) and not source.sourcename:
                        # Ignore trove scripts that have inline contents
                        continue
                    if not autoPath:
                        # (sic: "gussed" is a typo for "guessed" in the
                        # user-facing message)
                        log.error("bob does not support 'gussed' filenames; "
                                "please use a full path for source '%s' in "
                                "package %s", source.getPath(), package.name)
                        raise RuntimeError("Unsupported source action")
                    if (autoPath in oldFiles
                            and not self.helper.plan.refreshSources
                            and not source.ephemeral):
                        # File exists in old version.
                        pathId, path, fileId, fileVer = oldFiles[autoPath]
                        newTrove.addFile(pathId, path, fileVer, fileId)
                        continue

                    if source.ephemeral and not ephDir:
                        # Ephemeral sources are only fetched when an
                        # ephemeral source dir was configured above.
                        continue

                    # File doesn't exist; need to create it.
                    if source.ephemeral:
                        laUrl = lookaside.laUrl(source.getPath())
                        tempDir = joinPaths(ephDir,
                                os.path.dirname(laUrl.filePath()))
                        mkdirChain(tempDir)
                    else:
                        tempDir = tempfile.mkdtemp()
                        deleteDirs.add(tempDir)
                    snapshot = _getSnapshot(self.helper, package, source,
                            tempDir)

                    # Ephemeral sources are fetched but never added to the
                    # new trove.
                    if not source.ephemeral and snapshot:
                        autoPathId = hashlib.md5(autoPath).digest()
                        autoObj = FileFromFilesystem(snapshot, autoPathId)
                        autoObj.flags.isAutoSource(set=True)
                        autoObj.flags.isSource(set=True)
                        autoFileId = autoObj.fileId()

                        autoContents = filecontents.FromFilesystem(snapshot)
                        filesToAdd[autoFileId] = (autoObj, autoContents, False)
                        newTrove.addFile(autoPathId, autoPath,
                            newTrove.getVersion(), autoFileId)

            # If the old and new troves are identical, just use the old one.
            if oldTrove and _sourcesIdentical(
                    oldTrove, newTrove, [self.oldChangeSet, filesToAdd]):
                package.setDownstreamVersion(oldTrove.getVersion())
                log.debug('Skipped %s=%s', oldTrove.getName(),
                        oldTrove.getVersion())
                continue

            # Add files and contents to changeset.
            for fileId, (fileObj, fileContents, cfgFile) in filesToAdd.items():
                changeSet.addFileContents(fileObj.pathId(), fileObj.fileId(),
                    ChangedFileTypes.file, fileContents, cfgFile)
                changeSet.addFile(None, fileObj.fileId(), fileObj.freeze())

            # Create a changelog entry.
            changeLog = ChangeLog(
                name=self.helper.cfg.name, contact=self.helper.cfg.contact,
                message=self.helper.plan.commitMessage + '\n')
            newTrove.changeChangeLog(changeLog)

            # Calculate trove digests and add the trove to the changeset
            newTrove.invalidateDigests()
            newTrove.computeDigests()
            newTroveCs = newTrove.diff(None, absolute=True)[0]
            changeSet.newTrove(newTroveCs)
            doCommit = True

            package.setDownstreamVersion(newTrove.getVersion())
            log.debug('Created %s=%s', newTrove.getName(), newTrove.getVersion())

        if doCommit:
            cook.signAbsoluteChangesetByConfig(changeSet, self.helper.cfg)
            # Write the changeset to disk first so it can be inspected if
            # the commit fails; it is deleted only on success.
            f = tempfile.NamedTemporaryFile(dir=os.getcwd(), suffix='.ccs',
                    delete=False)
            f.close()
            changeSet.writeToFile(f.name)
            try:
                self.helper.getRepos().commitChangeSet(changeSet)
            except:
                # Intentionally broad: report where the changeset was saved,
                # then re-raise the original error.
                log.error("Error committing changeset to repository, "
                        "failed changeset is saved at %s", f.name)
                raise
            else:
                os.unlink(f.name)

        for path in deleteDirs:
            shutil.rmtree(path)
Example no. 4
0
    def populateLcache(self):
        """
        Populate a repository lookaside cache

        Walks this recipe class and its Recipe ancestors, collects the
        regular files from each class's source trove, and registers in the
        lookaside cache every file that corresponds to one of this recipe's
        source actions.  State is kept on ``self._lcstate`` so repeated
        calls only process newly added source actions.
        """

        class lcachePopulationState:
            """Used to track the state of the lcache to enable it to be
            efficiently populated on-demand"""

            def __init__(self):
                # Instance attributes instead of shared mutable class
                # attributes, so every state object owns its containers.
                self.classes = None
                self.sourcePaths = {}
                self.completedActions = set()
                self.pathMap = {}

        # Nothing to populate without a repository-backed lookaside cache.
        if not self.laReposCache.repos:
            return

        if not self._lcstate:
            repos = self.laReposCache.repos
            self._lcstate = lcachePopulationState()

            # build a list containing this recipe class and any ancestor class
            # from which it descends
            recipeClass = self.__class__
            classes = [recipeClass]
            bases = list(recipeClass.__bases__)
            while bases:
                parent = bases.pop()
                bases.extend(list(parent.__bases__))
                if issubclass(parent, Recipe):
                    classes.append(parent)
            # reverse the class list, this way the files will be found in the
            # youngest descendant first
            classes.reverse()
            self._lcstate.classes = classes

            for rclass in self._lcstate.classes:
                if not rclass._trove:
                    continue
                srcName = rclass._trove.getName()
                srcVersion = rclass._trove.getVersion()
                # CNY-31: walk over the files in the trove we found upstream
                # (which we may have modified to remove the non-autosourced
                # files.
                # Also, if an autosource file is marked as needing to be
                # refreshed in the Conary state file, the lookaside cache has
                # to win, so don't populate it with the repository file)
                fileList = []
                for pathId, path, fileId, version in rclass._trove.iterFileList():

                    assert path[0] != "/"
                    # we might need to retrieve this source file
                    # to enable a build, so we need to find the
                    # sha1 hash of it since that's how it's indexed
                    # in the file store
                    if isinstance(version, versions.NewVersion):
                        # don't try and look up things on the NewVersion label!
                        continue
                    fileList.append((pathId, path, fileId, version))

                fileObjs = repos.getFileVersions(
                    [(x[0], x[2], x[3]) for x in fileList])
                # Only regular files are recorded in the path map.
                for fileObj, (pathId, path, fileId, version) in zip(
                        fileObjs, fileList):
                    if isinstance(fileObj, files_mod.RegularFile):
                        self._lcstate.pathMap[path] = (
                            srcName, srcVersion, pathId, path, fileId,
                            version, fileObj)

        # populate the repository source lookaside cache from the :source
        # components
        sourcePaths = self._lcstate.sourcePaths
        actions = set(self.getSourcePathList()) - self._lcstate.completedActions
        for a in actions:
            self._lcstate.completedActions.add(a)
            ps = a.getPathAndSuffix()

            # check if we have an autosourced file
            if any(x in ps[0] for x in ["://", ":pserver:", ":ext:"]):
                # use guess name if it is provided
                k = os.path.basename(ps[0]) or ps[1]
                assert k
                sourcePaths[k] = ps
            else:
                sourcePaths[ps[0]] = ps
                if a.ephemeral:
                    raise RecipeFileError("File '%s' is marked as ephemeral " "but is not autosourced" % (ps[0],))

        pathMap = self._lcstate.pathMap
        delList = []
        for path in pathMap:
            fullPath = None
            if path in sourcePaths:
                fullPath = lookaside.laUrl(sourcePaths[path][0]).filePath()
            elif path.find("/") == -1:  # we might have a guessed name
                # Try to reconstruct a "<base>.<suffix>" name from the known
                # source paths and their candidate suffixes.
                for k in sourcePaths:
                    if k and path.startswith(k) and sourcePaths[k][2]:
                        for sk in sourcePaths[k][2]:
                            if path.endswith(sk) and len(k) + len(sk) == len(path) - 1:
                                fullUrl = sourcePaths[k][0] + k + "." + sk
                                fullPath = lookaside.laUrl(fullUrl).filePath()

            (srcName, srcVersion, pathId, path, fileId, version, fileObj) = pathMap[path]
            if not fullPath:
                if fileObj.flags.isAutoSource():
                    # Autosourced file with no matching action yet; keep it
                    # in pathMap for a later call.
                    continue
                else:
                    fullPath = path
            self.laReposCache.addFileHash(
                fullPath,
                srcName,
                srcVersion,
                pathId,
                path,
                fileId,
                version,
                fileObj.contents.sha1(),
                fileObj.inode.perms(),
            )
            delList.append(path)

        # Drop everything that was registered so it is not processed again.
        for path in delList:
            if path in sourcePaths:
                del sourcePaths[path]
            del pathMap[path]
Example no. 5
0
    def populateLcache(self):
        """
        Populate a repository lookaside cache

        Walks this recipe class and its Recipe ancestors, collects the
        regular files from each class's source trove, and registers in the
        lookaside cache every file that corresponds to one of this recipe's
        source actions.  Incremental: state is kept on ``self._lcstate`` so
        repeated calls only process newly added source actions.
        """
        class lcachePopulationState:
            """Used to track the state of the lcache to enable it to be
            efficiently populated on-demand"""
            # NOTE(review): these are shared, mutable class attributes; this
            # appears safe only because the class is redefined per call and
            # instantiated at most once per recipe instance — confirm.
            classes = None
            sourcePaths = {}
            completedActions = set()
            pathMap = {}

        # Nothing to populate without a repository-backed lookaside cache.
        if not self.laReposCache.repos:
            return

        if not self._lcstate:
            repos = self.laReposCache.repos
            self._lcstate = lcachePopulationState()

            # build a list containing this recipe class and any ancestor class
            # from which it descends
            recipeClass = self.__class__
            classes = [recipeClass]
            bases = list(recipeClass.__bases__)
            while bases:
                parent = bases.pop()
                bases.extend(list(parent.__bases__))
                if issubclass(parent, Recipe):
                    classes.append(parent)
            # reverse the class list, this way the files will be found in the
            # youngest descendant first
            classes.reverse()
            self._lcstate.classes = classes

            for rclass in self._lcstate.classes:
                if not rclass._trove:
                    continue
                srcName = rclass._trove.getName()
                srcVersion = rclass._trove.getVersion()
                # CNY-31: walk over the files in the trove we found upstream
                # (which we may have modified to remove the non-autosourced
                # files.
                # Also, if an autosource file is marked as needing to be
                # refreshed in the Conary state file, the lookaside cache has
                # to win, so don't populate it with the repository file)
                fileList = []
                for pathId, path, fileId, version in \
                        rclass._trove.iterFileList():

                    assert (path[0] != "/")
                    # we might need to retrieve this source file
                    # to enable a build, so we need to find the
                    # sha1 hash of it since that's how it's indexed
                    # in the file store
                    if isinstance(version, versions.NewVersion):
                        # don't try and look up things on the NewVersion label!
                        continue
                    fileList.append((pathId, path, fileId, version))

                fileObjs = repos.getFileVersions([(x[0], x[2], x[3])
                                                  for x in fileList])
                # Only regular files are recorded in the path map.
                for i in range(len(fileList)):
                    fileObj = fileObjs[i]
                    if isinstance(fileObj, files_mod.RegularFile):
                        (pathId, path, fileId, version) = fileList[i]
                        self._lcstate.pathMap[path] = (srcName, srcVersion,
                                                       pathId, path, fileId,
                                                       version, fileObj)

        # populate the repository source lookaside cache from the :source
        # components
        sourcePaths = self._lcstate.sourcePaths
        actions = set(
            self.getSourcePathList()) - self._lcstate.completedActions
        for a in actions:
            self._lcstate.completedActions.add(a)
            ps = a.getPathAndSuffix()

            # check if we have an autosourced file
            if any(x in ps[0] for x in ['://', ':pserver:', ':ext:']):
                # use guess name if it is provided
                k = os.path.basename(ps[0]) or ps[1]
                assert (k)
                sourcePaths[k] = ps
            else:
                sourcePaths[ps[0]] = ps
                if a.ephemeral:
                    raise RecipeFileError("File '%s' is marked as ephemeral "
                                          "but is not autosourced" % (ps[0], ))

        pathMap = self._lcstate.pathMap
        delList = []
        for path in pathMap:
            fullPath = None
            if path in sourcePaths:
                fullPath = lookaside.laUrl(sourcePaths[path][0]).filePath()
            elif path.find("/") == -1:  # we might have a guessed name
                # Try to reconstruct a "<base>.<suffix>" name from the known
                # source paths and their candidate suffixes.
                for k in sourcePaths:
                    if k and path.startswith(k) and sourcePaths[k][2]:
                        for sk in sourcePaths[k][2]:
                            if path.endswith(sk) and \
                                    len(k) + len(sk) == len(path)-1:
                                fullUrl = sourcePaths[k][0] + k + '.' + sk
                                fullPath = \
                                    lookaside.laUrl(fullUrl).filePath()

            (srcName, srcVersion, pathId, path, fileId, version, fileObj) = \
                pathMap[path]
            if not fullPath:
                if fileObj.flags.isAutoSource():
                    # Autosourced file with no matching action yet; keep it
                    # in pathMap for a later call.
                    continue
                else:
                    fullPath = path
            self.laReposCache.addFileHash(fullPath, srcName, srcVersion,
                                          pathId, path, fileId, version,
                                          fileObj.contents.sha1(),
                                          fileObj.inode.perms())
            delList.append(path)

        # Drop everything that was registered so it is not processed again.
        for path in delList:
            if path in sourcePaths:
                del sourcePaths[path]
            del pathMap[path]
Example no. 6
0
    def _merge(self):
        """Build new source troves for every package and commit them.

        For each (package, recipe, old trove) triple: create a new source
        trove, add the upstream recipe files and any auto sources the recipe
        requests (recycling pathIds, fileIds and file versions from the old
        trove when the content is unchanged), and skip packages whose new
        trove is identical to the old one.  All changed troves are committed
        to the repository in a single signed changeset.
        """
        changeSet = ChangeSet()
        deleteDirs = set()  # temporary snapshot dirs removed at the end
        doCommit = False
        # If this is not None then all ephemeral sources will still be fetched
        # but will be placed in this directory instead.
        if self.helper.plan.ephemeralSourceDir:
            ephDir = self.helper.makeEphemeralDir()
        else:
            ephDir = None

        def _addFile(path, contents, isText):
            # Add one upstream file to the trove being built.  Closure over
            # the per-package loop variables (oldFiles, filesToAdd, newTrove)
            # assigned in the loop below.
            if path in oldFiles:
                # Always recycle pathId if available.
                pathId, _, oldFileId, oldFileVersion = oldFiles[path]
            else:
                pathId = hashlib.md5(path).digest()
                oldFileId = oldFileVersion = None

            fileHelper = filetypes.RegularFile(contents=contents,
                    config=isText)
            fileStream = fileHelper.get(pathId)
            fileStream.flags.isSource(set=True)
            fileId = fileStream.fileId()

            # If the fileId matches, recycle the fileVersion too.
            if fileId == oldFileId:
                fileVersion = oldFileVersion
            else:
                fileVersion = newTrove.getVersion()

            filesToAdd[fileId] = (fileStream, fileHelper.contents, isText)
            newTrove.addFile(pathId, path, fileVersion, fileId)

        for package, (recipeText, recipeObj), oldTrove in zip(
                self.packages, self.recipes, self.oldTroves):

            filesToAdd = {}
            oldFiles = {}
            if oldTrove is not None:
                # Index the previous version's files by path so their ids
                # can be recycled.
                for pathId, path, fileId, fileVer in oldTrove.iterFileList():
                    oldFiles[path] = (pathId, path, fileId, fileVer)
            newTrove = Trove(package.name, package.nextVersion, deps.Flavor())
            newTrove.setFactory(package.targetConfig.factory)

            # Add upstream files to new trove. Recycle pathids from the old
            # version.
            # LAZY: assume that everything other than the recipe is binary.
            # Conary has a magic module, but it only accepts filenames!
            for path, contents in package.recipeFiles.iteritems():
                isText = path == package.getRecipeName()
                _addFile(path, contents, isText)

            # Collect requested auto sources from recipe. Unknown recipe types
            # will not be loaded so recipeObj will be the class, so assume
            # these have no sources.
            if not inspect.isclass(recipeObj):
                recipeFiles = dict((os.path.basename(x.getPath()), x)
                    for x in recipeObj.getSourcePathList())
                newFiles = set(x[1] for x in newTrove.iterFileList())

                needFiles = set(recipeFiles) - newFiles
                for autoPath in needFiles:
                    source = recipeFiles[autoPath]
                    if (autoPath in oldFiles
                            and not self.helper.plan.refreshSources
                            and not source.ephemeral):
                        # File exists in old version.
                        pathId, path, fileId, fileVer = oldFiles[autoPath]
                        newTrove.addFile(pathId, path, fileVer, fileId)
                        continue

                    if source.ephemeral and not ephDir:
                        # Ephemeral sources are only fetched when an
                        # ephemeral source dir was configured above.
                        continue

                    # File doesn't exist; need to create it.
                    if source.ephemeral:
                        laUrl = lookaside.laUrl(source.getPath())
                        tempDir = joinPaths(ephDir,
                                os.path.dirname(laUrl.filePath()))
                        mkdirChain(tempDir)
                    else:
                        tempDir = tempfile.mkdtemp()
                        deleteDirs.add(tempDir)
                    snapshot = _getSnapshot(self.helper, package, source,
                            tempDir)

                    # Ephemeral sources are fetched but never added to the
                    # new trove.
                    if not source.ephemeral and snapshot:
                        autoPathId = hashlib.md5(autoPath).digest()
                        autoObj = FileFromFilesystem(snapshot, autoPathId)
                        autoObj.flags.isAutoSource(set=True)
                        autoObj.flags.isSource(set=True)
                        autoFileId = autoObj.fileId()

                        autoContents = filecontents.FromFilesystem(snapshot)
                        filesToAdd[autoFileId] = (autoObj, autoContents, False)
                        newTrove.addFile(autoPathId, autoPath,
                            newTrove.getVersion(), autoFileId)

            # If the old and new troves are identical, just use the old one.
            if oldTrove and _sourcesIdentical(
                    oldTrove, newTrove, [self.oldChangeSet, filesToAdd]):
                package.setDownstreamVersion(oldTrove.getVersion())
                log.debug('Skipped %s=%s', oldTrove.getName(),
                        oldTrove.getVersion())
                continue

            # Add files and contents to changeset.
            for fileId, (fileObj, fileContents, cfgFile) in filesToAdd.items():
                changeSet.addFileContents(fileObj.pathId(), fileObj.fileId(),
                    ChangedFileTypes.file, fileContents, cfgFile)
                changeSet.addFile(None, fileObj.fileId(), fileObj.freeze())

            # Create a changelog entry.
            changeLog = ChangeLog(
                name=self.helper.cfg.name, contact=self.helper.cfg.contact,
                message=self.helper.plan.commitMessage + '\n')
            newTrove.changeChangeLog(changeLog)

            # Calculate trove digests and add the trove to the changeset
            newTrove.invalidateDigests()
            newTrove.computeDigests()
            newTroveCs = newTrove.diff(None, absolute=True)[0]
            changeSet.newTrove(newTroveCs)
            doCommit = True

            package.setDownstreamVersion(newTrove.getVersion())
            log.debug('Created %s=%s', newTrove.getName(), newTrove.getVersion())

        if doCommit:
            cook.signAbsoluteChangesetByConfig(changeSet, self.helper.cfg)
            # Write the changeset to disk first so it can be inspected if
            # the commit fails; it is deleted only on success.
            f = tempfile.NamedTemporaryFile(dir=os.getcwd(), suffix='.ccs',
                    delete=False)
            f.close()
            changeSet.writeToFile(f.name)
            try:
                self.helper.getRepos().commitChangeSet(changeSet)
            except:
                # Intentionally broad: report where the changeset was saved,
                # then re-raise the original error.
                log.error("Error committing changeset to repository, "
                        "failed changeset is saved at %s", f.name)
                raise
            else:
                os.unlink(f.name)

        for path in deleteDirs:
            shutil.rmtree(path)