Example #1
    def testSnapshot(self):
        file(self.rootDir + '/fake', 'w').write('# comment\n')
        smf = self.getSystemModel('/fake')
        self.assertEquals(smf.model.filedata, ['# comment\n'])
        self.assertEquals(smf.snapshotExists(), False)
        self.assertEquals(smf.exists(), True)

        smf.writeSnapshot()
        self.assertEquals(smf.snapshotExists(), True)
        self.assertEquals(
            file(self.rootDir + '/fake.next', 'r').read(), '# comment\n')
        smf.closeSnapshot()
        self.assertEquals(smf.snapshotExists(), False)
        self.assertEquals(util.exists(self.rootDir + '/fake.next'), False)

        file(self.rootDir + '/fake.next',
             'w').write('# comment\ninstall foo\n')
        smf = self.getSystemModel('/fake')
        self.assertEquals(smf.model.filedata, ['# comment\n', 'install foo\n'])
        self.assertEquals(
            file(self.rootDir + '/fake.next', 'r').read(), '# comment\n'
            'install foo\n')
        self.assertEquals(smf.snapshotExists(), True)
        self.assertEquals(smf.exists(), True)
        smf.closeSnapshot()
        self.assertEquals(smf.snapshotExists(), False)
        self.assertEquals(util.exists(self.rootDir + '/fake.next'), False)

        smf.writeSnapshot()
        self.assertEquals(smf.snapshotExists(), True)
        smf.deleteSnapshot()
        self.assertEquals(smf.snapshotExists(), False)
Example #2
        def _setSiteConfig(macros, arch, osName, setEnviron=False):
            if osName is None:
                osName = self.macros.os
            archConfig = None
            osConfig = None
            for siteDir in self.cfg.siteConfigPath:
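                # note: each matching siteDir overwrites archConfig/osConfig,
                # so the last entry in siteConfigPath that matches wins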
                ac = '/'.join((siteDir, arch))
                if util.exists(ac):
                    archConfig = ac
                if osName:
                    oc = '/'.join((siteDir, osName))
                    if util.exists(oc):
                        osConfig = oc
            if not archConfig and not osConfig:
                macros.env_siteconfig = ''
                return

            siteConfig = None
            if setEnviron and 'CONFIG_SITE' in os.environ:
                siteConfig = os.environ['CONFIG_SITE']
            siteConfig = ' '.join((x for x in [siteConfig, archConfig, osConfig]
                                   if x is not None))
            macros.env_siteconfig = siteConfig
            if setEnviron:
                os.environ['CONFIG_SITE'] = siteConfig
Example #3
    def testSnapshot(self):
        file(self.rootDir + '/fake', 'w').write('# comment\n')
        smf = self.getSystemModel('/fake')
        self.assertEquals(smf.model.filedata, ['# comment\n'])
        self.assertEquals(smf.snapshotExists(), False)
        self.assertEquals(smf.exists(), True)

        smf.writeSnapshot()
        self.assertEquals(smf.snapshotExists(), True)
        self.assertEquals(file(self.rootDir + '/fake.next', 'r').read(),
            '# comment\n')
        smf.closeSnapshot()
        self.assertEquals(smf.snapshotExists(), False)
        self.assertEquals(util.exists(self.rootDir + '/fake.next'), False)

        file(self.rootDir + '/fake.next', 'w').write('# comment\ninstall foo\n')
        smf = self.getSystemModel('/fake')
        self.assertEquals(smf.model.filedata, ['# comment\n', 'install foo\n'])
        self.assertEquals(file(self.rootDir + '/fake.next', 'r').read(),
            '# comment\n'
            'install foo\n')
        self.assertEquals(smf.snapshotExists(), True)
        self.assertEquals(smf.exists(), True)
        smf.closeSnapshot()
        self.assertEquals(smf.snapshotExists(), False)
        self.assertEquals(util.exists(self.rootDir + '/fake.next'), False)

        smf.writeSnapshot()
        self.assertEquals(smf.snapshotExists(), True)
        smf.deleteSnapshot()
        self.assertEquals(smf.snapshotExists(), False)
Example #4
        def _setSiteConfig(macros, arch, osName, setEnviron=False):
            if osName is None:
                osName = self.macros.os
            archConfig = None
            osConfig = None
            for siteDir in self.cfg.siteConfigPath:
                ac = '/'.join((siteDir, arch))
                if util.exists(ac):
                    archConfig = ac
                if osName:
                    oc = '/'.join((siteDir, osName))
                    if util.exists(oc):
                        osConfig = oc
            if not archConfig and not osConfig:
                macros.env_siteconfig = ''
                return

            siteConfig = None
            if setEnviron and 'CONFIG_SITE' in os.environ:
                siteConfig = os.environ['CONFIG_SITE']
            siteConfig = ' '.join((x
                                   for x in [siteConfig, archConfig, osConfig]
                                   if x is not None))
            macros.env_siteconfig = siteConfig
            if setEnviron:
                os.environ['CONFIG_SITE'] = siteConfig
Example #5
    def handleCheck(self, startGroups, stopGroups, lines, fullpath):

        if stopGroups is None:
            # we lost sync, don't start guessing because we care about
            # the result of the check
            return
        sought = startGroups[0]
        success = self.parseSuccess(stopGroups[0])
        includeDirs = [ '%(includedir)s/' %self.macros]
        root = self.recipe.cfg.root

        if success:
            if self.headerRe.match(sought):
                # look for header files
                for tokens in (x.split() for x in lines):
                    for token in tokens:
                        if token.startswith('-I/') and len(token) > 3:
                            includeDirs.append(token[2:])
                for dirName in includeDirs:
                    seekPath = util.normpath('%s/%s' %(dirName, sought))
                    if util.exists('%s%s' %(root, seekPath)):
                        self.foundPaths.add(seekPath)
                        break

            libName = self.libRe.match(sought)
            if libName:
                libName = libName.group(0)
                # Take advantage of the fact that the actual test will
                # include running the compiler with the library in the
                # link line in such a way that the
                # EnforceStaticLibBuildRequirements policy knows how
                # to understand it.
                # EnforceStaticLibBuildRequirements does not handle the
                # leading "configure:01234: " portion of the output,
                # so give it every line that has further content and
                # let it find the lines that it cares about
                logLines = (x.split(': ', 1) for x in lines)
                logLines = (x[1] for x in logLines if len(x) > 1)
                self.recipe.EnforceStaticLibBuildRequirements(logLines=logLines)

            candidate = None
            if sought.startswith('/'):
                candidate = sought
            elif type(success) is str and success.startswith('/'):
                candidate = success
            if candidate:
                # configure:2345: checking for /bin/sought
                # configure:6543: result: yes
                # configure:4441: checking for egrep
                # configure:4519: result: /bin/grep -E
                # configure:4535: checking for ld used by gcc
                # configure:4602: result: /usr/bin/ld
                seekPath = candidate.split()[0]
                if util.exists(util.normpath('%s%s' %(root, seekPath))):
                    self.foundPaths.update(set(
                        self.greylistFilter(set((seekPath,)), fullpath)))
Example #6
    def testMkdirIfMissing(self):
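        # os.umask() installs a new mask and returns the previous one, so
        # this pair of calls reads the current umask without changing it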
        umask = os.umask(022)
        os.umask(umask)

        assert(not util.exists(self.workDir + '/newdir'))
        file_utils.mkdirIfMissing(self.workDir + '/newdir')
        assert((os.stat(self.workDir + '/newdir').st_mode & 0777)==
                (0777 & ~umask))
Example #7
    def testMkdirIfMissing(self):
        umask = os.umask(022)
        os.umask(umask)

        assert (not util.exists(self.workDir + '/newdir'))
        file_utils.mkdirIfMissing(self.workDir + '/newdir')
        assert ((os.stat(self.workDir + '/newdir').st_mode
                 & 0777) == (0777 & ~umask))
Example #8
    def fileChanged(self, path):
        """
        check to see if the file has changed
        @param path: the path to check
        @return: FILE_MISSING, FILE_CHANGED, FILE_UNCHANGED, FILE_NEW
        @rtype: int
        """
        newPath = util.joinPaths(self.macros.destdir, path)
        if not util.exists(newPath):
            return self.FILE_MISSING

        from conary.build.recipe import RECIPE_TYPE_CAPSULE
        if self.recipe._recipeType == RECIPE_TYPE_CAPSULE:
            if not os.path.isfile(newPath):
                # for capsules we get everything but contents from
                # the capsule header
                return self.FILE_UNCHANGED

            # For derived capsule recipes we use the exploder to provide the old
            # sha1. For regular capsule recipes we use the capsuleFileSha1s map
            # to provide the old sha1.
            oldSha1 = None
            if os.path.islink(newPath):
                oldSha1 = os.readlink(newPath)
            elif hasattr(self.recipe, 'exploder'):
                oldf = self.recipe.exploder.fileObjMap.get(path, None)
                if oldf and oldf.hasContents:
                    oldSha1 = oldf.contents.sha1()
            else:
                capPaths = self.recipe._getCapsulePathsForFile(path)
                if not capPaths:
                    return self.FILE_NEW
                oldSha1 = self.recipe.capsuleFileSha1s[capPaths[0]][path]

            if oldSha1:
                if os.path.islink(newPath):
                    newSha1 = os.readlink(newPath)
                else:
                    newSha1 = sha1helper.sha1FileBin(newPath)
                if oldSha1 == newSha1:
                    return self.FILE_UNCHANGED
                return self.FILE_CHANGED
            return self.FILE_NEW

        oldMtime = self.recipe._derivedFiles.get(path, None)
        if os.path.islink(newPath):
            # symlinks are special, we compare the target of the link
            # instead of the mtime
            newMtime = os.readlink(newPath)
        else:
            newMtime = os.lstat(newPath).st_mtime
        if oldMtime:
            if oldMtime == newMtime:
                return self.FILE_UNCHANGED
            return self.FILE_CHANGED
        return self.FILE_NEW
Example #9
    def fileChanged(self, path):
        """
        check to see if the file has changed
        @param path: the path to check
        @return: FILE_MISSING, FILE_CHANGED, FILE_UNCHANGED, FILE_NEW
        @rtype: int
        """
        newPath = util.joinPaths(self.macros.destdir, path)
        if not util.exists(newPath):
            return self.FILE_MISSING

        from conary.build.recipe import RECIPE_TYPE_CAPSULE
        if self.recipe._recipeType == RECIPE_TYPE_CAPSULE:
            if not os.path.isfile(newPath):
                # for capsules we get everything but contents from
                # the capsule header
                return self.FILE_UNCHANGED

            # For derived capsule recipes we use the exploder to provide the old
            # sha1. For regular capsule recipes we use the capsuleFileSha1s map
            # to provide the old sha1.
            oldSha1=None
            if os.path.islink(newPath):
                oldSha1 = os.readlink(newPath)
            elif hasattr(self.recipe,'exploder'):
                oldf = self.recipe.exploder.fileObjMap.get(path,None)
                if oldf and oldf.hasContents:
                    oldSha1 = oldf.contents.sha1()
            else:
                capPaths = self.recipe._getCapsulePathsForFile(path)
                if not capPaths:
                    return self.FILE_NEW
                oldSha1 = self.recipe.capsuleFileSha1s[capPaths[0]][path]

            if oldSha1:
                if os.path.islink(newPath):
                    newSha1 = os.readlink(newPath)
                else:
                    newSha1 = sha1helper.sha1FileBin(newPath)
                if oldSha1 == newSha1:
                    return self.FILE_UNCHANGED
                return self.FILE_CHANGED
            return self.FILE_NEW

        oldMtime = self.recipe._derivedFiles.get(path, None)
        if os.path.islink(newPath):
            # symlinks are special, we compare the target of the link
            # instead of the mtime
            newMtime = os.readlink(newPath)
        else:
            newMtime = os.lstat(newPath).st_mtime
        if oldMtime:
            if oldMtime == newMtime:
                return self.FILE_UNCHANGED
            return self.FILE_CHANGED
        return self.FILE_NEW
Example #10
    def _readFile(self, path):

        if not util.exists(path):
            return

        for line in file(path, 'rU').readlines():
            line = line.strip().rstrip('/')
            if not line or line.startswith('#'):
                continue
            self.append(line)
Example #11
    def _readFile(self, path):

        if not util.exists(path):
            return

        for line in file(path, "rU").readlines():
            line = line.strip().rstrip("/")
            if not line or line.startswith("#"):
                continue
            self.append(line)
Example #12
    def save(self, path):
        # return early if we aren't going to have permission to save
        try:
            fd, cacheName = tempfile.mkstemp(
                    prefix=os.path.basename(path) + '.',
                    dir=os.path.dirname(path))
            os.close(fd)
        except (IOError, OSError):
            # may not have permissions; say, not running as root
            return

        cs = changeset.ChangeSet()
        for withFiles, trv in self.cache.values():
            # we just assume everything in the cache is w/o files. it's
            # fine for system model, safe, and we don't need the cache
            # anywhere else
            cs.newTrove(trv.diff(None, absolute = True)[0])

        # NB: "fileid" and pathid got reversed here by mistake, try not to
        # think too hard about it.
        cs.addFileContents(
                           self._fileId,
                           self._troveCacheVersionPathId,
                           changeset.ChangedFileTypes.file,
                           filecontents.FromString("%d %d" % self.VERSION),
                           False)
        self._cs = cs
        self._saveTimestamps()
        self._saveDeps()
        self._saveDepSolutions()
        self._saveFileCache()
        self._cs = None

        try:
            try:
                cs.writeToFile(cacheName)
                if util.exists(path):
                    os.chmod(cacheName, os.stat(path).st_mode)
                else:
                    os.chmod(cacheName, 0644)
                os.rename(cacheName, path)
            except (IOError, OSError):
                # may not have permissions; say, not running as root
                pass
        finally:
            try:
                if os.path.exists(cacheName):
                    os.remove(cacheName)
            except OSError:
                pass
Example #13
    def save(self, path):
        # return early if we aren't going to have permission to save
        try:
            fd, cacheName = tempfile.mkstemp(
                    prefix=os.path.basename(path) + '.',
                    dir=os.path.dirname(path))
            os.close(fd)
        except (IOError, OSError):
            # may not have permissions; say, not running as root
            return

        cs = changeset.ChangeSet()
        for withFiles, trv in self.cache.values():
            # we just assume everything in the cache is w/o files. it's
            # fine for system model, safe, and we don't need the cache
            # anywhere else
            cs.newTrove(trv.diff(None, absolute = True)[0])

        # NB: "fileid" and pathid got reversed here by mistake, try not to
        # think too hard about it.
        cs.addFileContents(
                           self._fileId,
                           self._troveCacheVersionPathId,
                           changeset.ChangedFileTypes.file,
                           filecontents.FromString("%d %d" % self.VERSION),
                           False)
        self._cs = cs
        self._saveTimestamps()
        self._saveDeps()
        self._saveDepSolutions()
        self._saveFileCache()
        self._cs = None

        try:
            try:
                cs.writeToFile(cacheName)
                if util.exists(path):
                    os.chmod(cacheName, os.stat(path).st_mode)
                else:
                    os.chmod(cacheName, 0644)
                os.rename(cacheName, path)
            except (IOError, OSError):
                # may not have permissions; say, not running as root
                pass
        finally:
            try:
                if os.path.exists(cacheName):
                    os.remove(cacheName)
            except OSError:
                pass
Example #14
    def restore(self, fileContents, root, target, journal=None, nameLookup=True,
                **kwargs):
        if util.exists(target):
            # we have something in the way
            sb = os.lstat(target)
            if not stat.S_ISDIR(sb.st_mode):
                # it's not a directory so remove it; if it is a directory,
                # we just need to change the metadata
                os.unlink(target)
                util.mkdirChain(target)
        else:
            util.mkdirChain(target)

        return File.restore(self, root, target, journal=journal,
            nameLookup=nameLookup, **kwargs)
Example #15
    def doFile(self, filename):
        if hasattr(self.recipe, '_getCapsulePathsForFile'):
            if self.recipe._getCapsulePathsForFile(filename):
                return

        libdir = self.recipe.macros.libdir
        destdir = self.recipe.macros.destdir
        basename = os.path.basename(filename)
        if not filename.startswith(libdir):
            dest = util.joinPaths(destdir, libdir, 'pkgconfig', basename)
            if util.exists(dest):
                self.error('%s and %s/%s/%s both exist',
                           filename, libdir, 'pkgconfig', basename)
                return
            util.mkdirChain(os.path.dirname(dest))
            util.rename(destdir+filename, dest)
            try:
                self.recipe.recordMove(destdir+filename, dest)
            except AttributeError:
                pass
Example #16
    def doFile(self, filename):
        if hasattr(self.recipe, '_getCapsulePathsForFile'):
            if self.recipe._getCapsulePathsForFile(filename):
                return

        libdir = self.recipe.macros.libdir
        destdir = self.recipe.macros.destdir
        basename = os.path.basename(filename)
        if not filename.startswith(libdir):
            dest = util.joinPaths(destdir, libdir, 'pkgconfig', basename)
            if util.exists(dest):
                self.error('%s and %s/%s/%s both exist', filename, libdir,
                           'pkgconfig', basename)
                return
            util.mkdirChain(os.path.dirname(dest))
            util.rename(destdir + filename, dest)
            try:
                self.recipe.recordMove(destdir + filename, dest)
            except AttributeError:
                pass
Example #17
    def doFile(self, path):
        if hasattr(self.recipe, '_getCapsulePathsForFile'):
            if self.recipe._getCapsulePathsForFile(path):
                return

        d = self.macros.destdir
        f = util.joinPaths(d, path)
        if not os.path.islink(f):
            return

        contents = os.readlink(f)
        builddir = self.recipe.macros.builddir
        if contents.startswith(builddir):
            newContents = os.path.normpath(contents[len(builddir):])
            n = util.joinPaths(d, newContents)
            if not util.exists(n):
                return
            self.info('removing builddir from symlink %s: %s becomes %s',
                      path, contents, newContents)
            os.unlink(f)
            os.symlink(newContents, f)
Example #18
    def doFile(self, path):
        if hasattr(self.recipe, '_getCapsulePathsForFile'):
            if self.recipe._getCapsulePathsForFile(path):
                return

        d = self.macros.destdir
        f = util.joinPaths(d, path)
        if not os.path.islink(f):
            return

        contents = os.readlink(f)
        builddir = self.recipe.macros.builddir
        if contents.startswith(builddir):
            newContents = os.path.normpath(contents[len(builddir):])
            n = util.joinPaths(d, newContents)
            if not util.exists(n):
                return
            self.info('removing builddir from symlink %s: %s becomes %s', path,
                      contents, newContents)
            os.unlink(f)
            os.symlink(newContents, f)
Example #19
    def write(self, fileName=None):
        '''
        Writes the current system model to the specified file (relative
        to the configured root), or overwrites the previously-specified
        file if no filename is provided.
        @param fileName: (optional) name of file to which to write the model
        @type fileName: string
        '''
        if fileName == None:
            fileName = self.fileName
        fileFullName = self.model.cfg.root+fileName
        if util.exists(fileFullName):
            fileMode = stat.S_IMODE(os.stat(fileFullName)[stat.ST_MODE])
        else:
            fileMode = 0644

        dirName = os.path.dirname(fileFullName)
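        # write the model to a temporary file in the same directory, then
        # rename it over the target so readers never see a partial file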
        fd, tmpName = tempfile.mkstemp(prefix='system-model', dir=dirName)
        f = os.fdopen(fd, 'w')
        self.model.write(f)
        os.chmod(tmpName, fileMode)
        os.rename(tmpName, fileFullName)
Example #20
    def write(self, fileName=None):
        '''
        Writes the current system model to the specified file (relative
        to the configured root), or overwrites the previously-specified
        file if no filename is provided.
        @param fileName: (optional) name of file to which to write the model
        @type fileName: string
        '''
        if fileName == None:
            fileName = self.fileName
        fileFullName = self.model.cfg.root + fileName
        if util.exists(fileFullName):
            fileMode = stat.S_IMODE(os.stat(fileFullName)[stat.ST_MODE])
        else:
            fileMode = 0644

        dirName = os.path.dirname(fileFullName)
        fd, tmpName = tempfile.mkstemp(prefix='system-model', dir=dirName)
        f = os.fdopen(fd, 'w')
        self.model.write(f)
        os.chmod(tmpName, fileMode)
        os.rename(tmpName, fileFullName)
Example #21
def loadMacros(paths):
    """
    Load default macros from a series of I{paths}.

    @rtype: dict
    @return: A dictionary of default macros
    """

    baseMacros = {}
    loadPaths = []
    for path in paths:
        globPaths = sorted(list(glob.glob(path)))
        loadPaths.extend(globPaths)

    for path in loadPaths:
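        # imp.load_source may leave a byte-compiled file (path + "c") behind;
        # remember whether it already existed so it can be cleaned up below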
        compiledPath = path + "c"
        deleteCompiled = not util.exists(compiledPath)
        macroModule = imp.load_source("tmpmodule", path)
        if deleteCompiled:
            util.removeIfExists(compiledPath)
        baseMacros.update(x for x in macroModule.__dict__.iteritems() if not x[0].startswith("__"))

    return baseMacros
Example #22
    def _removeNonRecipeFilesFromCheckout(self, recipePath):
        recipeDir = os.path.dirname(recipePath)
        recipeName = os.path.basename(recipePath)
        repos = self._getRepositoryClient()
        statePath = os.path.join(recipeDir, 'CONARY')
        conaryState = state.ConaryStateFromFile(statePath, repos)
        sourceState = conaryState.getSourceState()

        for (pathId, path, _, _) in list(sourceState.iterFileList()):
            if path == recipeName:
                continue
            path = os.path.join(recipeDir, path)
            sourceState.removeFile(pathId)

            if util.exists(path):
                statInfo = os.lstat(path)
                try:
                    if statInfo.st_mode & stat.S_IFDIR:
                        os.rmdir(path)
                    else:
                        os.unlink(path)
                except OSError, e:
                    self._handle.ui.warning(
                                "cannot remove %s: %s", path, e.strerror)
Example #23
def loadMacros(paths):
    '''
    Load default macros from a series of I{paths}.

    @rtype: dict
    @return: A dictionary of default macros
    '''

    baseMacros = {}
    loadPaths = []
    for path in paths:
        globPaths = sorted(list(glob.glob(path)))
        loadPaths.extend(globPaths)

    for path in loadPaths:
        compiledPath = path + 'c'
        deleteCompiled = not util.exists(compiledPath)
        macroModule = imp.load_source('tmpmodule', path)
        if deleteCompiled:
            util.removeIfExists(compiledPath)
        baseMacros.update(x for x in macroModule.__dict__.iteritems()
                          if not x[0].startswith('__'))

    return baseMacros
Example #24
    def _removeNonRecipeFilesFromCheckout(self, recipePath):
        recipeDir = os.path.dirname(recipePath)
        recipeName = os.path.basename(recipePath)
        repos = self._getRepositoryClient()
        statePath = os.path.join(recipeDir, 'CONARY')
        conaryState = state.ConaryStateFromFile(statePath, repos)
        sourceState = conaryState.getSourceState()

        for (pathId, path, _, _) in list(sourceState.iterFileList()):
            if path == recipeName:
                continue
            path = os.path.join(recipeDir, path)
            sourceState.removeFile(pathId)

            if util.exists(path):
                statInfo = os.lstat(path)
                try:
                    if statInfo.st_mode & stat.S_IFDIR:
                        os.rmdir(path)
                    else:
                        os.unlink(path)
                except OSError, e:
                    self._handle.ui.warning("cannot remove %s: %s", path,
                                            e.strerror)
Example #25
    def do(self):
        # For the purposes of this policy, the transitive buildRequires
        # includes suggestions already made for handling shared libraries,
        # since this policy is explicitly a fallback for the unusual
        # case of static linking outside of the package being built.
        transitiveBuildRequires = self.transitiveBuildRequires.union(self.warnedSoNames)
        cfg = self.recipe.cfg
        db = database.Database(cfg.root, cfg.dbPath)

        foundLibNames = set()
        allPossibleProviders = set()
        missingBuildRequires = set()
        self.buildDirLibNames = None
        destdir = self.recipe.macros.destdir
        builddir = self.recipe.macros.builddir
        tooManyChoices = {}
        noTroveFound = {}
        noLibraryFound = set()

        components = self.recipe.autopkg.components
        pathMap = self.recipe.autopkg.pathMap
        reqDepSet = deps.DependencySet()
        sharedLibraryRequires = set()
        for pkg in components.values():
            reqDepSet.union(pkg.requires)
        for dep in reqDepSet.iterDepsByClass(deps.SonameDependencies):
            soname = os.path.basename(dep.name).split('.')[0]
            sharedLibraryRequires.add(soname)
            if soname.startswith('lib'):
                sharedLibraryRequires.add(soname[3:])
            else:
                sharedLibraryRequires.add('lib%s' %soname)
        troveLibraries = set()
        for path in pathMap.iterkeys():
            basename = os.path.basename(path)
            if basename.startswith('lib') and basename.find('.') >= 0:
                troveLibraries.add(basename[3:].split('.')[0])

        self.recipe.synchronizeLogs()
        f = file(self.recipe.getSubscribeLogPath())

        libRe = re.compile('^-l[a-zA-Z]+$')
        libDirRe = re.compile('^-L/..*$')

        def logLineTokens():
            for logLine in f:
                logLine = logLine.strip()
                if not self.r.match(logLine):
                    continue
                yield logLine.split()
            for logLine in self.logLines:
                yield logLine.split()

        def pathSetToTroveSet(pathSet):
            troveSet = set()
            for path in pathSet:
                for pathReq in set(trove.getName()
                                   for trove in db.iterTrovesByPath(path)):
                    pathReqCandidates = _providesNames(pathReq)
                    # remove any recursive or non-existing buildreqs
                    pathReqCandidates = [x for x in pathReqCandidates 
                                         if db.hasTroveByName(x)]
                    if not pathReqCandidates:
                        continue
                    allPossibleProviders.update(pathReqCandidates)
                    # only the best option
                    pathReqCandidates = pathReqCandidates[0:1]
                    # now apply exceptions
                    pathReqCandidates = self._removeExceptionsFromList(
                        pathReqCandidates)
                    troveSet.add(pathReqCandidates[0])
            return troveSet

        def buildDirContains(libName):
            # If we can find this library built somewhere in the
            # builddir, chances are that the internal library is
            # what is being linked to in any case.
            if self.buildDirLibNames is None:
                # walk builddir once, the first time this is called
                self.buildDirLibNames = set()
                for dirpath, dirnames, filenames in os.walk(builddir):
                    for fileName in filenames:
                        if fileName.startswith('lib') and '.' in fileName:
                            self.buildDirLibNames.add(fileName[3:].split('.')[0])
            return libName in self.buildDirLibNames

        for tokens in logLineTokens():
            libNames = set(x[2:] for x in tokens if libRe.match(x))
            # Add to this set, for this line only, system library dirs,
            # nothing in destdir or builddir
            libDirs = self.libDirs.copy()
            for libDir in set(x[2:].rstrip('/') for x in tokens
                              if libDirRe.match(x) and
                                 not x[2:].startswith(destdir) and
                                 not x[2:].startswith(builddir)):
                libDir = util.normpath(libDir)
                libDirs.setdefault(util.normpath('%s%s' %(cfg.root, libDir)), libDir)
                libDirs.setdefault(libDir, libDir)
            for libName in sorted(list(libNames)):
                if libName not in foundLibNames:
                    if libName in sharedLibraryRequires:
                        foundLibNames.add(libName)
                        continue
                    if libName in troveLibraries:
                        foundLibNames.add(libName)
                        continue
                    if buildDirContains(libName):
                        foundLibNames.add(libName)
                        continue

                    foundLibs = set()
                    for libDirRoot, libDir in libDirs.iteritems():
                        for ext in ('a', 'so'):
                            # If there is no .a, look for the .so in case
                            # no shared library dependency is found from
                            # packaged files (CNP-132)
                            if util.exists('%s/lib%s.%s' %(libDirRoot, libName, ext)):
                                foundLibs.add('%s/lib%s.%s' %(libDir, libName, ext))
                                break
                    troveSet = pathSetToTroveSet(foundLibs)

                    if len(troveSet) == 1:
                        # found just one, we can confidently recommend it
                        recommended = list(troveSet)[0]
                        if recommended not in transitiveBuildRequires:
                            self.info("Add '%s' to buildRequires for -l%s (%s)",
                                      recommended, libName,
                                      ', '.join(sorted(list(foundLibs))))
                            missingBuildRequires.add(recommended)
                            foundLibNames.add(libName)

                    elif len(troveSet):
                        # found more, we might need to recommend a choice
                        tooManyChoices.setdefault(libName, [
                                  ' '.join(sorted(list(foundLibs))),
                                  "', '".join(sorted(list(troveSet)))])

                    elif foundLibs:
                        # found files on system, but no troves providing them
                        noTroveFound.setdefault(libName,
                                  ' '.join(sorted(list(foundLibs))))
                        
                    else:
                        # note that this does not prevent us from
                        # *looking* again, because the next time
                        # there might be a useful -L in the link line
                        noLibraryFound.add(libName)
                            
        if tooManyChoices:
            for libName in sorted(list(tooManyChoices.keys())):
                if libName not in foundLibNames:
                    # Found multiple choices for libName, and never came
                    # up with a better recommendation, so recommend a choice.
                    # Note: perhaps someday this can become an error
                    # when we have a better sense of how frequently
                    # it is wrong...
                    foundLibNames.add(libName)
                    foundLibs, troveSet = tooManyChoices[libName]
                    self.warn('Multiple troves match files %s for -l%s:'
                              ' choose one of the following entries'
                              " for buildRequires: '%s'",
                              foundLibs, libName, troveSet)

        if noTroveFound:
            for libName in sorted(list(noTroveFound.keys())):
                if libName not in foundLibNames:
                    # Never found any trove containing these libraries,
                    # not even a file in the builddir
                    foundLibNames.add(libName)
                    foundLibs = noTroveFound[libName]
                    self.info('No trove found matching any of files'
                              ' %s for -l%s:'
                              ' possible missing buildRequires',
                              foundLibs, libName)

        if noLibraryFound:
            for libName in sorted(list(noLibraryFound)):
                if libName not in foundLibNames:
                    # Note: perhaps someday this can become an error
                    # when we have a better sense of how frequently
                    # it is wrong...
                    self.info('No files found matching -l%s:'
                              ' possible missing buildRequires', libName)

        if missingBuildRequires:
            self.talk('add to buildRequires: %s',
                       str(sorted(list(missingBuildRequires))))
            reportMissingBuildRequires(self.recipe, missingBuildRequires)

        if allPossibleProviders:
            reportFoundBuildRequires(self.recipe, allPossibleProviders)

        f.close()
Example #26
 def exists(self):
     return util.exists(self.fileFullName)
Example #27
 def snapshotExists(self):
     return util.exists(self.snapFullName)
Example #28
 def exists(self):
     return util.exists(self.fileFullName)
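
Every example in this listing calls conary.lib.util.exists rather than os.path.exists. The sketch below shows the behaviour the surrounding code appears to rely on, assuming an lstat-based check in which a dangling symlink still counts as existing; it is an illustration only, not the actual conary.lib.util implementation.

import errno
import os


def exists(path):
    # Sketch only: anything os.lstat() can see, including a broken symlink,
    # is treated as existing; only ENOENT means the path is missing.
    try:
        os.lstat(path)
    except OSError, e:
        if e.errno == errno.ENOENT:
            return False
        raise
    return True
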
Example #29
from conary.lib import util, sha1helper
from conary.lib.ext import file_utils
from conary.lib.ext import digest_uncompress


class MiscTest(rephelp.RepositoryHelper):
    def testMkdirIfMissing(self):
        umask = os.umask(022)
        os.umask(umask)

        assert (not util.exists(self.workDir + '/newdir'))
        file_utils.mkdirIfMissing(self.workDir + '/newdir')
        assert ((os.stat(self.workDir + '/newdir').st_mode
                 & 0777) == (0777 & ~umask))
        assert (util.exists(self.workDir + '/newdir'))
        file_utils.mkdirIfMissing(self.workDir + '/newdir')

        try:
            file_utils.mkdirIfMissing(self.workDir + '/first/second')
        except OSError, e:
            assert (e.errno == errno.ENOENT)
        else:
            raise AssertionError, "mkdir should fail"

        self.writeFile(self.workDir + '/dirent', '')
        file_utils.mkdirIfMissing(self.workDir + '/dirent')

    def _testSha1CopyAndUncompress(self, offset):
        infd = -1
        outfd = -1
Example #30
def derive(repos, cfg, targetLabel, troveSpec, checkoutDir=None,
           extract=False, info=False, callback=None):
    """
        Performs all the commands necessary to create a derived recipe.
        First it shadows the package, then it creates a checkout of the shadow
        and converts the checkout to a derived recipe package.

        Finally, if extract = True, it installs a version of the binary
        package into a root.

        @param repos: trovesource to search for and derive packages from
        @param cfg: configuration to use when deriving the package
        @type cfg: ConaryConfiguration object
        @param targetLabel: label to derive from
        @type targetLabel: versions.Label
        @param checkoutDir: directory to create the checkout in.  If None,
                             defaults to currentDir + packageName.
        @param extract: If True, creates a subdirectory of the checkout named
                         _ROOT_ with the contents of the binary of the derived
                         package.
        @param info: If true, only display the information about the shadow
                      that would be performed if the derive command were
                      completed.
        @param callback:
    """

    origDir = os.getcwd()
    try:
        if callback is None:
            callback = DeriveCallback()

        if isinstance(troveSpec, tuple):
            troveName, versionSpec, flavor = troveSpec
            versionSpec = str(versionSpec)
            troveSpec = cmdline.toTroveSpec(troveName, versionSpec, flavor)
        else:
            troveName, versionSpec, flavor = cmdline.parseTroveSpec(troveSpec)

        if isinstance(targetLabel, str):
            targetLabel = Label(targetLabel)

        troveName, versionSpec, flavor = cmdline.parseTroveSpec(troveSpec)
        result = repos.findTrove(cfg.buildLabel,
                                 (troveName, versionSpec, flavor),
                                 cfg.flavor)
        # findTrove shouldn't return multiple items for one package anymore
        # when a flavor is specified.
        troveToDerive, = result
        # displaying output along the screen allows there to be a record
        # of what operations were performed.  Since this command is
        # an aggregate of several commands I think that is appropriate,
        # rather than simply using a progress callback.
        log.info('Shadowing %s=%s[%s] onto %s' % (troveToDerive[0],
                                                 troveToDerive[1],
                                                 troveToDerive[2],
                                                 targetLabel))
        if info:
            cfg.interactive = False

        error = branch.branch(repos, cfg, str(targetLabel),
                              ['%s=%s[%s]'%troveToDerive],
                              makeShadow=True, sourceOnly=True,
                              binaryOnly=False, allowEmptyShadow=True,
                              info=info)
        if info or error:
            return
        shadowedVersion = troveToDerive[1].createShadow(targetLabel)
        shadowedVersion = shadowedVersion.getSourceVersion(False)
        troveName = troveName.split(':')[0]

        checkoutDir = checkoutDir or troveName
        checkin.checkout(repos, cfg, checkoutDir,
                         ["%s=%s" % (troveName, shadowedVersion)],
                         callback=callback)
        os.chdir(checkoutDir)

        nvfs = repos.getTrovesBySource(troveToDerive[0]+':source',
                                       troveToDerive[1].getSourceVersion())
        trvs = repos.getTroves(nvfs)
        hasCapsule = [ x for x in trvs if x.troveInfo.capsule.type() ]
        if hasCapsule:
            derivedRecipeType = 'DerivedCapsuleRecipe'
            removeText = ''
        else:
            derivedRecipeType = 'DerivedPackageRecipe'
            removeText = \
"""
        # This appliance uses PHP as a command interpreter but does
        # not include a web server, so remove the file that creates
        # a dependency on the web server
        r.Remove('/etc/httpd/conf.d/php.conf')
"""

        log.info('Rewriting recipe file')
        recipeName = troveName + '.recipe'
        className = util.convertPackageNameToClassName(troveName)

        derivedRecipe = """
class %(className)sRecipe(%(recipeBaseClass)s):
    name = '%(name)s'
    version = '%(version)s'

    def setup(r):
        '''
        In this recipe, you can make modifications to the package.

        Examples:

        # This appliance has high-memory-use PHP scripts
        r.Replace('memory_limit = 8M', 'memory_limit = 32M', '/etc/php.ini')
%(removeText)s
        # This appliance requires that a few binaries be replaced
        # with binaries built from a custom archive that includes
        # a Makefile that honors the DESTDIR variable for its
        # install target.
        r.addArchive('foo.tar.gz')
        r.Make()
        r.MakeInstall()

        # This appliance requires an extra configuration file
        r.Create('/etc/myconfigfile', contents='some data')
        '''
""" % dict(className=className,
           name=troveName,
           version=shadowedVersion.trailingRevision().getVersion(),
           recipeBaseClass=derivedRecipeType,
           removeText=removeText)

        open(recipeName, 'w').write(derivedRecipe)

        log.info('Removing extra files from checkout')

        conaryState = state.ConaryStateFromFile('CONARY', repos)
        sourceState = conaryState.getSourceState()
        # clear the factory since we don't care about how the parent trove was
        # created
        sourceState.setFactory('')

        addRecipe=True
        for (pathId, path, fileId, version) in list(sourceState.iterFileList()):
            if path == recipeName:
                addRecipe = False
                continue
            sourceState.removeFile(pathId)
            if util.exists(path):
                statInfo = os.lstat(path)
                try:
                    if statInfo.st_mode & stat.S_IFDIR:
                        os.rmdir(path)
                    else:
                        os.unlink(path)
                except OSError, e:
                    log.warning("cannot remove %s: %s" % (path, e.strerror))

        conaryState.write('CONARY')

        if addRecipe:
            checkin.addFiles([recipeName])

        if extract:
            log.info('extracting files from %s=%s[%s]' % (troveToDerive))
            # extract to _ROOT_
            extractDir = os.path.join(os.getcwd(), '_ROOT_')
            ts = [ (troveToDerive[0], (None, None),
                    (troveToDerive[1], troveToDerive[2]), True) ]
            cs = repos.createChangeSet(ts, recurse = True)
            ChangesetExploder(cs, extractDir)
            # extract to _OLD_ROOT_
            secondDir = os.path.join(os.getcwd(), '_OLD_ROOT_')
            cs = repos.createChangeSet(ts, recurse = True)
            ChangesetExploder(cs, secondDir)
Example #31
    def addPluggableRequirements(self, path, fullpath, pkgFiles, macros):
        if hasattr(self.recipe, '_getCapsulePathsForFile'):
            if self.recipe._getCapsulePathsForFile(path):
                # since capsules do not convert to relative symlinks,
                # we cannot depend on getting the realpath.  Unless
                # we resolve that, assume that capsule-provided
                # dependencies will be sufficient for pkgconfig files.
                return

        # parse pkgconfig file
        variables = {}
        requirements = set()
        libDirs = []
        libraries = set()
        variableLineRe = re.compile('^[a-zA-Z0-9]+=')
        filesRequired = []

        pcContents = [x.strip() for x in file(fullpath).readlines()]
        for pcLine in pcContents:
            # interpolate variables: assume variables are interpreted
            # line-by-line while processing
            pcLineIter = pcLine
            while True:
                for var in variables:
                    pcLineIter = pcLineIter.replace(var, variables[var])
                if pcLine == pcLineIter:
                    break
                pcLine = pcLineIter
            pcLine = pcLineIter

            if variableLineRe.match(pcLine):
                key, val = pcLine.split('=', 1)
                variables['${%s}' %key] = val
            else:
                if (pcLine.startswith('Requires') or
                    pcLine.startswith('Lib')) and ':' in pcLine:
                    keyWord, args = pcLine.split(':', 1)
                    # split on ',' and ' '
                    argList = itertools.chain(*[x.split(',')
                                                for x in args.split()])
                    argList = [x for x in argList if x]
                    if keyWord.startswith('Requires'):
                        versionNext = False
                        for req in argList:
                            if [x for x in '<=>' if x in req]:
                                versionNext = True
                                continue
                            if versionNext:
                                versionNext = False
                                continue
                            requirements.add(req)
                    elif keyWord.startswith('Lib'):
                        for lib in argList:
                            if lib.startswith('-L'):
                                libDirs.append(lib[2:])
                            elif lib.startswith('-l'):
                                libraries.add(lib[2:])
                            else:
                                pass

        # find referenced pkgconfig files and add requirements
        for req in requirements:
            candidateFileNames = [
                '%(destdir)s%(libdir)s/pkgconfig/'+req+'.pc',
                '%(destdir)s%(datadir)s/pkgconfig/'+req+'.pc',
                '%(libdir)s/pkgconfig/'+req+'.pc',
                '%(datadir)s/pkgconfig/'+req+'.pc',
            ]
            candidateFileNames = [ x % macros for x in candidateFileNames ]
            candidateFiles = [ util.exists(x) for x in candidateFileNames ]
            if True in candidateFiles:
                filesRequired.append(
                    (candidateFileNames[candidateFiles.index(True)], 'pkg-config'))
            else:
                self.warn('pkg-config file %s.pc not found', req)
                continue

        # find referenced library files and add requirements
        libraryPaths = sorted(list(self.systemLibPaths))
        for libDir in libDirs:
            if libDir not in libraryPaths:
                libraryPaths.append(libDir)
        for library in libraries:
            found = False
            for libDir in libraryPaths:
                candidateFileNames = [
                    macros.destdir+libDir+'/lib'+library+'.so',
                    macros.destdir+libDir+'/lib'+library+'.a',
                    libDir+'/lib'+library+'.so',
                    libDir+'/lib'+library+'.a',
                ]
                candidateFiles = [ util.exists(x) for x in candidateFileNames ]
                if True in candidateFiles:
                    filesRequired.append(
                        (candidateFileNames[candidateFiles.index(True)], 'library'))
                    found = True
                    break

            if not found:
                self.warn('library file lib%s not found', library)
                continue


        for fileRequired, fileType in filesRequired:
            if fileRequired.startswith(macros.destdir):
                # find requirement in packaging
                fileRequired = util.normpath(os.path.realpath(fileRequired))
                fileRequired = fileRequired[len(util.normpath(os.path.realpath(macros.destdir))):]
                autopkg = self.recipe.autopkg
                troveName = autopkg.componentMap[fileRequired].name
                package, component = troveName.split(':', 1)
                if component in ('devellib', 'lib'):
                    for preferredComponent in ('devel', 'devellib'):
                        develTroveName = ':'.join((package, preferredComponent))
                        if develTroveName in autopkg.components and autopkg.components[develTroveName]:
                            # found a non-empty :devel component
                            troveName = develTroveName
                            break
                self._addRequirement(path, troveName, [], pkgFiles,
                                     deps.TroveDependencies)
            else:
                troveName = self._enforceProvidedPath(fileRequired,
                                                      fileType=fileType,
                                                      unmanagedError=True)
                if troveName:
                    self._addRequirement(path, troveName, [], pkgFiles,
                                         deps.TroveDependencies)
Example #32
def magic(path, basedir=''):
    """
    Returns a magic class with information about the file mentioned
    """
    if basedir and not basedir.endswith('/'):
        basedir += '/'

    n = basedir+path
    if not util.exists(n) or not util.isregular(n):
        return None

    oldmode = None
    mode = os.lstat(n)[stat.ST_MODE]
    if (mode & 0400) != 0400:
        oldmode = mode
        os.chmod(n, mode | 0400)

    f = file(n)
    if oldmode is not None:
        os.chmod(n, oldmode)

    b = f.read(4096)
    f.close()

    if len(b) > 4 and b[0] == '\x7f' and b[1:4] == "ELF":
        return ELF(path, basedir, b)
    elif len(b) > 14 and b[0:14] == '!<arch>\ndebian':
        return deb(path, basedir)
    elif len(b) > 7 and b[0:7] == "!<arch>":
        return ar(path, basedir, b)
    elif len(b) > 2 and b[0] == '\x1f' and b[1] == '\x8b':
        try:
            uncompressedBuffer = gzip_module.GzipFile(n).read(4096)
            if _tarMagic(uncompressedBuffer):
                return tar_gz(path, basedir, b, uncompressedBuffer)
        except (IOError, zlib.error):
            # gzip sometimes raises IOError instead of any module-specific
            # errors; in either error case just do not consider this a
            # gzip file.
            # Note that gzip or tar_gz magic does not imply that the
            # entire file has been tested to have no compression errors!
            pass
        return gzip(path, basedir, b)
    elif len(b) > 3 and b[0:3] == "BZh":
        try:
            uncompressedBuffer = bz2.BZ2File(n).read(4096)
            if _tarMagic(uncompressedBuffer):
                return tar_bz2(path, basedir, b, uncompressedBuffer)
        except IOError:
            # bz2 raises IOError instead of any module specific errors
            pass
        return bzip(path, basedir, b)
    elif len(b) > 6 and b[0:6] == "\xFD\x37\x7A\x58\x5A\x00":
        # http://tukaani.org/xz/xz-file-format.txt
        return xz(path, basedir, b)
    elif len(b) > 4 and b[0:4] == "\xEA\x3F\x81\xBB":
        return changeset(path, basedir, b)
    elif len(b) > 4 and b[0:4] == "PK\x03\x04":
        # Zip file. Peek inside the file to extract the file list
        try:
            zf = zipfile.ZipFile(n)
            namelist = set(i.filename for i in zf.infolist()
                         if not i.filename.endswith('/') and i.file_size > 0)
        except (IOError, zipfile.BadZipfile):
            # zipfile raises IOError on some malformed zip files
            # We are producing a dummy jar or ZIP with no contents
            if path.endswith('.jar'):
                return jar(path, basedir)
            return ZIP(path, basedir)
        except RuntimeError:
            # not a proper zip archive -- likely a .car archive CNY-2871
            namelist = None
        if namelist is not None:
            if 'META-INF/application.xml' in namelist:
                return EAR(path, basedir, zipFileObj = zf, fileList = namelist)
            elif 'WEB-INF/web.xml' in namelist:
                return WAR(path, basedir, zipFileObj = zf, fileList = namelist)
            elif 'META-INF/MANIFEST.MF' in namelist:
                return jar(path, basedir, zipFileObj = zf, fileList = namelist)
            #elif path.endswith('.par'):
            #    perl archive
            else:
                return ZIP(path, basedir, zipFileObj = zf, fileList = namelist)
    elif _javaMagic(b):
        return java(path, basedir, b)
    elif len(b) > 4 and b[0:2] == "#!":
        if b.find(
            '# This wrapper script should never be moved out of the build directory.\n'
            '# If it is, it will not operate correctly.') > 0:
            return ltwrapper(path, basedir, b)
        return script(path, basedir, _line(b))
    elif (len(b) > 130
          and b[0:2] == 'MZ'
          and b[78:117] == "This program cannot be run in DOS mode."
          and b[128:130] == "PE"):
        # FIXME - this is not sufficient to detect a CIL file; this
        # will match all PE executables.  See ECMA-335, partition ii,
        # section 25
        return CIL(path, basedir, b)
    elif (len(b) > 4 and b[:4] == "\xed\xab\xee\xdb"):
        return RPM(path, basedir)
    elif (len(b) > len(MSI_MAGIC_STRINGS[0]) and
            [ x for x in MSI_MAGIC_STRINGS if b[:len(x)] == x ]):
        return MSI(path,basedir)
    elif len(b) > len(WIM_MAGIC_STRING) and \
            b[:len(WIM_MAGIC_STRING)] == WIM_MAGIC_STRING:
        return WIM(path,basedir)
    elif _tarMagic(b):
        return tar(path, basedir, b)

    return None
Example #33
def magic(path, basedir=''):
    """
    Returns a magic class with information about the file mentioned
    """
    if basedir and not basedir.endswith('/'):
        basedir += '/'

    n = basedir + path
    if not util.exists(n) or not util.isregular(n):
        return None

    oldmode = None
    mode = os.lstat(n)[stat.ST_MODE]
    if (mode & 0400) != 0400:
        oldmode = mode
        os.chmod(n, mode | 0400)

    f = file(n)
    if oldmode is not None:
        os.chmod(n, oldmode)

    b = f.read(4096)
    f.close()

    if len(b) > 4 and b[0] == '\x7f' and b[1:4] == "ELF":
        return ELF(path, basedir, b)
    elif len(b) > 14 and b[0:14] == '!<arch>\ndebian':
        return deb(path, basedir)
    elif len(b) > 7 and b[0:7] == "!<arch>":
        return ar(path, basedir, b)
    elif len(b) > 2 and b[0] == '\x1f' and b[1] == '\x8b':
        try:
            uncompressedBuffer = gzip_module.GzipFile(n).read(4096)
            if _tarMagic(uncompressedBuffer):
                return tar_gz(path, basedir, b, uncompressedBuffer)
        except (IOError, zlib.error):
            # gzip sometimes raises IOError instead of any module-specific
            # errors; in either error case just do not consider this a
            # gzip file.
            # Note that gzip or tar_gz magic does not imply that the
            # entire file has been tested to have no compression errors!
            pass
        return gzip(path, basedir, b)
    elif len(b) > 3 and b[0:3] == "BZh":
        try:
            uncompressedBuffer = bz2.BZ2File(n).read(4096)
            if _tarMagic(uncompressedBuffer):
                return tar_bz2(path, basedir, b, uncompressedBuffer)
        except IOError:
            # bz2 raises IOError instead of any module specific errors
            pass
        return bzip(path, basedir, b)
    elif len(b) > 6 and b[0:6] == "\xFD\x37\x7A\x58\x5A\x00":
        # http://tukaani.org/xz/xz-file-format.txt
        return xz(path, basedir, b)
    elif len(b) > 4 and b[0:4] == "\xEA\x3F\x81\xBB":
        return changeset(path, basedir, b)
    elif len(b) > 4 and b[0:4] == "PK\x03\x04":
        # Zip file. Peek inside the file to extract the file list
        try:
            zf = zipfile.ZipFile(n)
            namelist = set(i.filename for i in zf.infolist()
                           if not i.filename.endswith('/') and i.file_size > 0)
        except (IOError, zipfile.BadZipfile):
            # zipfile raises IOError on some malformed zip files;
            # fall back to a dummy jar or ZIP object with no contents
            if path.endswith('.jar'):
                return jar(path, basedir)
            return ZIP(path, basedir)
        except RuntimeError:
            # not a proper zip archive -- likely a .car archive CNY-2871
            namelist = None
        if namelist is not None:
            if 'META-INF/application.xml' in namelist:
                return EAR(path, basedir, zipFileObj=zf, fileList=namelist)
            elif 'WEB-INF/web.xml' in namelist:
                return WAR(path, basedir, zipFileObj=zf, fileList=namelist)
            elif 'META-INF/MANIFEST.MF' in namelist:
                return jar(path, basedir, zipFileObj=zf, fileList=namelist)
            #elif path.endswith('.par'):
            #    perl archive
            else:
                return ZIP(path, basedir, zipFileObj=zf, fileList=namelist)
    elif _javaMagic(b):
        return java(path, basedir, b)
    elif len(b) > 4 and b[0:2] == "#!":
        if b.find(
                '# This wrapper script should never be moved out of the build directory.\n'
                '# If it is, it will not operate correctly.') > 0:
            return ltwrapper(path, basedir, b)
        return script(path, basedir, _line(b))
    elif (len(b) > 130 and b[0:2] == 'MZ'
          and b[78:117] == "This program cannot be run in DOS mode."
          and b[128:130] == "PE"):
        # FIXME - this is not sufficient to detect a CIL file; this
        # will match all PE executables.  See ECMA-335, Partition II,
        # section 25
        return CIL(path, basedir, b)
    elif (len(b) > 4 and b[:4] == "\xed\xab\xee\xdb"):
        return RPM(path, basedir)
    elif (len(b) > len(MSI_MAGIC_STRINGS[0])
          and [x for x in MSI_MAGIC_STRINGS if b[:len(x)] == x]):
        return MSI(path, basedir)
    elif len(b) > len(WIM_MAGIC_STRING) and \
            b[:len(WIM_MAGIC_STRING)] == WIM_MAGIC_STRING:
        return WIM(path, basedir)
    elif _tarMagic(b):
        return tar(path, basedir, b)

    return None
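
A minimal usage sketch for the dispatcher above (the module path, build-root
path, and file names are assumptions used only for illustration): call
magic() on a path relative to a basedir and branch on the class of the
returned object.

# hypothetical usage; ELF, jar, ZIP and RPM are the classes returned above
from conary.lib import magic as magicmod

for relpath in ('usr/bin/ls', 'usr/share/java/foo.jar', 'foo.rpm'):
    m = magicmod.magic(relpath, basedir='/tmp/buildroot')
    if m is None:
        continue                        # missing, unreadable, or unrecognized
    if isinstance(m, magicmod.ELF):
        pass                            # native binary
    elif isinstance(m, (magicmod.jar, magicmod.ZIP)):
        pass                            # zip-based archive
    elif isinstance(m, magicmod.RPM):
        pass                            # RPM capsule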
Ejemplo n.º 34
0
def derive(repos,
           cfg,
           targetLabel,
           troveSpec,
           checkoutDir=None,
           extract=False,
           info=False,
           callback=None):
    """
        Performs all the commands necessary to create a derived recipe.
        First it shadows the package, then it creates a checkout of the shadow
        and converts the checkout to a derived recipe package.

        Finally, if extract is True, it installs a version of the binary
        package into a root under the checkout.

        @param repos: trovesource to search for and derive packages from
        @param cfg: configuration to use when deriving the package
        @type cfg: ConaryConfiguration object
        @param targetLabel: label to shadow the source onto and create the
                             derived package on
        @type targetLabel: versions.Label
        @param troveSpec: trove to derive from, given either as a trove spec
                           string or as a (name, versionSpec, flavor) tuple
        @param checkoutDir: directory to create the checkout in.  If None,
                             defaults to currentDir + packageName.
        @param extract: If True, creates a subdirectory of the checkout named
                         _ROOT_ (and a copy named _OLD_ROOT_) with the
                         contents of the binary of the derived package.
        @param info: If True, only display information about the shadow that
                      would be created, without performing the derive.
        @param callback: optional callback used for checkout progress;
                          defaults to DeriveCallback()
    """

    origDir = os.getcwd()
    try:
        if callback is None:
            callback = DeriveCallback()

        if isinstance(troveSpec, tuple):
            troveName, versionSpec, flavor = troveSpec
            versionSpec = str(versionSpec)
            troveSpec = cmdline.toTroveSpec(troveName, versionSpec, flavor)
        else:
            troveName, versionSpec, flavor = cmdline.parseTroveSpec(troveSpec)

        if isinstance(targetLabel, str):
            targetLabel = Label(targetLabel)

        troveName, versionSpec, flavor = cmdline.parseTroveSpec(troveSpec)
        result = repos.findTrove(cfg.buildLabel,
                                 (troveName, versionSpec, flavor), cfg.flavor)
        # findTrove shouldn't return multiple items for one package anymore
        # when a flavor is specified.
        troveToDerive, = result
        # displaying output on the screen allows there to be a record
        # of what operations were performed.  Since this command is
        # an aggregate of several commands I think that is appropriate,
        # rather than simply using a progress callback.
        log.info('Shadowing %s=%s[%s] onto %s' %
                 (troveToDerive[0], troveToDerive[1], troveToDerive[2],
                  targetLabel))
        if info:
            cfg.interactive = False

        error = branch.branch(repos,
                              cfg,
                              str(targetLabel), ['%s=%s[%s]' % troveToDerive],
                              makeShadow=True,
                              sourceOnly=True,
                              binaryOnly=False,
                              allowEmptyShadow=True,
                              info=info)
        if info or error:
            return
        shadowedVersion = troveToDerive[1].createShadow(targetLabel)
        shadowedVersion = shadowedVersion.getSourceVersion(False)
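        # strip any component from the trove name; the checkout is of the
        # source package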
        troveName = troveName.split(':')[0]

        checkoutDir = checkoutDir or troveName
        checkin.checkout(repos,
                         cfg,
                         checkoutDir, ["%s=%s" % (troveName, shadowedVersion)],
                         callback=callback)
        os.chdir(checkoutDir)

        nvfs = repos.getTrovesBySource(troveToDerive[0] + ':source',
                                       troveToDerive[1].getSourceVersion())
        trvs = repos.getTroves(nvfs)
        hasCapsule = [x for x in trvs if x.troveInfo.capsule.type()]
        if hasCapsule:
            derivedRecipeType = 'DerivedCapsuleRecipe'
            removeText = ''
        else:
            derivedRecipeType = 'DerivedPackageRecipe'
            removeText = \
"""
        # This appliance uses PHP as a command interpreter but does
        # not include a web server, so remove the file that creates
        # a dependency on the web server
        r.Remove('/etc/httpd/conf.d/php.conf')
"""

        log.info('Rewriting recipe file')
        recipeName = troveName + '.recipe'
        className = util.convertPackageNameToClassName(troveName)

        derivedRecipe = """
class %(className)sRecipe(%(recipeBaseClass)s):
    name = '%(name)s'
    version = '%(version)s'

    def setup(r):
        '''
        In this recipe, you can make modifications to the package.

        Examples:

        # This appliance has high-memory-use PHP scripts
        r.Replace('memory_limit = 8M', 'memory_limit = 32M', '/etc/php.ini')
%(removeText)s
        # This appliance requires that a few binaries be replaced
        # with binaries built from a custom archive that includes
        # a Makefile that honors the DESTDIR variable for its
        # install target.
        r.addArchive('foo.tar.gz')
        r.Make()
        r.MakeInstall()

        # This appliance requires an extra configuration file
        r.Create('/etc/myconfigfile', contents='some data')
        '''
""" % dict(className=className,
           name=troveName,
           version=shadowedVersion.trailingRevision().getVersion(),
           recipeBaseClass=derivedRecipeType,
           removeText=removeText)

        open(recipeName, 'w').write(derivedRecipe)

        log.info('Removing extra files from checkout')

        conaryState = state.ConaryStateFromFile('CONARY', repos)
        sourceState = conaryState.getSourceState()
        # clear the factory since we don't care about how the parent trove was
        # created
        sourceState.setFactory('')

        addRecipe = True
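        # drop every file inherited from the parent source trove except the
        # recipe just written; if that recipe was not already tracked, it is
        # added back below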
        for (pathId, path, fileId,
             version) in list(sourceState.iterFileList()):
            if path == recipeName:
                addRecipe = False
                continue
            sourceState.removeFile(pathId)
            if util.exists(path):
                statInfo = os.lstat(path)
                try:
                    if stat.S_ISDIR(statInfo.st_mode):
                        os.rmdir(path)
                    else:
                        os.unlink(path)
                except OSError, e:
                    log.warning("cannot remove %s: %s" % (path, e.strerror))

        conaryState.write('CONARY')

        if addRecipe:
            checkin.addFiles([recipeName])

        if extract:
            log.info('Extracting files from %s=%s[%s]' % troveToDerive)
            # extract to _ROOT_
            extractDir = os.path.join(os.getcwd(), '_ROOT_')
            ts = [(troveToDerive[0], (None, None), (troveToDerive[1],
                                                    troveToDerive[2]), True)]
            cs = repos.createChangeSet(ts, recurse=True)
            ChangesetExploder(cs, extractDir)
            # extract to _OLD_ROOT_
            secondDir = os.path.join(os.getcwd(), '_OLD_ROOT_')
            cs = repos.createChangeSet(ts, recurse=True)
            ChangesetExploder(cs, secondDir)
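
A hedged sketch of how derive() might be invoked; the configuration,
repository client, label, and trove spec below are illustrative values, not
part of the example above.

# hypothetical invocation matching the signature of derive() above
from conary import conarycfg, conaryclient
from conary.versions import Label

cfg = conarycfg.ConaryConfiguration(readConfigFiles=True)
client = conaryclient.ConaryClient(cfg)
repos = client.getRepos()

derive(repos, cfg,
       targetLabel=Label('example.rpath.org@local:devel'),
       troveSpec='foo=example.rpath.org@rpl:2',
       checkoutDir='foo-derived',
       extract=True)    # also unpack the binary into _ROOT_ / _OLD_ROOT_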
Ejemplo n.º 35
0
from conary.lib import util, sha1helper
from conary.lib.ext import file_utils
from conary.lib.ext import digest_uncompress

class MiscTest(rephelp.RepositoryHelper):

    def testMkdirIfMissing(self):
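        # capture the current umask without permanently changing it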
        umask = os.umask(022)
        os.umask(umask)

        assert(not util.exists(self.workDir + '/newdir'))
        file_utils.mkdirIfMissing(self.workDir + '/newdir')
        assert((os.stat(self.workDir + '/newdir').st_mode & 0777) ==
               (0777 & ~umask))
        assert(util.exists(self.workDir + '/newdir'))
        file_utils.mkdirIfMissing(self.workDir + '/newdir')

        try:
            file_utils.mkdirIfMissing(self.workDir + '/first/second')
        except OSError, e:
            assert(e.errno == errno.ENOENT)
        else:
            raise AssertionError, "mkdir should fail"

        self.writeFile(self.workDir + '/dirent', '')
        file_utils.mkdirIfMissing(self.workDir + '/dirent')

    def _testSha1CopyAndUncompress(self, offset):
        infd = -1
        outfd = -1
Ejemplo n.º 36
0
    def snapshotExists(self):
        return util.exists(self.snapFullName)
Ejemplo n.º 37
0
    def addPluggableRequirements(self, path, fullpath, pkgFiles, macros):
        if hasattr(self.recipe, '_getCapsulePathsForFile'):
            if self.recipe._getCapsulePathsForFile(path):
                # since capsules do not convert to relative symlinks,
                # we cannot depend on getting the realpath.  Unless
                # we resolve that, assume that capsule-provided
                # dependencies will be sufficient for pkgconfig files.
                return

        # parse pkgconfig file
        variables = {}
        requirements = set()
        libDirs = []
        libraries = set()
        variableLineRe = re.compile('^[a-zA-Z0-9]+=')
        filesRequired = []

        pcContents = [x.strip() for x in file(fullpath).readlines()]
        for pcLine in pcContents:
            # interpolate variables: assume variables are interpreted
            # line-by-line while processing
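            # repeat the substitution pass until nothing changes, since
            # variable values may themselves reference other variables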
            pcLineIter = pcLine
            while True:
                for var in variables:
                    pcLineIter = pcLineIter.replace(var, variables[var])
                if pcLine == pcLineIter:
                    break
                pcLine = pcLineIter
            pcLine = pcLineIter

            if variableLineRe.match(pcLine):
                key, val = pcLine.split('=', 1)
                variables['${%s}' % key] = val
            else:
                if (pcLine.startswith('Requires')
                        or pcLine.startswith('Lib')) and ':' in pcLine:
                    keyWord, args = pcLine.split(':', 1)
                    # split on ',' and ' '
                    argList = itertools.chain(
                        *[x.split(',') for x in args.split()])
                    argList = [x for x in argList if x]
                    if keyWord.startswith('Requires'):
                        versionNext = False
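                        # a comparison operator means the next token is a
                        # version constraint; keep only the package names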
                        for req in argList:
                            if [x for x in '<=>' if x in req]:
                                versionNext = True
                                continue
                            if versionNext:
                                versionNext = False
                                continue
                            requirements.add(req)
                    elif keyWord.startswith('Lib'):
                        for lib in argList:
                            if lib.startswith('-L'):
                                libDirs.append(lib[2:])
                            elif lib.startswith('-l'):
                                libraries.add(lib[2:])
                            else:
                                pass

        # find referenced pkgconfig files and add requirements
        for req in requirements:
            candidateFileNames = [
                '%(destdir)s%(libdir)s/pkgconfig/' + req + '.pc',
                '%(destdir)s%(datadir)s/pkgconfig/' + req + '.pc',
                '%(libdir)s/pkgconfig/' + req + '.pc',
                '%(datadir)s/pkgconfig/' + req + '.pc',
            ]
            candidateFileNames = [x % macros for x in candidateFileNames]
            candidateFiles = [util.exists(x) for x in candidateFileNames]
            if True in candidateFiles:
                filesRequired.append(
                    (candidateFileNames[candidateFiles.index(True)],
                     'pkg-config'))
            else:
                self.warn('pkg-config file %s.pc not found', req)
                continue

        # find referenced library files and add requirements
        libraryPaths = sorted(list(self.systemLibPaths))
        for libDir in libDirs:
            if libDir not in libraryPaths:
                libraryPaths.append(libDir)
        for library in libraries:
            found = False
            for libDir in libraryPaths:
                candidateFileNames = [
                    macros.destdir + libDir + '/lib' + library + '.so',
                    macros.destdir + libDir + '/lib' + library + '.a',
                    libDir + '/lib' + library + '.so',
                    libDir + '/lib' + library + '.a',
                ]
                candidateFiles = [util.exists(x) for x in candidateFileNames]
                if True in candidateFiles:
                    filesRequired.append(
                        (candidateFileNames[candidateFiles.index(True)],
                         'library'))
                    found = True
                    break

            if not found:
                self.warn('library file lib%s not found', library)
                continue

        for fileRequired, fileType in filesRequired:
            if fileRequired.startswith(macros.destdir):
                # find requirement in packaging
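                # strip the (resolved) destdir prefix so the path matches
                # what will exist on the installed system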
                fileRequired = util.normpath(os.path.realpath(fileRequired))
                fileRequired = fileRequired[
                    len(util.normpath(os.path.realpath(macros.destdir))):]
                autopkg = self.recipe.autopkg
                troveName = autopkg.componentMap[fileRequired].name
                package, component = troveName.split(':', 1)
                if component in ('devellib', 'lib'):
                    for preferredComponent in ('devel', 'devellib'):
                        develTroveName = ':'.join(
                            (package, preferredComponent))
                        if (develTroveName in autopkg.components
                                and autopkg.components[develTroveName]):
                            # found a non-empty :devel component
                            troveName = develTroveName
                            break
                self._addRequirement(path, troveName, [], pkgFiles,
                                     deps.TroveDependencies)
            else:
                troveName = self._enforceProvidedPath(fileRequired,
                                                      fileType=fileType,
                                                      unmanagedError=True)
                if troveName:
                    self._addRequirement(path, troveName, [], pkgFiles,
                                         deps.TroveDependencies)
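
The variable interpolation and Requires/Libs tokenizing above can be
exercised outside the policy class; a self-contained sketch with a made-up
.pc fragment (not part of the original module):

# standalone sketch of the .pc parsing logic used above
import itertools
import re

def parse_pc(lines):
    variables = {}
    requirements = set()
    libDirs = []
    libraries = set()
    variableLineRe = re.compile('^[a-zA-Z0-9]+=')
    for line in (x.strip() for x in lines):
        # substitute ${var} references until a fixed point is reached
        prev = None
        while prev != line:
            prev = line
            for var, val in variables.items():
                line = line.replace(var, val)
        if variableLineRe.match(line):
            key, val = line.split('=', 1)
            variables['${%s}' % key] = val
        elif (line.startswith('Requires')
              or line.startswith('Lib')) and ':' in line:
            keyWord, args = line.split(':', 1)
            argList = [x for x in itertools.chain(
                *[a.split(',') for a in args.split()]) if x]
            if keyWord.startswith('Requires'):
                skipVersion = False
                for tok in argList:
                    if [c for c in '<=>' if c in tok]:
                        skipVersion = True   # operator: a version follows
                    elif skipVersion:
                        skipVersion = False  # this token is the version
                    else:
                        requirements.add(tok)
            else:
                for tok in argList:
                    if tok.startswith('-L'):
                        libDirs.append(tok[2:])
                    elif tok.startswith('-l'):
                        libraries.add(tok[2:])
    return requirements, libDirs, libraries

sample = [
    'prefix=/usr',
    'libdir=${prefix}/lib64',
    'Requires: glib-2.0 >= 2.12, gthread-2.0',
    'Libs: -L${libdir} -lgobject-2.0',
]
# parse_pc(sample) ->
#   (set(['glib-2.0', 'gthread-2.0']), ['/usr/lib64'], set(['gobject-2.0']))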