Example #1
    def testContentsChanged(self):
        f = files.RegularFile(None)
        f.inode.perms.set(0444)
        f.inode.mtime.set(0100)
        f.inode.owner.set("root")
        f.inode.group.set("root")
        f.contents = files.RegularFileStream()
        s = 'hi'
        f.contents.size.set(len(s))
        f.contents.sha1.set(sha1helper.sha1String(s))
        f.flags.set(0)

        # this file stream diff has no contents change. verify that
        # contentsChanged returns the correct value
        diff = f.diff(f)
        assert(files.contentsChanged(diff) == False)

        f2 = f.copy()
        s = 'bye'
        f2.contents.size.set(len(s))
        f2.contents.sha1.set(sha1helper.sha1String(s))

        # this diff should have changed contents
        diff = f.diff(f2)
        assert(files.contentsChanged(diff) == True)

        # non-regular files should always return False
        s = files.SymbolicLink(None)
        s.inode.perms.set(0604)
        s.inode.mtime.set(0100)
        s.inode.owner.set('daemon')
        s.inode.group.set('uucp')
        # to make sure that referenced names "exist"
        files.userCache.nameCache['daemon'] = 2
        files.groupCache.nameCache['uucp'] = 14
        s.flags.set(0)
        s.target.set("/some/target")

        s2 = s.copy()
        s2.target.set("/some/other/target")
        diff = s.diff(s2)
        assert(files.contentsChanged(diff) == False)
        diff = s.diff(s)
        assert(files.contentsChanged(diff) == False)

        # test some pre-generated diffs - no change
        diff = '\x01-\x01\x00\x00\x05\x00"\x01\x00\x02\x01\xa4\x02\x00\x04B\x82=4\x03\x00\x08kvandine\x04\x00\x08kvandine'
        assert(files.contentsChanged(diff) == False)
        # this one has contents changed
        diff = '\x01-\x01\x00"\x01\x00\x08\x00\x00\x00\x00\x00\x00\x02q\x02\x00\x14\xac\x87%\xeb1a/&\xdf\x81\xb9O\xee\xf9\x895\xd4\xb8i\xd4\x05\x00\x1d\x02\x00\x04B\x82Ec\x03\x00\x08kvandine\x04\x00\x08kvandine'
        assert(files.contentsChanged(diff) == True)
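
Every example in this listing leans on conary's sha1helper module. As a hedged sketch of what the two helpers amount to (inferred from how they are called in these snippets, not taken from the module's actual source), they are thin wrappers over hashlib:

    import binascii
    import hashlib

    def sha1String(data):
        # raw 20-byte SHA-1 digest of a byte string
        return hashlib.sha1(data).digest()

    def sha1ToString(digest):
        # render a raw digest as a 40-character lowercase hex string
        return binascii.hexlify(digest)

This matches the usage throughout: sha1String feeds raw digests into streams and data stores, while sha1ToString produces the 40-character hex ids that the tests compare against.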
Example #2
    def _getChrootFingerprint(self, client):
        job = (sorted(self.jobList) + sorted(self.crossJobList) +
               sorted(self.bootstrapJobList))
        fingerprints = client.repos.getChangeSetFingerprints(
            job,
            recurse=False,
            withFiles=True,
            withFileContents=True,
            excludeAutoSource=True,
            mirrorMode=False)

        a = len(self.jobList)
        b = a + len(self.crossJobList)

        # Make backwards-compatible chroot fingerprints by only appending more
        # info if it is set.

        # version 1 or later fingerprint
        blob = ''.join(fingerprints[:a])  # jobList
        if (self.crossJobList or self.bootstrapJobList
                or self.cfg.rpmRequirements):
            # version 2 or later fingerprint
            blob += '\n'
            blob += ''.join(fingerprints[a:b]) + '\n'  # crossJobList
            blob += ''.join(fingerprints[b:]) + '\n'  # bootstrapJobList
            blob += '\t'.join(str(x) for x in self.cfg.rpmRequirements) + '\n'
        return sha1helper.sha1String(blob)
Example #3
 def hashTrove(self, name, version, flavor, withFiles, withFileContents):
     # we add extra delimiters here because we can be sure that they
     # will result in a unique string for each n,v,f
     return sha1helper.sha1ToString(
         sha1helper.sha1String(
             '%s=%s[%s]%s%s' %
             (name, version, flavor, withFiles, withFileContents)))
Example #4
    def __init__(self, f, sha1 = None, isSource = False, sigBlock = False):
        intro = f.read(16)
        (mag1, mag2, mag3, ver, reserved, entries, size) = \
            struct.unpack("!BBBBiii", intro)

        if mag1 != 0x8e or mag2 != 0xad or mag3 != 0xe8  or ver != 01:
            raise IOError, "bad magic for header"

        entryTable = f.read(entries * 16)

        self.isSource = isSource
        self.entries = {}
        self.data = f.read(size)
        assert len(self.data) == size

        if sha1 is not None:
            computedSha1 = sha1helper.sha1ToString(
                sha1helper.sha1String(intro + entryTable + self.data))
            if computedSha1 != sha1:
                raise IOError, "bad header sha1"

        for i in range(entries):
            (tag, dataType, offset, count) = struct.unpack("!iiii",
                                            entryTable[i * 16: i * 16 + 16])

            self.entries[tag] = (dataType, offset, count)

        if sigBlock:
            # We need to align to an 8-byte boundary.
            # So far we read the intro (which is 16 bytes) and the entry table
            # (which is a multiple of 16 bytes). So we only have to worry
            # about the actual header data not being aligned.
            alignment = size % 8
            if alignment:
                f.read(8 - alignment)
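
The constructor above reads the 16-byte intro, the entry table, and the data blob, optionally verifying a SHA-1 over all three. A hypothetical usage sketch (the class name RpmHeader and the seek past the fixed 96-byte RPM lead are assumptions; the snippet shows neither):

    # parse the signature header, then the main header, of an RPM package
    f = open('package.rpm', 'rb')
    f.seek(96)                         # skip the 96-byte RPM lead (assumed)
    sig = RpmHeader(f, sigBlock=True)  # signature header; consumes padding
    hdr = RpmHeader(f)                 # main header follows the padding
    f.close()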
Example #5
 def _getJobCachePath(self, applyList):
     applyStr = '\0'.join([
         '%s=%s[%s]--%s[%s]%s' %
         (x[0], x[1][0], x[1][1], x[2][0], x[2][1], x[3]) for x in applyList
     ])
     return self.jobPath + '/' + sha1helper.sha1ToString(
         sha1helper.sha1String(applyStr))
Example #6
    def testDiff(self):
        f = files.RegularFile(None)
        f.inode.perms.set(0444)
        f.inode.mtime.set(0100)
        f.inode.owner.set("root")
        f.inode.group.set("root")
        f.contents = files.RegularFileStream()
        s = 'hi'
        f.contents.size.set(len(s))
        f.contents.sha1.set(sha1helper.sha1String(s))
        f.flags.set(0)

        s = files.SymbolicLink(None)
        s.inode.perms.set(0604)
        s.inode.mtime.set(0100)
        s.inode.owner.set('daemon')
        s.inode.group.set('uucp')
        # to make sure that referenced names "exist"
        files.userCache.nameCache['daemon'] = 2
        files.groupCache.nameCache['uucp'] = 14
        s.flags.set(0)
        s.target.set("/some/target")

        # when the lsTag is different, the diff should just be the frozen
        # file object
        assert(s.diff(f) == s.freeze())
Example #7
    def __init__(self, f, sha1=None, isSource=False, sigBlock=False):
        intro = f.read(16)
        (mag1, mag2, mag3, ver, reserved, entries, size) = \
            struct.unpack("!BBBBiii", intro)

        if mag1 != 0x8e or mag2 != 0xad or mag3 != 0xe8 or ver != 01:
            raise IOError, "bad magic for header"

        entryTable = f.read(entries * 16)

        self.isSource = isSource
        self.entries = {}
        self.data = f.read(size)
        assert len(self.data) == size

        if sha1 is not None:
            computedSha1 = sha1helper.sha1ToString(
                sha1helper.sha1String(intro + entryTable + self.data))
            if computedSha1 != sha1:
                raise IOError, "bad header sha1"

        for i in range(entries):
            (tag, dataType, offset,
             count) = struct.unpack("!iiii", entryTable[i * 16:i * 16 + 16])

            self.entries[tag] = (dataType, offset, count)

        if sigBlock:
            # We need to align to an 8-byte boundary.
            # So far we read the intro (which is 16 bytes) and the entry table
            # (which is a multiple of 16 bytes). So we only have to worry
            # about the actual header data not being aligned.
            alignment = size % 8
            if alignment:
                f.read(8 - alignment)
Example #8
    def getAuthorizedRoles(self, cu, serverName, remoteIp,
                           entitlementClass, entitlement):
        """
        Given an entitlement, return the list of roles that the
        credentials authorize.
        """
        cacheEntry = sha1helper.sha1String("%s%s%s" % (
            serverName, entitlementClass, entitlement))
        roleIds, timeout, autoRetry = \
                self.cache.get(cacheEntry, (None, None, None))
        if (timeout is not None) and time.time() < timeout:
            return roleIds
        elif (timeout is not None):
            del self.cache[cacheEntry]
            if autoRetry is not True:
                raise errors.EntitlementTimeout([entitlement])

        if self.entCheckUrl:
            if entitlementClass is not None:
                url = "%s?server=%s;class=%s;key=%s" \
                        % (self.entCheckUrl, urllib.quote(serverName),
                           urllib.quote(entitlementClass),
                           urllib.quote(entitlement))
            else:
                url = "%s?server=%s;key=%s" \
                        % (self.entCheckUrl, urllib.quote(serverName),
                           urllib.quote(entitlement))

            if remoteIp is not None:
                url += ';remote_ip=%s' % urllib.quote(remoteIp)

            try:
                f = urllib2.urlopen(url)
                xmlResponse = f.read()
            except Exception, e:
                return set()

            p = conarycfg.EntitlementParser()

            try:
                p.parse(xmlResponse)
            except:
                return set()

            if p['server'] != serverName:
                return set()

            entitlementClass = p['class']
            entitlement = p['key']
            entitlementRetry = p['retry']
            if p['timeout'] is None:
                entitlementTimeout = self.cacheTimeout
            else:
                entitlementTimeout = p['timeout']

            if entitlementTimeout is None:
                entitlementTimeout = -1
Example #9
 def hashGroupDeps(self, groupTroves, depClass, dependency):
     depSet = deps.DependencySet()
     depSet.addDep(depClass, dependency)
     frz = depSet.freeze()
     troveList = sorted(self.hashTrove(withFiles=False,
                                       withFileContents=False,
                                       *x.getNameVersionFlavor())
                        for x in groupTroves)
     str = '[1]%s%s%s' % (len(frz), frz, ''.join(troveList))
     return sha1helper.sha1ToString(sha1helper.sha1String(str))
Example #10
 def hashGroupDeps(self, groupTroves, depClass, dependency):
     depSet = deps.DependencySet()
     depSet.addDep(depClass, dependency)
     frz = depSet.freeze()
     troveList = sorted(
         self.hashTrove(withFiles=False,
                        withFileContents=False,
                        *x.getNameVersionFlavor()) for x in groupTroves)
     str = '[1]%s%s%s' % (len(frz), frz, ''.join(troveList))
     return sha1helper.sha1ToString(sha1helper.sha1String(str))
Example #11
    def testChangeSetDumpOffset(self):
        """Stress test offset arg to dumpIter"""
        # Make a changeset with one regular file
        cs = changeset.ChangeSet()
        pathId = '0' * 16
        fileId = '0' * 20
        contents = 'contents'
        store = datastore.FlatDataStore(self.workDir)
        sha1 = sha1helper.sha1String(contents)
        store.addFile(StringIO(contents), sha1)
        rawFile = store.openRawFile(sha1)
        rawSize = os.fstat(rawFile.fileno()).st_size
        contObj = filecontents.CompressedFromDataStore(store, sha1)
        cs.addFileContents(pathId, fileId, changeset.ChangedFileTypes.file,
                contObj, cfgFile=False, compressed=True)

        # Test dumping a fully populated changeset with every possible resume
        # point
        path = os.path.join(self.workDir, 'full.ccs')
        size = cs.writeToFile(path)
        expected = open(path).read()
        self.assertEqual(len(expected), size)
        fc = filecontainer.FileContainer(util.ExtendedFile(path,
            'r', buffering=False))
        def noop(name, tag, size, subfile):
            assert tag[2:] != changeset.ChangedFileTypes.refr[4:]
            return tag, size, subfile
        for offset in range(size + 1):
            fc.reset()
            actual = ''.join(fc.dumpIter(noop, (), offset))
            self.assertEqual(actual, expected[offset:])

        # Test dumping a changeset with contents stripped out
        path = os.path.join(self.workDir, 'stubby.ccs')
        size2 = cs.writeToFile(path, withReferences=True)
        self.assertEqual(size2, size)
        fc = filecontainer.FileContainer(util.ExtendedFile(path,
            'r', buffering=False))
        expect_reference = '%s %d' % (sha1.encode('hex'), rawSize)
        def addfile(name, tag, size, subfile, dummy):
            self.assertEqual(dummy, 'dummy')
            if name == 'CONARYCHANGESET':
                return tag, size, subfile
            elif name == pathId + fileId:
                self.assertEqual(tag[2:], changeset.ChangedFileTypes.refr[4:])
                self.assertEqual(subfile.read(), expect_reference)
                tag = tag[0:2] + changeset.ChangedFileTypes.file[4:]
                rawFile.seek(0)
                return tag, rawSize, rawFile
            else:
                assert False
        for offset in range(size + 1):
            fc.reset()
            actual = ''.join(fc.dumpIter(addfile, ('dummy',), offset))
            self.assertEqual(actual, expected[offset:])
Example #12
    def _getSignature(self, troveCache):
        if self._sig is None:
            troveTupCollection = trove.TroveTupleList()

            for troveTup, inInstall, isExplicit in \
                        self._walk(troveCache, newGroups = False,
                                   recurse = True):
                if isExplicit:
                    troveTupCollection.add(*troveTup)

            s = troveTupCollection.freeze()
            self._sig = sha1helper.sha1String(s)

        return self._sig
Example #13
 def _testSha1CopyAndUncompress(self, offset):
     infd = -1
     outfd = -1
     try:
         # set up some constants
         teststr = ' ' * 1000
         path = self.workDir + '/testfile'
         # open a sparse file and seek out to the requested offset
         f = open(path, 'w')
         f.seek(offset)
         # write a gzip file containing the test string
         gz = util.BoundedStringIO()
         compressor = gzip.GzipFile(None, "w", fileobj=gz)
         compressor.write(teststr)
         compressor.close()
         gz.seek(0)
         s = gz.read()
         f.write(s)
         f.close()
         # open using unbuffered io
         infd = os.open(path, os.O_RDONLY)
         outfd = os.open(path + '-copy', os.O_CREAT | os.O_WRONLY)
         # copy from the large sparse file to the output file,
         # decompressing the data and returning a sha1 of the uncompressed
         # contents
         sha = digest_uncompress.sha1Copy((infd, offset, len(s)), [outfd])
         # also decompress to a target file, while performing a sha1sum
         # of the uncompressed contents
         target = path + '-uncompressed'
         sha2, tmpname = digest_uncompress.sha1Uncompress(
             infd, offset, len(s), os.path.dirname(target),
             os.path.basename(target))
         # make sure the sha matches what we expect
         expected = sha1helper.sha1String(teststr)
         self.assertEqual(sha, expected)
         self.assertEqual(sha2, expected)
         # make sure that the copied file matches the gzip compressed
         # string
         f = open(path + '-copy')
         self.assertEqual(f.read(), s)
         # and that it also is correctly uncompressed
         f = open(tmpname)
         self.assertEqual(f.read(), teststr)
     finally:
         if infd > 0:
             os.close(infd)
         if outfd > 0:
             os.close(outfd)
         file_utils.removeIfExists(path)
         file_utils.removeIfExists(path + '-copy')
Example #14
 def _testSha1CopyAndUncompress(self, offset):
     infd = -1
     outfd = -1
     try:
         # set up some constants
         teststr = ' ' * 1000
         path = self.workDir + '/testfile'
         # open a sparse file and seek out to the requested offset
         f = open(path, 'w')
         f.seek(offset)
         # write a gzip file containing the test string
         gz = util.BoundedStringIO()
         compressor = gzip.GzipFile(None, "w", fileobj = gz)
         compressor.write(teststr)
         compressor.close()
         gz.seek(0)
         s = gz.read()
         f.write(s)
         f.close()
         # open using unbuffered io
         infd = os.open(path, os.O_RDONLY)
         outfd = os.open(path + '-copy', os.O_CREAT | os.O_WRONLY)
         # copy from the large sparse file to the output file,
         # decompressing the data and returning a sha1 of the uncompressed
         # contents
         sha = digest_uncompress.sha1Copy((infd, offset, len(s)), [outfd])
         # also decompress to a target file, while performing a sha1sum
         # of the uncompressed contents
         target = path + '-uncompressed'
         sha2, tmpname = digest_uncompress.sha1Uncompress(infd, offset,
                 len(s), os.path.dirname(target), os.path.basename(target))
         # make sure the sha matches what we expect
         expected = sha1helper.sha1String(teststr)
         self.assertEqual(sha, expected)
         self.assertEqual(sha2, expected)
         # make sure that the copied file matches the gzip compressed
         # string
         f = open(path + '-copy')
         self.assertEqual(f.read(), s)
         # and that it also is correctly uncompressed
         f = open(tmpname)
         self.assertEqual(f.read(), teststr)
     finally:
         if infd > 0:
             os.close(infd)
         if outfd > 0:
             os.close(outfd)
         file_utils.removeIfExists(path)
         file_utils.removeIfExists(path + '-copy')
Example #15
 def testFileId(self):
     # this test verifies that the value produced as the fileId
     # of a known stream matches its pre-calculated value.
     f = files.RegularFile(None)
     f.inode.perms.set(0604)
     f.inode.mtime.set(0100)
     f.inode.owner.set("daemon")
     f.inode.group.set("uucp")
     s = "hello world"
     contents = filecontents.FromString(s)
     f.contents = files.RegularFileStream()
     f.contents.size.set(len(s))
     f.contents.sha1.set(sha1helper.sha1String(s))
     f.flags.set(0)
     expectedId = '567355867fbbcb2be55d35c3d229a7df8152fdbc'
     self.assertEqual(f.freeze(), '-\x01\x00"\x01\x00\x08\x00\x00\x00\x00\x00\x00\x00\x0b\x02\x00\x14*\xael5\xc9O\xcf\xb4\x15\xdb\xe9_@\x8b\x9c\xe9\x1e\xe8F\xed\x03\x00\x04\x00\x00\x00\x00\x05\x00\x1c\x01\x00\x02\x01\x84\x02\x00\x04\x00\x00\x00@\x03\x00\x06daemon\x04\x00\x04uucp')
     self.assertEqual(sha1helper.sha1ToString(f.fileId()), expectedId)
Example #16
    def splitFile(self, dir):
        while self.tarfh.tell() < self.tarEnd:
            size, chunk = self._getChunk()
            chunkfh = open(os.path.join(dir, self._formatFileName()), 'w')
            chunkfh.write(chunk)
            chunkfh.close()

            fileName = self._formatFileName()
            sha1sum = sha1ToString(sha1String(chunk))

            self.files.append(fileName)

            # Add both lines to the tblist for backwards compatibility with
            # older versions of Anaconda.
            self.tblist.append('%s %s %s' % (fileName, size, 1))
            self.tblist.append('%s %s %s %s' % (fileName, size, 1, sha1sum))

            self.count += 1
Example #17
def _troveFp(troveTup, sig, meta):
    if not sig and not meta:
        # we don't have sig or metadata info; just use the trove tuple
        # itself
        t = troveTup
    else:
        (sigPresent, sigBlock) = sig
        l = []
        if sigPresent >= 1:
            l.append(base64.decodestring(sigBlock))
        (metaPresent, metaBlock) = meta
        if metaPresent >= 1:
            l.append(base64.decodestring(metaBlock))
        if sigPresent or metaPresent:
            t = tuple(l)
        else:
            t = ("missing", ) + troveTup

    return sha1helper.sha1String("\0".join(t))
Example #18
    def _checkPassword(self, user, salt, password, challenge, remoteIp = None):
        if challenge is ValidPasswordToken:
            # Short-circuit for shim-using code that does its own
            # authentication, e.g. through one-time tokens or session
            # data.
            return True

        if self.cacheTimeout:
            cacheEntry = sha1helper.sha1String("%s%s" % (user, challenge))
            timeout = self.pwCache.get(cacheEntry, None)
            if timeout is not None and time.time() < timeout:
                return True

        if self.pwCheckUrl:
            try:
                url = "%s?user=%s;password=%s" \
                        % (self.pwCheckUrl, urllib.quote(user),
                           urllib.quote(challenge))

                if remoteIp is not None:
                    url += ';remote_ip=%s' % urllib.quote(remoteIp)

                f = urllib2.urlopen(url)
                xmlResponse = f.read()
            except:
                return False

            p = PasswordCheckParser()
            p.parse(xmlResponse)

            isValid = p.validPassword()
        else:
            m = digestlib.md5()
            m.update(salt)
            m.update(challenge)
            isValid = m.hexdigest() == password

        if isValid and self.cacheTimeout:
            # cacheEntry is still around from above
            self.pwCache[cacheEntry] = time.time() + self.cacheTimeout

        return isValid
Example #19
    def _checkPassword(self, user, salt, password, challenge, remoteIp=None):
        if challenge is ValidPasswordToken:
            # Short-circuit for shim-using code that does its own
            # authentication, e.g. through one-time tokens or session
            # data.
            return True

        if self.cacheTimeout:
            cacheEntry = sha1helper.sha1String("%s%s" % (user, challenge))
            timeout = self.pwCache.get(cacheEntry, None)
            if timeout is not None and time.time() < timeout:
                return True

        if self.pwCheckUrl:
            try:
                url = "%s?user=%s;password=%s" \
                        % (self.pwCheckUrl, urllib.quote(user),
                           urllib.quote(challenge))

                if remoteIp is not None:
                    url += ';remote_ip=%s' % urllib.quote(remoteIp)

                f = urllib2.urlopen(url)
                xmlResponse = f.read()
            except:
                return False

            p = PasswordCheckParser()
            p.parse(xmlResponse)

            isValid = p.validPassword()
        else:
            m = digestlib.md5()
            m.update(salt)
            m.update(challenge)
            isValid = m.hexdigest() == password

        if isValid and self.cacheTimeout:
            # cacheEntry is still around from above
            self.pwCache[cacheEntry] = time.time() + self.cacheTimeout

        return isValid
Example #20
 def testFileId(self):
     # this test verifies that the value produced as the fileId
     # of a known stream matches its pre-calculated value.
     f = files.RegularFile(None)
     f.inode.perms.set(0604)
     f.inode.mtime.set(0100)
     f.inode.owner.set("daemon")
     f.inode.group.set("uucp")
     # to make sure that referenced names "exist"
     files.userCache.nameCache['daemon'] = 2
     files.groupCache.nameCache['uucp'] = 14
     s = "hello world"
     contents = filecontents.FromString(s)
     f.contents = files.RegularFileStream()
     f.contents.size.set(len(s))
     f.contents.sha1.set(sha1helper.sha1String(s))
     f.flags.set(0)
     expectedId = '567355867fbbcb2be55d35c3d229a7df8152fdbc'
     self.assertEqual(f.freeze(), '-\x01\x00"\x01\x00\x08\x00\x00\x00\x00\x00\x00\x00\x0b\x02\x00\x14*\xael5\xc9O\xcf\xb4\x15\xdb\xe9_@\x8b\x9c\xe9\x1e\xe8F\xed\x03\x00\x04\x00\x00\x00\x00\x05\x00\x1c\x01\x00\x02\x01\x84\x02\x00\x04\x00\x00\x00@\x03\x00\x06daemon\x04\x00\x04uucp')
     self.assertEqual(sha1helper.sha1ToString(f.fileId()), expectedId)
Example #21
    def _getChrootFingerprint(self, client):
        job = (sorted(self.jobList) + sorted(self.crossJobList) +
                sorted(self.bootstrapJobList))
        fingerprints = client.repos.getChangeSetFingerprints(job,
                recurse=False, withFiles=True, withFileContents=True,
                excludeAutoSource=True, mirrorMode=False)

        a = len(self.jobList)
        b = a + len(self.crossJobList)

        # Make backwards-compatible chroot fingerprints by only appending more
        # info if it is set.

        # version 1 or later fingerprint
        blob = ''.join(fingerprints[:a])  # jobList
        if (self.crossJobList or self.bootstrapJobList or
                self.cfg.rpmRequirements):
            # version 2 or later fingerprint
            blob += '\n'
            blob += ''.join(fingerprints[a:b]) + '\n'  # crossJobList
            blob += ''.join(fingerprints[b:]) + '\n'  # bootstrapJobList
            blob += '\t'.join(str(x) for x in self.cfg.rpmRequirements) + '\n'
        return sha1helper.sha1String(blob)
Example #22
 def verify(self, message):
     return self() == sha1helper.sha1String(message)
Example #23
 def compute(self, message):
     self.set(sha1helper.sha1String(message))
Example #24
 def hashFile(self, fileId, fileVersion):
     # we add extra delimiters here because we can be sure that they
     # will result in a unique string for each fileId, fileVersion pair
     return sha1helper.sha1ToString(
                 sha1helper.sha1String('[0]%s=%s' % (fileId, fileVersion)))
Example #25
 def hashTroveInfo(self, jobId, name, version, flavor):
     return sha1helper.sha1ToString(
             sha1helper.sha1String('%s %s=%s[%s]' % (jobId, name, version, flavor)))
Example #26
 def hashFile(self, fileId, fileVersion):
     # we add extra delimiters here because we can be sure that they
     # will result in a unique string for each fileId, fileVersion pair
     return sha1helper.sha1ToString(
         sha1helper.sha1String('[0]%s=%s' % (fileId, fileVersion)))
Example #27
    def testOwnership(self):
        f = files.RegularFile(None)
        f.inode.perms.set(0604)
        f.inode.mtime.set(0100)
        f.inode.owner.set("daemon")
        f.inode.group.set("uucp")
        # to make sure that referenced names "exist"
        files.userCache.nameCache['daemon'] = 2
        files.userCache.nameCache['uucp'] = 10
        files.groupCache.nameCache['uucp'] = 14

        s = "hello world"
        contents = filecontents.FromString(s)
        f.contents = files.RegularFileStream()
        f.contents.size.set(len(s))
        f.contents.sha1.set(sha1helper.sha1String(s))

        f.flags.set(0)

        # and setuid root
        fr = files.RegularFile(None)
        fr.inode.perms.set(06755)
        fr.inode.mtime.set(0100)
        fr.inode.owner.set("root")
        fr.inode.group.set("root")
        fr.contents = files.RegularFileStream()
        fr.contents.size.set(len(s))
        fr.contents.sha1.set(sha1helper.sha1String(s))
        fr.flags.set(0)

        # and unwriteable
        fo = files.RegularFile(None)
        fo.inode.perms.set(0444)
        fo.inode.mtime.set(0100)
        fo.inode.owner.set("root")
        fo.inode.group.set("root")
        fo.contents = files.RegularFileStream()
        fo.contents.size.set(len(s))
        fo.contents.sha1.set(sha1helper.sha1String(s))
        fo.flags.set(0)

        # and secret
        fs = files.RegularFile(None)
        fs.inode.perms.set(0400)
        fs.inode.mtime.set(0100)
        fs.inode.owner.set("root")
        fs.inode.group.set("root")
        fs.contents = files.RegularFileStream()
        fs.contents.size.set(len(s))
        fs.contents.sha1.set(sha1helper.sha1String(s))
        fs.flags.set(0)

        f2 = f.copy()
        assert(f == f2)
        d = tempfile.mkdtemp()

        # before we mimic root, test a non-root restore of a setuid/setgid file
        pr = d+"/setuid"
        fr.restore(contents, d, pr)
        assert not os.stat(pr).st_mode & 04000

        try:
            self.mimicRoot()
            p = d + "/file"
            f.restore(contents, d, p)
            assert self.compareChownLog([ (p, 2, 14) ])
            self.chownLog = []

            f.inode.owner.set("rootroot")
            self.logCheck(f.restore, (contents, d, p),
                          "warning: user rootroot does not exist - using root")
            assert self.compareChownLog([ (p, 0, 14) ])
            self.chownLog = []

            f.inode.owner.set("uucp")
            f.inode.group.set("grpgrp")
            self.logCheck(f.restore, (contents, d, p),
                          "warning: group grpgrp does not exist - using root")
            assert self.compareChownLog([ (p, 10, 0) ])

            self.chmodLog = []
            pr = d+"/setuid"
            fr.restore(contents, d, pr)
            assert self.compareChmodLog([ (pr, 06755) ])
            assert os.stat(pr).st_mode & 07777 == 06755
Example #28
 def hashTroveInfo(self, jobId, name, version, flavor):
     return sha1helper.sha1ToString(
         sha1helper.sha1String('%s %s=%s[%s]' %
                               (jobId, name, version, flavor)))
Example #29
 def hashTrove(self, trove):
     return sha1helper.sha1ToString(
         sha1helper.sha1String('%s %s=%s[%s]' %
                               (trove.jobId, trove.getName(),
                                trove.getVersion(), trove.getFlavor())))
Example #30
 def hashTrove(self, name, version, flavor, withFiles, withFileContents):
     # we add extra delimiters here because we can be sure that they
     # will result in a unique string for each n,v,f
     return sha1helper.sha1ToString(
             sha1helper.sha1String('%s=%s[%s]%s%s' % (name, version, flavor, withFiles, withFileContents)))
Example #31
 def _getJobCachePath(self, applyList):
     applyStr = '\0'.join(['%s=%s[%s]--%s[%s]%s' % (x[0], x[1][0], x[1][1], x[2][0], x[2][1], x[3]) for x in applyList])
     return self.jobPath + '/' + sha1helper.sha1ToString(sha1helper.sha1String(applyStr))
Example #32
 def _makeSessionId(self, authItemList):
     return sha1helper.sha1String('\0'.join([str(x) for x in authItemList]))
Example #33
 def fileId(self):
     return sha1helper.sha1String(self.freeze(skipSet = { 'mtime' : True }))
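
Because mtime is in the skipSet, it never enters the frozen stream that fileId hashes; this is why the testFileId examples above can pin a stable expectedId even though they set mtime. A minimal illustration (assuming f is a files.RegularFile built as in those tests):

    # mtime does not participate in the fileId
    f2 = f.copy()
    f2.inode.mtime.set(0200)    # different mtime, same everything else
    assert f.fileId() == f2.fileId()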
Example #34
    def getAuthorizedRoles(self, cu, serverName, remoteIp,
                           entitlementClass, entitlement):
        """
        Given an entitlement, return the list of roles that the
        credentials authorize.
        """
        cacheEntry = sha1helper.sha1String("%s%s%s" % (
            serverName, entitlementClass, entitlement))
        roleIds, timeout, autoRetry = \
                self.cache.get(cacheEntry, (None, None, None))
        if (timeout is not None) and time.time() < timeout:
            return roleIds
        elif (timeout is not None):
            del self.cache[cacheEntry]
            if autoRetry is not True:
                raise errors.EntitlementTimeout([entitlement])

        if self.entCheckUrl:
            if entitlementClass is not None:
                url = "%s?server=%s;class=%s;key=%s" \
                        % (self.entCheckUrl, urllib.quote(serverName),
                           urllib.quote(entitlementClass),
                           urllib.quote(entitlement))
            else:
                url = "%s?server=%s;key=%s" \
                        % (self.entCheckUrl, urllib.quote(serverName),
                           urllib.quote(entitlement))

            if remoteIp is not None:
                url += ';remote_ip=%s' % urllib.quote(remoteIp)

            try:
                f = urllib2.urlopen(url)
                xmlResponse = f.read()
            except Exception:
                return set()

            p = conarycfg.EntitlementParser()

            try:
                p.parse(xmlResponse)
            except:
                return set()

            if p['server'] != serverName:
                return set()

            entitlementClass = p['class']
            entitlement = p['key']
            entitlementRetry = p['retry']
            if p['timeout'] is None:
                entitlementTimeout = self.cacheTimeout
            else:
                entitlementTimeout = p['timeout']

            if entitlementTimeout is None:
                entitlementTimeout = -1

        # look up entitlements
        cu.execute("""
        SELECT UserGroups.userGroupId, UserGroups.accept_flags
        FROM Entitlements
        JOIN EntitlementAccessMap USING (entGroupId)
        JOIN UserGroups USING (userGroupId)
        WHERE entitlement=?
        """, entitlement)

        roleIds = dict((x[0], deps.ThawFlavor(x[1])) for x in cu)
        if self.entCheckUrl:
            # cacheEntry is still set from the cache check above
            self.cache[cacheEntry] = (roleIds,
                                      time.time() + entitlementTimeout,
                                      entitlementRetry)

        return roleIds
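
For concreteness, with made-up values (an entCheckUrl of https://ent.example.com/check, server repo.example.com, class customers, key abc123, and remote IP 10.0.0.5), the format strings above would produce:

    https://ent.example.com/check?server=repo.example.com;class=customers;key=abc123;remote_ip=10.0.0.5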
Example #35
    def testOwnership(self):
        f = files.RegularFile(None)
        f.inode.perms.set(0604)
        f.inode.mtime.set(0100)
        f.inode.owner.set("daemon")
        f.inode.group.set("uucp")
        # to make sure that referenced names "exist"
        files.userCache.nameCache['daemon'] = 2
        files.userCache.nameCache['uucp'] = 10
        files.groupCache.nameCache['uucp'] = 14

        s = "hello world"
        contents = filecontents.FromString(s)
        f.contents = files.RegularFileStream()
        f.contents.size.set(len(s))
        f.contents.sha1.set(sha1helper.sha1String(s))

        f.flags.set(0)

        # and setuid root
        fr = files.RegularFile(None)
        fr.inode.perms.set(06755)
        fr.inode.mtime.set(0100)
        fr.inode.owner.set("root")
        fr.inode.group.set("root")
        fr.contents = files.RegularFileStream()
        fr.contents.size.set(len(s))
        fr.contents.sha1.set(sha1helper.sha1String(s))
        fr.flags.set(0)

        # and unwriteable
        fo = files.RegularFile(None)
        fo.inode.perms.set(0444)
        fo.inode.mtime.set(0100)
        fo.inode.owner.set("root")
        fo.inode.group.set("root")
        fo.contents = files.RegularFileStream()
        fo.contents.size.set(len(s))
        fo.contents.sha1.set(sha1helper.sha1String(s))
        fo.flags.set(0)

        # and secret
        fs = files.RegularFile(None)
        fs.inode.perms.set(0400)
        fs.inode.mtime.set(0100)
        fs.inode.owner.set("root")
        fs.inode.group.set("root")
        fs.contents = files.RegularFileStream()
        fs.contents.size.set(len(s))
        fs.contents.sha1.set(sha1helper.sha1String(s))
        fs.flags.set(0)

        f2 = f.copy()
        assert(f == f2)
        d = tempfile.mkdtemp()

        # before we mimic root, test a non-root restore of a setuid/setgid file
        pr = d+"/setuid"
        fr.restore(contents, d, pr)
        assert not os.stat(pr).st_mode & 04000

        try:
            self.mimicRoot()
            p = d + "/file"
            f.restore(contents, d, p)
            assert self.compareChownLog([ (p, 2, 14) ])
            self.chownLog = []

            f.inode.owner.set("rootroot")
            self.logCheck(f.restore, (contents, d, p),
                          "warning: user rootroot does not exist - using root")
            assert self.compareChownLog([ (p, 0, 14) ])
            self.chownLog = []

            f.inode.owner.set("uucp")
            f.inode.group.set("grpgrp")
            self.logCheck(f.restore, (contents, d, p),
                          "warning: group grpgrp does not exist - using root")
            assert self.compareChownLog([ (p, 10, 0) ])

            self.chmodLog = []
            pr = d+"/setuid"
            fr.restore(contents, d, pr)
            assert self.compareChmodLog([ (pr, 06755) ])
            assert os.stat(pr).st_mode & 07777 == 06755

            self.chmodLog = []
            po = d+"/unwriteable"
            fo.restore(contents, d, po)
            assert self.compareChmodLog([ (po, 0444) ])
            assert os.stat(po).st_mode & 07777 == 0444

            self.chmodLog = []
            ps = d+"/secret"
            fs.restore(contents, d, ps)
            assert self.compareChmodLog([ (ps, 0400) ])
            assert os.stat(ps).st_mode & 07777 == 0400
            self.chmodLog = []
        finally:
            self.realRoot()
            shutil.rmtree(d)
Example #36
    def getAuthorizedRoles(self, cu, serverName, remoteIp, entitlementClass,
                           entitlement):
        """
        Given an entitlement, return the list of roles that the
        credentials authorize.
        """
        cacheEntry = sha1helper.sha1String(
            "%s%s%s" % (serverName, entitlementClass, entitlement))
        roleIds, timeout, autoRetry = \
                self.cache.get(cacheEntry, (None, None, None))
        if (timeout is not None) and time.time() < timeout:
            return roleIds
        elif (timeout is not None):
            del self.cache[cacheEntry]
            if autoRetry is not True:
                raise errors.EntitlementTimeout([entitlement])

        if self.entCheckUrl:
            if entitlementClass is not None:
                url = "%s?server=%s;class=%s;key=%s" \
                        % (self.entCheckUrl, urllib.quote(serverName),
                           urllib.quote(entitlementClass),
                           urllib.quote(entitlement))
            else:
                url = "%s?server=%s;key=%s" \
                        % (self.entCheckUrl, urllib.quote(serverName),
                           urllib.quote(entitlement))

            if remoteIp is not None:
                url += ';remote_ip=%s' % urllib.quote(remoteIp)

            try:
                f = urllib2.urlopen(url)
                xmlResponse = f.read()
            except Exception:
                return set()

            p = conarycfg.EntitlementParser()

            try:
                p.parse(xmlResponse)
            except:
                return set()

            if p['server'] != serverName:
                return set()

            entitlementClass = p['class']
            entitlement = p['key']
            entitlementRetry = p['retry']
            if p['timeout'] is None:
                entitlementTimeout = self.cacheTimeout
            else:
                entitlementTimeout = p['timeout']

            if entitlementTimeout is None:
                entitlementTimeout = -1

        # look up entitlements
        cu.execute(
            """
        SELECT UserGroups.userGroupId, UserGroups.accept_flags
        FROM Entitlements
        JOIN EntitlementAccessMap USING (entGroupId)
        JOIN UserGroups USING (userGroupId)
        WHERE entitlement=?
        """, entitlement)

        roleIds = dict((x[0], deps.ThawFlavor(x[1])) for x in cu)
        if self.entCheckUrl:
            # cacheEntry is still set from the cache check above
            self.cache[cacheEntry] = (roleIds,
                                      time.time() + entitlementTimeout,
                                      entitlementRetry)

        return roleIds
Example #37
    def testChangeSetDumpOffset(self):
        """Stress test offset arg to dumpIter"""
        # Make a changeset with one regular file
        cs = changeset.ChangeSet()
        pathId = '0' * 16
        fileId = '0' * 20
        contents = 'contents'
        store = datastore.FlatDataStore(self.workDir)
        sha1 = sha1helper.sha1String(contents)
        store.addFile(StringIO(contents), sha1)
        rawFile = store.openRawFile(sha1)
        rawSize = os.fstat(rawFile.fileno()).st_size
        contObj = filecontents.CompressedFromDataStore(store, sha1)
        cs.addFileContents(pathId,
                           fileId,
                           changeset.ChangedFileTypes.file,
                           contObj,
                           cfgFile=False,
                           compressed=True)

        # Test dumping a fully populated changeset with every possible resume
        # point
        path = os.path.join(self.workDir, 'full.ccs')
        size = cs.writeToFile(path)
        expected = open(path).read()
        self.assertEqual(len(expected), size)
        fc = filecontainer.FileContainer(
            util.ExtendedFile(path, 'r', buffering=False))

        def noop(name, tag, size, subfile):
            assert tag[2:] != changeset.ChangedFileTypes.refr[4:]
            return tag, size, subfile

        for offset in range(size + 1):
            fc.reset()
            actual = ''.join(fc.dumpIter(noop, (), offset))
            self.assertEqual(actual, expected[offset:])

        # Test dumping a changeset with contents stripped out
        path = os.path.join(self.workDir, 'stubby.ccs')
        size2 = cs.writeToFile(path, withReferences=True)
        self.assertEqual(size2, size)
        fc = filecontainer.FileContainer(
            util.ExtendedFile(path, 'r', buffering=False))
        expect_reference = '%s %d' % (sha1.encode('hex'), rawSize)

        def addfile(name, tag, size, subfile, dummy):
            self.assertEqual(dummy, 'dummy')
            if name == 'CONARYCHANGESET':
                return tag, size, subfile
            elif name == pathId + fileId:
                self.assertEqual(tag[2:], changeset.ChangedFileTypes.refr[4:])
                self.assertEqual(subfile.read(), expect_reference)
                tag = tag[0:2] + changeset.ChangedFileTypes.file[4:]
                rawFile.seek(0)
                return tag, rawSize, rawFile
            else:
                assert False

        for offset in range(size + 1):
            fc.reset()
            actual = ''.join(fc.dumpIter(addfile, ('dummy', ), offset))
            self.assertEqual(actual, expected[offset:])
Example #38
def confirmString():
    """
    Generate a confirmation string
    """
    hash = sha1helper.sha1String(str(random.random()) + str(time.time()))
    return sha1helper.sha1ToString(hash)
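
confirmString derives its token from random.random() and the wall clock, which is adequate for a confirmation nonce but not cryptographically unpredictable. A hedged variant for comparison (a substitution, not what the source does) could draw from the OS entropy pool instead:

    import os

    def confirmStringUrandom():
        # like confirmString(), but seeded from os.urandom rather than
        # the predictable Mersenne Twister state and the clock
        hash = sha1helper.sha1String(os.urandom(20))
        return sha1helper.sha1ToString(hash)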
Example #39
 def hashTrove(self, trove):
     return sha1helper.sha1ToString(
             sha1helper.sha1String('%s %s=%s[%s]' % (trove.jobId,
                   trove.getName(), trove.getVersion(), trove.getFlavor())))