Example #1
 def _keys(self, items, args, kwargs):
     if kwargs is None:
         kwargs = ()
     else:
         kwargs = tuple(sorted(kwargs.items()))
     common = digestlib.sha1(str(args + kwargs)).digest()
     return [digestlib.sha1(common + str(x)).hexdigest() for x in items]
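All of the snippets on this page use digestlib.sha1, which exposes the same streaming interface as hashlib.sha1 (a constructor taking optional initial data, plus update(), digest(), and hexdigest()). A minimal sketch of the keying pattern above, with hashlib.sha1 standing in for digestlib.sha1 and illustrative names that are not part of the original module:

import hashlib

def cache_keys(items, args=(), kwargs=None):
    # Reduce args/kwargs to one stable "common" digest, then derive a
    # per-item key from it, mirroring _keys above.
    kwargs = tuple(sorted(kwargs.items())) if kwargs else ()
    common = hashlib.sha1(str(args + kwargs)).digest()
    return [hashlib.sha1(common + str(x)).hexdigest() for x in items]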
Example #2
    def _create(self, certs, purpose, desc, issuer=None, common=None):
        """Create and store one certificate."""
        if purpose in certs:
            return
        cu = self.db.cursor()

        subject = X509.X509_Name()
        subject.O = desc
        subject.OU = 'Created at ' + time.strftime('%F %T%z')
        if common is not None:
            subject.CN = common

        issuer_pkey = issuer_subject = issuer_fingerprint = serial = None
        if issuer is None:
            isCA = True
        else:
            isCA = False
            issuer_x509 = X509.load_cert_string(issuer[0])
            issuer_pkey = EVP.load_key_string(issuer[1])
            issuer_subject = issuer_x509.get_subject()
            issuer_fingerprint = digestlib.sha1(
                    issuer_x509.as_der()).hexdigest()

            cu.execute("""UPDATE pki_certificates
                SET ca_serial_index = ca_serial_index + 1
                WHERE fingerprint = %s
                RETURNING ca_serial_index
                """, (issuer_fingerprint,))
            serial, = cu.fetchone()

        # Create certificates with a 'not before' date 1 day in the past, just
        # in case initial setup sets the clock backwards.
        rsa, x509 = gencert.new_cert(KEY_LENGTH, subject, EXPIRY,
                issuer=issuer_subject, issuer_evp=issuer_pkey, isCA=isCA,
                serial=serial, timestamp_offset=-86400)

        fingerprint = digestlib.sha1(x509.as_der()).hexdigest()
        pkey_pem = rsa.as_pem(None)
        x509_pem = x509.as_pem()

        cu.execute("""INSERT INTO pki_certificates (
                fingerprint, purpose, is_ca, x509_pem, pkey_pem,
                issuer_fingerprint, ca_serial_index, time_issued, time_expired
                )
            VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)""",
            (fingerprint, purpose, isCA, x509_pem, pkey_pem,
                issuer_fingerprint, 0,
                str(x509.get_not_before()), str(x509.get_not_after()),
                ))

        log.info("Created certificate %s for purpose %r%s%s",
                fingerprint, purpose,
                (issuer_fingerprint and (" (issuer %s)" % issuer_fingerprint)
                    or ""),
                self.dry_run and " (dry run)" or "")

        certs[purpose] = x509_pem, pkey_pem
Example #3
    def incrementCount(self, hash, fileObj=None, precompressed=True):
        """
        Increments the count by one.  If it becomes one (the file is
        new), the contents of fileObj are stored into that path.
        """
        if len(hash) != 40:
            hash = sha1helper.sha1ToString(hash)
        cu = self.db.cursor()
        cu.execute("SELECT COUNT(*) FROM DataStore WHERE hash=?", hash)
        exists = cu.next()[0]

        if exists:
            cu.execute("UPDATE DataStore SET count=count+1 WHERE hash=?", hash)
        else:
            if precompressed:
                # it's precompressed as a gzip stream, and we need a
                # zlib stream. just decompress it.
                gzObj = gzip.GzipFile(mode="r", fileobj=fileObj)
                rawData = gzObj.read()
                del gzObj
            else:
                rawData = fileObj.read()

            data = zlib.compress(rawData)
            digest = digestlib.sha1()
            digest.update(rawData)
            if digest.hexdigest() != hash:
                raise errors.IntegrityError

            cu.execute("INSERT INTO DataStore VALUES(?, 1, ?)", hash, data)
Example #4
    def _writeFile(cls, fileObj, outFds, precompressed, computeSha1):
        if precompressed and hasattr(fileObj, '_fdInfo'):
            (fd, start, size) = fileObj._fdInfo()
            pid = os.getpid()
            realHash = digest_uncompress.sha1Copy((fd, start, size), outFds)
            for x in outFds:
                cls._fchmod(x)
                os.close(x)

            return realHash
        else:
            for fd in outFds:
                outFileObj = os.fdopen(fd, "w")
                contentSha1 = digestlib.sha1()
                if precompressed and computeSha1:
                    tee = Tee(fileObj, outFileObj)
                    uncompObj = gzip.GzipFile(mode = "r", fileobj = tee)
                    s = uncompObj.read(128 * 1024)
                    while s:
                        contentSha1.update(s)
                        s = uncompObj.read(128 * 1024)
                    uncompObj.close()
                elif precompressed:
                    util.copyfileobj(fileObj, outFileObj)
                else:
                    dest = gzip.GzipFile(mode = "w", fileobj = outFileObj)
                    util.copyfileobj(fileObj, dest, digest = contentSha1)
                    dest.close()

                # this closes tmpFd for us
                cls._fchmod(fd)
                outFileObj.close()
                fileObj.seek(0)

            return contentSha1.digest()
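The Tee used in the digest-while-copying branch above is not part of this excerpt; presumably it mirrors every block read from one stream into a second one, so a single pass can feed the gzip decompressor (for the content SHA-1) while the raw compressed bytes land in the output file. A minimal sketch of such a wrapper, under that assumption:

class Tee(object):
    # Illustrative stand-in: everything read from src is also written
    # to dest, so decompressing for the digest also copies the file.
    def __init__(self, src, dest):
        self.src = src
        self.dest = dest

    def read(self, size=-1):
        buf = self.src.read(size)
        self.dest.write(buf)
        return buf

    # Python 2's gzip probes the underlying file with tell()/seek() at
    # member boundaries; delegate to the source without echoing to dest.
    def tell(self):
        return self.src.tell()

    def seek(self, offset, whence=0):
        return self.src.seek(offset, whence)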
Example #5
    def restore(self,
                fileContents,
                root,
                target,
                journal=None,
                sha1=None,
                nameLookup=True,
                **kwargs):

        keepTempfile = kwargs.get('keepTempfile', False)
        destTarget = target

        if fileContents is not None:
            # this is first to let us copy the contents of a file
            # onto itself; the unlink helps that to work
            src = fileContents.get()
            inFd = None

            if fileContents.isCompressed() and hasattr(src, '_fdInfo'):
                # inFd is None if we can't figure this information out
                # (for _LazyFile for instance)
                (inFd, inStart, inSize) = src._fdInfo()

            path, name = os.path.split(target)
            if not os.path.isdir(path):
                util.mkdirChain(path)

            # Uncompress to a temporary file, using the accelerated
            # implementation if possible.
            if inFd is not None and util.sha1Uncompress is not None:
                actualSha1, tmpname = util.sha1Uncompress(
                    inFd, inStart, inSize, path, name)
            else:
                if fileContents.isCompressed():
                    src = gzip.GzipFile(mode='r', fileobj=src)
                tmpfd, tmpname = tempfile.mkstemp(name, '.ct', path)
                try:
                    d = digestlib.sha1()
                    f = os.fdopen(tmpfd, 'w')
                    util.copyfileobj(src, f, digest=d)
                    f.close()
                    actualSha1 = d.digest()
                except:
                    os.unlink(tmpname)
                    raise

            if keepTempfile:
                # Make a hardlink "copy" for the caller to use
                destTarget = tmpname + '.ptr'
                os.link(tmpname, destTarget)
            try:
                os.rename(tmpname, target)
            except OSError, err:
                if err.args[0] != errno.EISDIR:
                    raise
                os.rmdir(target)
                os.rename(tmpname, target)

            if (sha1 is not None and sha1 != actualSha1):
                raise Sha1Exception(target)
Example #6
    def incrementCount(self, hash, fileObj = None, precompressed = True):
        """
        Increments the count by one.  If it becomes one (the file is
        new), the contents of fileObj are stored into that path.
        """
        if len(hash) != 40:
            hash = sha1helper.sha1ToString(hash)
        cu = self.db.cursor()
        cu.execute("SELECT COUNT(*) FROM DataStore WHERE hash=?", hash)
        exists = cu.next()[0]

        if exists:
            cu.execute("UPDATE DataStore SET count=count+1 WHERE hash=?",
                       hash)
        else:
            if precompressed:
                # it's precompressed as a gzip stream, and we need a
                # zlib stream. just decompress it.
                gzObj = gzip.GzipFile(mode = "r", fileobj = fileObj)
                rawData = gzObj.read()
                del gzObj
            else:
                rawData = fileObj.read()

            data = zlib.compress(rawData)
            digest = digestlib.sha1()
            digest.update(rawData)
            if digest.hexdigest() != hash:
                raise errors.IntegrityError

            cu.execute("INSERT INTO DataStore VALUES(?, 1, ?)",
                       hash, data)
Example #7
    def _writeFile(cls, fileObj, outFds, precompressed, computeSha1):
        if precompressed and hasattr(fileObj, '_fdInfo'):
            (fd, start, size) = fileObj._fdInfo()
            pid = os.getpid()
            realHash = digest_uncompress.sha1Copy((fd, start, size), outFds)
            for x in outFds:
                cls._fchmod(x)
                os.close(x)

            return realHash
        else:
            for fd in outFds:
                outFileObj = os.fdopen(fd, "w")
                contentSha1 = digestlib.sha1()
                if precompressed and computeSha1:
                    tee = Tee(fileObj, outFileObj)
                    uncompObj = gzip.GzipFile(mode="r", fileobj=tee)
                    s = uncompObj.read(128 * 1024)
                    while s:
                        contentSha1.update(s)
                        s = uncompObj.read(128 * 1024)
                    uncompObj.close()
                elif precompressed:
                    util.copyfileobj(fileObj, outFileObj)
                else:
                    dest = gzip.GzipFile(mode="w", fileobj=outFileObj)
                    util.copyfileobj(fileObj, dest, digest=contentSha1)
                    dest.close()

                # this closes tmpFd for us
                cls._fchmod(fd)
                outFileObj.close()
                fileObj.seek(0)

            return contentSha1.digest()
Example #8
    def apply(self,
              justDatabase=False,
              noScripts=False,
              capsuleChangeSet=None):
        if capsuleChangeSet:
            # Previous jobs will have moved the pointer in the auxiliary
            # changeset, so reset it at the start of each job.
            capsuleChangeSet.reset()
        tmpDir = os.path.join(self.root, 'var/tmp')
        if not os.path.isdir(tmpDir):
            # For empty roots or roots that are not systems (e.g. source
            # checkouts), just put capsules in the root directory.
            tmpDir = self.root
        fileDict = {}
        for kind, obj in sorted(self.capsuleClasses.items()):
            fileDict.update(
                dict(((x[0], x[2], x[3]), x[1]) for x in obj._filesNeeded()))

        try:
            for ((pathId, fileId, sha1), path) in sorted(fileDict.items()):
                tmpfd, tmpname = tempfile.mkstemp(dir=tmpDir,
                                                  prefix=path,
                                                  suffix='.conary')
                fType, fContents = self.changeSet.getFileContents(
                    pathId, fileId)
                if (fType == changeset.ChangedFileTypes.hldr):
                    if (capsuleChangeSet):
                        try:
                            result = capsuleChangeSet.getFileContents(
                                pathId, fileId)
                            fObj = result[1].get()
                        except KeyError:
                            raise errors.MissingRollbackCapsule(
                                'Cannot find '
                                'RPM %s to perform local rollback' % path)

                else:
                    fObj = fContents.get()

                d = digestlib.sha1()
                util.copyfileobj(fObj, os.fdopen(tmpfd, "w"), digest=d)
                actualSha1 = d.digest()
                if actualSha1 != sha1:
                    raise files.Sha1Exception(path)

                # tmpfd is closed when the file object created by os.fdopen
                # disappears
                fileDict[(pathId, fileId)] = tmpname

            for kind, obj in sorted(self.capsuleClasses.items()):
                obj.apply(fileDict,
                          justDatabase=justDatabase,
                          noScripts=noScripts)
        finally:
            for tmpPath in fileDict.values():
                try:
                    os.unlink(tmpPath)
                except:
                    pass
Example #9
 def oldtest(self):
     cpioPath = self._createCpio()
     sha1sum = digestlib.sha1(file(cpioPath).read()).hexdigest()
     resultFilePath = os.path.join(self.workDir, 'result.cpio')
     # Use a variety of sizes, to try to come up with different chunking
     # solutions
     for bufferSize in [1001, 1003, 3001]:
         f = file(resultFilePath, "w")
         src = cpiostream.CpioStream(file(cpioPath))
         while 1:
             buf = src.read(bufferSize)
             if not buf:
                 break
             f.write(buf)
         f.close()
         nsha1sum = digestlib.sha1(file(resultFilePath).read()).hexdigest()
         self.assertEqual(nsha1sum, sha1sum)
Example #10
    def testExpansion(self):
        cpioPath = self._createCpio()
        target = self.workDir + '/root'
        expander = cpiostream.CpioExploder(file(cpioPath))
        expander.explode(target)
        sha1sum = digestlib.sha1(file(
            target + '/usr/lib/perl5/5.10.0/Archive/Tar.pm').read()).hexdigest()
        self.assertEquals(sha1sum, 'cbe78d8a0d26a86436e4fc56f8581ffd3db4bd83')

        shutil.rmtree(self.workDir)
        os.mkdir(self.workDir)

        cpioPath = self._createCpio(rpmName = 'simple-1.1-1.i386.rpm')
        expander = cpiostream.CpioExploder(file(cpioPath))
        expander.explode(target)
        assert(os.path.isdir(target + '/dir'))
        sha1sum = digestlib.sha1(file(target + '/normal').read()).hexdigest()
        self.assertEquals(sha1sum, '5662cdf7d378e7505362c59239f73107b6edf1d3')
Example #11
 def getFingerprint(self):
     d = digestlib.sha1()
     for fplist in [
             self.jobFingerprints,
             self.bootstrapFingerprints,
             self.crossFingerprints,
             ]:
         d.update(''.join(sorted(fplist)) + '\0\0')
     d.update('\0'.join(x.freeze() for x in sorted(self.rpmRequirements)))
     return d.digest()
Example #12
    def testPrelink(self):
        user = pwd.getpwuid(os.getuid()).pw_name
        group = grp.getgrgid(os.getgid()).gr_name
        archivePath = resources.get_archive()
        self.addComponent('test:foo=1',
                          fileContents=[
                              ('/prelinktest',
                               rephelp.RegularFile(
                                   contents=open(archivePath + '/prelinktest'),
                                   owner=user,
                                   group=group,
                                   mode=0755)),
                              ('/prelinktest-orig',
                               rephelp.RegularFile(
                                   contents=open(archivePath + '/prelinktest'),
                                   owner=user,
                                   group=group,
                                   mode=0755))
                          ])
        self.updatePkg('test:foo=1')

        db = database.Database(self.rootDir, self.cfg.dbPath)
        rc, str = self.captureOutput(verify.verify, ['test:foo'], db, self.cfg)
        self.assertEquals(str, '')

        binary = self.rootDir + '/prelinktest'
        # the test suite can't set the mtime on the file; we'll preserve
        # it ourselves
        sb = os.stat(binary)
        os.system("cp %s/prelinktest-prelinked %s" % (archivePath, binary))
        os.utime(binary, (sb.st_atime, sb.st_mtime))

        self.assertEquals(files.PRELINK_CMD, ('/usr/sbin/prelink', ))
        oldCmd = files.PRELINK_CMD
        try:
            files.PRELINK_CMD = (archivePath + '/prelink', )
            files._havePrelink = None
            rc, str = self.captureOutput(verify.verify, ['test:foo'],
                                         db,
                                         self.cfg,
                                         forceHashCheck=True)
            self.assertEquals(str, '')

            # Also verify a path used by addCapsule
            f, nlinks, devino = files.FileFromFilesystem(binary,
                                                         pathId='\0' * 16,
                                                         inodeInfo=True)
            self.assertEquals(
                digestlib.sha1(open(binary).read()).hexdigest(),
                '1114f3a978b60d76d7618dc43aaf207bc999f997')
            self.assertEquals(f.contents.sha1().encode('hex'),
                              '23ad3a2c940a30809b68a5b8a13392196004efab')
        finally:
            files.PRELINK_CMD = oldCmd
            files._havePrelink = None
Example #13
    def restore(self, fileContents, root, target, journal=None, sha1 = None,
                nameLookup=True, **kwargs):

        keepTempfile = kwargs.get('keepTempfile', False)
        destTarget = target

        if fileContents is not None:
            # this is first to let us copy the contents of a file
            # onto itself; the unlink helps that to work
            src = fileContents.get()
            inFd = None

            if fileContents.isCompressed() and hasattr(src, '_fdInfo'):
                # inFd is None if we can't figure this information out
                # (for _LazyFile for instance)
                (inFd, inStart, inSize) = src._fdInfo()

            path, name = os.path.split(target)
            if not os.path.isdir(path):
                util.mkdirChain(path)

            # Uncompress to a temporary file, using the accelerated
            # implementation if possible.
            if inFd is not None and util.sha1Uncompress is not None:
                actualSha1, tmpname = util.sha1Uncompress(
                        inFd, inStart, inSize, path, name)
            else:
                if fileContents.isCompressed():
                    src = gzip.GzipFile(mode='r', fileobj=src)
                tmpfd, tmpname = tempfile.mkstemp(name, '.ct', path)
                try:
                    d = digestlib.sha1()
                    f = os.fdopen(tmpfd, 'w')
                    util.copyfileobj(src, f, digest = d)
                    f.close()
                    actualSha1 = d.digest()
                except:
                    os.unlink(tmpname)
                    raise

            if keepTempfile:
                # Make a hardlink "copy" for the caller to use
                destTarget = tmpname + '.ptr'
                os.link(tmpname, destTarget)
            try:
                os.rename(tmpname, target)
            except OSError, err:
                if err.args[0] != errno.EISDIR:
                    raise
                os.rmdir(target)
                os.rename(tmpname, target)

            if (sha1 is not None and sha1 != actualSha1):
                raise Sha1Exception(target)
Example #14
 def getElementTree(self, *args, **kwargs):
     eltree = xmllib.BaseNode.getElementTree(self, *args, **kwargs)
     if '_xmlNodeHash' not in self.__slots__ or self._xmlNodeHash is not None:
         return eltree
     # Compute the checksum
     csum = digestlib.sha1()
     csum.update(xmllib.etree.tostring(eltree, pretty_print = False,
                 xml_declaration = False, encoding = 'UTF-8'))
     self._xmlNodeHash = csum.hexdigest()
     eltree.attrib['xmlNodeHash'] = self._xmlNodeHash
     return eltree
Example #15
    def apply(self, justDatabase = False, noScripts = False,
              capsuleChangeSet = None):
        if capsuleChangeSet:
            # Previous jobs will have moved the pointer in the auxiliary
            # changeset, so reset it at the start of each job.
            capsuleChangeSet.reset()
        tmpDir = os.path.join(self.root, 'var/tmp')
        if not os.path.isdir(tmpDir):
            # For empty roots or roots that are not systems (e.g. source
            # checkouts), just put capsules in the root directory.
            tmpDir = self.root
        fileDict = {}
        for kind, obj in sorted(self.capsuleClasses.items()):
            fileDict.update(
                dict(((x[0], x[2], x[3]), x[1]) for x in obj._filesNeeded()))

        try:
            for ((pathId, fileId, sha1), path) in sorted(fileDict.items()):
                tmpfd, tmpname = tempfile.mkstemp(dir=tmpDir, prefix=path,
                        suffix='.conary')
                fType, fContents = self.changeSet.getFileContents(pathId,
                                                                  fileId)
                if (fType == changeset.ChangedFileTypes.hldr):
                    if (capsuleChangeSet):
                        try:
                            result = capsuleChangeSet.getFileContents(pathId,
                                                                      fileId)
                            fObj = result[1].get()
                        except KeyError:
                            raise errors.MissingRollbackCapsule('Cannot find '
                                'RPM %s to perform local rollback' % path)

                else:
                    fObj = fContents.get()

                d = digestlib.sha1()
                util.copyfileobj(fObj, os.fdopen(tmpfd, "w"), digest = d)
                actualSha1 = d.digest()
                if actualSha1 != sha1:
                    raise files.Sha1Exception(path)

                # tmpfd is closed when the file object created by os.fdopen
                # disappears
                fileDict[(pathId, fileId)] = tmpname

            for kind, obj in sorted(self.capsuleClasses.items()):
                obj.apply(fileDict, justDatabase = justDatabase, noScripts = noScripts)
        finally:
            for tmpPath in fileDict.values():
                try:
                    os.unlink(tmpPath)
                except:
                    pass
Example #16
 def getJobHash(self):
     # Disqualify anything other than a simple, isolated build.
     if self.inCycle or self.builtTroves or self.crossTroves:
         return None
     # Hash all the inputs to the resolver so that the result can be cached.
     inputs = [
             '\0'.join(sorted(self.trove.getBuildRequirements())),
             '\0'.join(sorted(self.trove.getCrossRequirements())),
             '\0'.join(str(x) for x in self.buildCfg.flavor),
             '\1'.join('\0'.join(sorted(str(y) for y in x))
                 for x in self.buildCfg.resolveTroveTups),
             ]
     return digestlib.sha1('\2'.join(inputs)).hexdigest()
Example #17
    def testPrelink(self):
        user = pwd.getpwuid(os.getuid()).pw_name
        group = grp.getgrgid(os.getgid()).gr_name
        archivePath = resources.get_archive()
        self.addComponent(
            "test:foo=1",
            fileContents=[
                (
                    "/prelinktest",
                    rephelp.RegularFile(
                        contents=open(archivePath + "/prelinktest"), owner=user, group=group, mode=0755
                    ),
                ),
                (
                    "/prelinktest-orig",
                    rephelp.RegularFile(
                        contents=open(archivePath + "/prelinktest"), owner=user, group=group, mode=0755
                    ),
                ),
            ],
        )
        self.updatePkg("test:foo=1")

        db = database.Database(self.rootDir, self.cfg.dbPath)
        rc, str = self.captureOutput(verify.verify, ["test:foo"], db, self.cfg)
        self.assertEquals(str, "")

        binary = self.rootDir + "/prelinktest"
        # the test suite can't set the mtime on the file; we'll preserve
        # it ourselves
        sb = os.stat(binary)
        os.system("cp %s/prelinktest-prelinked %s" % (archivePath, binary))
        os.utime(binary, (sb.st_atime, sb.st_mtime))

        self.assertEquals(files.PRELINK_CMD, ("/usr/sbin/prelink",))
        oldCmd = files.PRELINK_CMD
        try:
            files.PRELINK_CMD = (archivePath + "/prelink",)
            files._havePrelink = None
            rc, str = self.captureOutput(verify.verify, ["test:foo"], db, self.cfg, forceHashCheck=True)
            self.assertEquals(str, "")

            # Also verify a path used by addCapsule
            f, nlinks, devino = files.FileFromFilesystem(binary, pathId="\0" * 16, inodeInfo=True)
            self.assertEquals(
                digestlib.sha1(open(binary).read()).hexdigest(), "1114f3a978b60d76d7618dc43aaf207bc999f997"
            )
            self.assertEquals(f.contents.sha1().encode("hex"), "23ad3a2c940a30809b68a5b8a13392196004efab")
        finally:
            files.PRELINK_CMD = oldCmd
            files._havePrelink = None
Example #18
    def _generate(self):
        self._log.info("Generating template %s from trove %s=%s[%s]",
                self._hash, *self._troveTup)

        self._installContents(self._contentsDir, [self._troveTup])

        # Copy "unified" directly into the output.
        os.mkdir(self._outputDir)
        util.copytree(self._contentsDir + '/unified', self._outputDir + '/')

        # Process the MANIFEST file.
        for line in open(self._contentsDir + '/MANIFEST'):
            line = line.rstrip()
            if not line or line[0] == '#':
                continue
            args = line.rstrip().split(',')
            command = args.pop(0)
            commandFunc = getattr(self, '_DO_' + command, None)
            if not commandFunc:
                raise RuntimeError("Unknown command %r in MANIFEST"
                        % (command,))
            commandFunc(args)

        # Archive the results.
        digest = digestlib.sha1()
        outFile = util.AtomicFile(self._outputPath)

        proc = call(['/bin/tar', '-cC', self._outputDir, '.'],
                stdout=subprocess.PIPE, captureOutput=False, wait=False)
        util.copyfileobj(proc.stdout, outFile, digest=digest)
        proc.wait()

        # Write metadata.
        metaFile = util.AtomicFile(self._outputPath + '.metadata')
        cPickle.dump({
            'sha1sum': digest.hexdigest(),
            'trovespec': '%s=%s[%s]' % self._troveTup,
            'kernel': (self._kernelTup and ('%s=%s[%s]' % self._kernelTup)
                or '<none>'),
            # Right now, we are going to hardcode this to an older version
            # of Netclient Protocol to hint to the Conary installed on the
            # jobslave to generate old filecontainers that are compatible
            # with all versions of Conary. (See RBL-1552.)
            'netclient_protocol_version': '38',
            }, metaFile)

        metaFile.commit()
        outFile.commit()

        self._log.info("Template %s created", self._hash)
Example #19
 def getElementTree(self, *args, **kwargs):
     eltree = xmllib.BaseNode.getElementTree(self, *args, **kwargs)
     if '_xmlNodeHash' not in self.__slots__ or self._xmlNodeHash is not None:
         return eltree
     # Compute the checksum
     csum = digestlib.sha1()
     csum.update(
         xmllib.etree.tostring(eltree,
                               pretty_print=False,
                               xml_declaration=False,
                               encoding='UTF-8'))
     self._xmlNodeHash = csum.hexdigest()
     eltree.attrib['xmlNodeHash'] = self._xmlNodeHash
     return eltree
Example #20
    def read(self):
        instanceId = self.ec2InstanceId
        if instanceId is not None:
            sha = digestlib.sha1(instanceId)
            self._uuid = GeneratedUuid.asString(sha.digest()[:16])
        else:
            dmidecodeUuid = self._getDmidecodeUuid().lower()
            self._uuid = dmidecodeUuid

        if os.path.exists(self.uuidFile):
            persistedUuid = self._readFile(self.uuidFile)
            if persistedUuid.lower() != self._uuid:
                self._writeDmidecodeUuid(self._uuid)
        else:
            self._writeDmidecodeUuid(self._uuid)
Example #21
    def testCopyFileObjDigest(self):
        tmpDir = tempfile.mkdtemp()
        try:
            buf = 'test data'

            # prepare source and destination files
            srcFn = os.path.join(tmpDir, 'srcfile')
            destFn = os.path.join(tmpDir, 'destfile')
            open(srcFn, 'w').write(buf)
            src = open(srcFn)
            dest = open(destFn, 'w')

            # filter the digest through copyfileobj
            sha1 = digestlib.sha1()
            util.copyfileobj(src, dest, digest=sha1, sizeLimit=len(buf))
            res = sha1.hexdigest()

            # now compare the resulting hash to reference data
            sha1 = digestlib.sha1()
            sha1.update(buf)
            ref = sha1.hexdigest()
            self.assertEquals(ref, res)
        finally:
            util.rmtree(tmpDir)
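util.copyfileobj appears throughout these examples with a digest keyword, so hashing happens during the copy rather than in a second pass over the data. A sketch of that pattern (assumed behavior, not Conary's exact implementation):

def copyfileobj(src, dest, digest=None, sizeLimit=None, bufSize=16384):
    # Copy src to dest in chunks; feed each chunk to the digest object,
    # if one was given, and stop once sizeLimit bytes have been copied.
    copied = 0
    while sizeLimit is None or copied < sizeLimit:
        chunk = bufSize
        if sizeLimit is not None:
            chunk = min(bufSize, sizeLimit - copied)
        buf = src.read(chunk)
        if not buf:
            break
        dest.write(buf)
        if digest is not None:
            digest.update(buf)
        copied += len(buf)
    return copied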
Example #22
    def testCopyFileObjDigest(self):
        tmpDir = tempfile.mkdtemp()
        try:
            buf = "test data"

            # prepare source and destination files
            srcFn = os.path.join(tmpDir, "srcfile")
            destFn = os.path.join(tmpDir, "destfile")
            open(srcFn, "w").write(buf)
            src = open(srcFn)
            dest = open(destFn, "w")

            # filter the digest through copyfileobj
            sha1 = digestlib.sha1()
            util.copyfileobj(src, dest, digest=sha1, sizeLimit=len(buf))
            res = sha1.hexdigest()

            # now compare the resulting hash to reference data
            sha1 = digestlib.sha1()
            sha1.update(buf)
            ref = sha1.hexdigest()
            self.assertEquals(ref, res)
        finally:
            util.rmtree(tmpDir)
Example #23
def sha1FileBin(path):
    oldmode = None
    mode = os.lstat(path)[stat.ST_MODE]
    if (mode & 0400) != 0400:
        oldmode = mode
        os.chmod(path, mode | 0400)

    fd = os.open(path, os.O_RDONLY)
    if oldmode is not None:
        os.chmod(path, oldmode)

    m = digestlib.sha1()
    buf = os.read(fd, 40960)
    while len(buf):
        m.update(buf)
        buf = os.read(fd, 40960)
    os.close(fd)

    return m.digest()
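sha1FileBin returns the raw 20-byte digest (temporarily chmodding the file readable if necessary). Callers wanting the conventional 40-character form can hex-encode it; a hypothetical Python 2 usage:

digest = sha1FileBin('/etc/hosts')   # 20-byte binary digest
print digest.encode('hex')           # 40-character hex string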
Example #24
    def setImageFiles(self, db, hostname, imageId, imageFiles=None):
        if imageFiles is None:
            digest = sha1()
            digest.update(str(imageId))
            digest = digest.hexdigest()

            imageFiles = models.ImageFileList(files=[models.ImageFile(
                title='Image File %s' % imageId,
                size=1024 * imageId,
                sha1=digest,
                fileName='imagefile_%s.iso' % imageId,
                )])

        for item in imageFiles.files:
            path = '%s/%s/%s/%s' % (self.mintCfg.imagesPath, hostname, imageId,
                    item.fileName)
            util.mkdirChain(os.path.dirname(path))
            open(path, 'w').write('image data')
        db.imageMgr.setFilesForImage(hostname, imageId, imageFiles)
Example #25
    def get(self, pathId):
        f = self.fileClass(pathId)
        f.inode = files.InodeStream(self.perms & 07777, self.mtime, self.owner, self.group)
        self._touchupFileStream(f)
        if self.needSha1:
            sha1 = digestlib.sha1()
            contents = self.contents.get()
            devnull = open(os.devnull, "w")
            util.copyfileobj(contents, devnull, digest=sha1)
            devnull.close()

            f.contents = files.RegularFileStream()
            f.contents.size.set(contents.tell())
            f.contents.sha1.set(sha1.digest())
        f.provides.set(self.provides)
        f.requires.set(self.requires)
        f.flavor.set(self.flavor)
        for tag in self.tags:
            f.tags.set(tag)
        return f
Example #26
    def get(self, pathId):
        f = self.fileClass(pathId)
        f.inode = files.InodeStream(self.perms & 07777, self.mtime, self.owner,
                                    self.group)
        self._touchupFileStream(f)
        if self.needSha1:
            sha1 = digestlib.sha1()
            contents = self.contents.get()
            devnull = open(os.devnull, 'w')
            util.copyfileobj(contents, devnull, digest=sha1)
            devnull.close()

            f.contents = files.RegularFileStream()
            f.contents.size.set(contents.tell())
            f.contents.sha1.set(sha1.digest())
        f.provides.set(self.provides)
        f.requires.set(self.requires)
        f.flavor.set(self.flavor)
        for tag in self.tags:
            f.tags.set(tag)
        return f
Example #27
    def newCollection(self, kvdict):
        cu = self._db.cursor()
        jobType = kvdict['type']
        jobTypeId = self.jobTypes[jobType]
        jobStateId = self.jobStates['Queued']
        now = time.time()
        created = kvdict.setdefault('created', now)
        modified = kvdict.setdefault('modified', now)
        ttl = kvdict.setdefault('ttl', None)
        if ttl:
            expiration = modified + ttl
        else:
            expiration = None
        if 'jobUuid' not in kvdict:
            jobUuid = digestlib.sha1(file("/dev/urandom").read(16)).hexdigest()
            kvdict['jobUuid'] = jobUuid
        jobUuid = kvdict['jobUuid']

        extraCreateArgs = self._extraCreateArgs(kvdict)
        args = (jobUuid, jobTypeId, jobStateId, self._db.auth.userId, created,
            modified, expiration, ttl) + extraCreateArgs
        assert len(self.extra_fields_insert) == len(extraCreateArgs)
        if self.extra_fields_insert:
            extra_fields = ", " + ', '.join(self.extra_fields_insert)
            extra_bind_args = ", " + ', '.join('?'
                for x in self.extra_fields_insert)
        else:
            extra_fields = ""
            extra_bind_args = ""
        sql = """
            INSERT INTO jobs
                (job_uuid, job_type_id, job_state_id, created_by, created,
                modified, expiration, ttl%(extra_fields)s)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?%(extra_bind_args)s)""" % dict(
                extra_fields = extra_fields,
                extra_bind_args = extra_bind_args)
        cu.execute(sql, *args)
        jobId = cu.lastid()
        self._postNewCollection(jobId, kvdict)
        return jobId
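The jobUuid above is made by hashing 16 bytes read from /dev/urandom, which only works where that device exists; os.urandom is the portable way to get the same entropy. A sketch, assuming only uniqueness of the identifier matters:

import os
import hashlib

# hashlib.sha1 standing in for digestlib.sha1 (same interface).
jobUuid = hashlib.sha1(os.urandom(16)).hexdigest()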
Example #28
    def uploadBuild(self, auth):
        method = self.req.method.upper()
        if method != "PUT":
            raise web_exc.HTTPMethodNotAllowed(allow='PUT')

        client = shimclient.ShimMintClient(self.cfg,
                (self.cfg.authUser, self.cfg.authPass), self.db)

        buildId, fileName = self.req.path_info.split("/")[-2:]
        build = client.getBuild(int(buildId))
        project = client.getProject(build.projectId)

        # make sure the hash we receive from the slave matches
        # the hash we gave the slave in the first place.
        # this prevents slaves from overwriting arbitrary files
        # in the finished images directory.
        if not auth.admin:
            outputToken = self.req.headers.get('X-rBuilder-OutputToken')
            if outputToken != build.getDataValue('outputToken', validate = False):
                raise web_exc.HTTPForbidden()

        targetFn = os.path.join(self.cfg.imagesPath, project.hostname,
                str(buildId), fileName)
        util.mkdirChain(os.path.dirname(targetFn))
        fObj = AtomicFile(targetFn, 'wb+', prefix='img-', suffix='.tmp')
        ctx = digestlib.sha1()

        inFile = None
        if 'x-uploaded-file' in self.req.headers:
            # The frontend proxy has already saved the request body to a
            # temporary location, so first try to rename it into place.
            try:
                os.rename(self.req.headers['x-uploaded-file'], fObj.name)
            except OSError, err:
                if err.errno != errno.EXDEV:
                    raise
                # Upload dir is on a different filesystem.
                inFile = open(self.req.headers['x-uploaded-file'], 'rb')
Example #29
def specHash(troveTups, buildTimes=None):
    """
    Create a unique identifier for the troves C{troveTups}.
    """
    if buildTimes:
        assert len(troveTups) == len(buildTimes)
        troveTups = zip(troveTups, buildTimes)
    else:
        troveTups = [(x, None) for x in troveTups]

    items = []
    for (name, version, flavor), buildTime in sorted(troveTups):
        items.append(name)
        if buildTime:
            items.append(version.trailingRevision().version)
            items.append(long(buildTime))
        elif version.trailingRevision().timeStamp:
            items.append(version.freeze())
        else:
            items.append(version.asString())
        items.append(flavor.freeze())
    items.append('')
    return digestlib.sha1('\0'.join(str(x) for x in items)).hexdigest()
Example #30
    def get(self, fileName, computeShaDigest = False):
        """
        Download a file from the repository.
        @param fileName: relative path to file
        @type fileName: string
        @return open file instance
        """

        fobj = self._getTempFileObject()
        realUrl = self._getRealUrl(fileName)

        inf = self._opener.open(realUrl)
        if computeShaDigest:
            dig = digestlib.sha1()
        else:
            dig = None
        util.copyfileobj(inf, fobj, digest = dig)
        fobj.seek(0)

        if not os.path.basename(fileName).endswith('.gz'):
            return self.FileWrapper.create(fobj, dig)
        return self.FileWrapper.create(gzip.GzipFile(fileobj=fobj, mode="r"),
            dig)
Example #31
 def __init__(self, fobj):
     self.fobj = fobj
     self.digest = digestlib.sha1()
Example #32
def sha1String(buf):
    m = digestlib.sha1()
    m.update(buf)
    return m.digest()
Example #33
    def restore(self,
                fileContents,
                root,
                target,
                journal=None,
                sha1=None,
                nameLookup=True,
                **kwargs):

        keepTempfile = kwargs.get('keepTempfile', False)

        if fileContents != None:
            # this is first to let us copy the contents of a file
            # onto itself; the unlink helps that to work
            src = fileContents.get()
            inFd = None

            if fileContents.isCompressed():
                if hasattr(src, '_fdInfo'):
                    # inFd is None if we can't figure this information out
                    # (for _LazyFile for instance)
                    (inFd, inStart, inSize) = src._fdInfo()
                else:
                    src = gzip.GzipFile(mode="r", fileobj=src)

            name = os.path.basename(target)
            path = os.path.dirname(target)
            if not os.path.isdir(path):
                util.mkdirChain(path)

            if inFd is not None:
                if keepTempfile:
                    tmpfd, destTarget = tempfile.mkstemp(name, '.ct', path)
                    os.close(tmpfd)
                    destName = os.path.basename(destTarget)
                else:
                    destName, destTarget = name, target
                actualSha1 = util.sha1Uncompress((inFd, inStart, inSize), path,
                                                 destName, destTarget)
                if keepTempfile:
                    # Set up the second temp file here. This makes
                    # sure we get through the next if branch.
                    inFd = None
                    src = file(destTarget)
            elif keepTempfile:
                tmpfd, destTarget = tempfile.mkstemp(name, '.ct', path)
                f = os.fdopen(tmpfd, 'w')
                util.copyfileobj(src, f)
                f.close()
                src = file(destTarget)
            else:
                destTarget = target

            if inFd is None:
                tmpfd, tmpname = tempfile.mkstemp(name, '.ct', path)
                try:
                    d = digestlib.sha1()
                    f = os.fdopen(tmpfd, 'w')
                    util.copyfileobj(src, f, digest=d)
                    f.close()
                    actualSha1 = d.digest()

                    # would be nice if util could do this w/ a single
                    # system call, but exists is better than an exception
                    # when the file doesn't already exist
                    if (os.path.exists(target)
                            and stat.S_ISDIR(os.lstat(target).st_mode)):
                        os.rmdir(target)
                    os.rename(tmpname, target)
                except:
                    # we've not renamed tmpname to target yet, we should
                    # clean up instead of leaving temp files around
                    os.unlink(tmpname)
                    if keepTempfile:
                        os.unlink(destTarget)
                    raise

            if (sha1 is not None and sha1 != actualSha1):
                raise Sha1Exception(target)

            File.restore(self,
                         root,
                         target,
                         journal=journal,
                         nameLookup=nameLookup,
                         **kwargs)
        else:
            destTarget = target
            File.restore(self,
                         root,
                         target,
                         journal=journal,
                         nameLookup=nameLookup,
                         **kwargs)
        return destTarget
Example #34
def FileFromFilesystem(path, pathId, possibleMatch = None, inodeInfo = False,
        assumeRoot=False, statBuf=None, sha1FailOk=False):
    if statBuf:
        s = statBuf
    else:
        s = os.lstat(path)

    global userCache, groupCache, _havePrelink

    if assumeRoot:
        owner = 'root'
        group = 'root'
    elif isinstance(s.st_uid, basestring):
        # Already stringified -- some capsule code will fabricate a stat result
        # from e.g. an RPM header
        owner = s.st_uid
        group = s.st_gid
    else:
        # + is not a valid char in user/group names; if the uid is not mapped
        # to a user, prepend it with + and store it as a string
        try:
            owner = userCache.lookupId('/', s.st_uid)
        except KeyError:
            owner = '+%d' % s.st_uid

        try:
            group = groupCache.lookupId('/', s.st_gid)
        except KeyError:
            group = '+%d' % s.st_gid

    needsSha1 = 0
    inode = InodeStream(s.st_mode & 07777, s.st_mtime, owner, group)

    if (stat.S_ISREG(s.st_mode)):
        f = RegularFile(pathId)
        needsSha1 = 1
    elif (stat.S_ISLNK(s.st_mode)):
        f = SymbolicLink(pathId)
        if hasattr(s, 'linkto'):
            f.target.set(s.linkto)
        else:
            f.target.set(os.readlink(path))
    elif (stat.S_ISDIR(s.st_mode)):
        f = Directory(pathId)
    elif (stat.S_ISSOCK(s.st_mode)):
        f = Socket(pathId)
    elif (stat.S_ISFIFO(s.st_mode)):
        f = NamedPipe(pathId)
    elif (stat.S_ISBLK(s.st_mode)):
        f = BlockDevice(pathId)
        f.devt.major.set(s.st_rdev >> 8)
        f.devt.minor.set(s.st_rdev & 0xff)
    elif (stat.S_ISCHR(s.st_mode)):
        f = CharacterDevice(pathId)
        f.devt.major.set(s.st_rdev >> 8)
        f.devt.minor.set(s.st_rdev & 0xff)
    else:
        raise FilesError("unsupported file type for %s" % path)

    f.inode = inode
    f.flags = FlagsStream(0)

    # assume we have a match if the FileMode and object type match
    if possibleMatch and (possibleMatch.__class__ == f.__class__) \
                     and f.inode == possibleMatch.inode \
                     and f.inode.mtime() == possibleMatch.inode.mtime() \
                     and (not s.st_size or
                          (possibleMatch.hasContents and
                           s.st_size == possibleMatch.contents.size())):
        f.flags.set(possibleMatch.flags())
        return possibleMatch
    elif (possibleMatch and (isinstance(f, RegularFile) and
                             isinstance(possibleMatch, RegularFile))
                        and (f.inode.isExecutable())
                        and f.inode.mtime() == possibleMatch.inode.mtime()
                        and f.inode.owner == possibleMatch.inode.owner
                        and f.inode.group == possibleMatch.inode.group
                        and f.inode.perms == possibleMatch.inode.perms):
        # executable RegularFiles match even if their sizes are different
        # as long as everything else is the same; this is to stop size
        # changes from prelink from changing fileids
        return possibleMatch

    if needsSha1:
        f.contents = RegularFileStream()

        undoPrelink = False
        if _havePrelink != False and f.inode.isExecutable():
            try:
                from conary.lib import elf
                if elf.prelinked(path):
                    undoPrelink = True
            except:
                pass
        if undoPrelink and _havePrelink is None:
            _havePrelink = bool(os.access(PRELINK_CMD[0], os.X_OK))
        if undoPrelink and _havePrelink:
            prelink = subprocess.Popen(
                    PRELINK_CMD + ("-y", path),
                    stdout = subprocess.PIPE,
                    close_fds = True,
                    shell = False)
            d = digestlib.sha1()
            content = prelink.stdout.read()
            size = 0
            while content:
                d.update(content)
                size += len(content)
                content = prelink.stdout.read()

            prelink.wait()
            f.contents.size.set(size)
            sha1 = d.digest()
        else:
            try:
                sha1 = sha1helper.sha1FileBin(path)
            except OSError:
                if sha1FailOk:
                    sha1 = sha1helper.sha1Empty
                else:
                    raise
            f.contents.size.set(s.st_size)

        f.contents.sha1.set(sha1)

    if inodeInfo:
        return (f, s.st_nlink, (s.st_rdev, s.st_ino))

    return f
Example #35
 def fingerprint(self):
     return digestlib.sha1(self.x509.as_der()).hexdigest()
Example #36
    def restore(self, fileContents, root, target, journal=None, sha1 = None,
                nameLookup=True, **kwargs):

        keepTempfile = kwargs.get('keepTempfile', False)

        if fileContents != None:
            # this is first to let us copy the contents of a file
            # onto itself; the unlink helps that to work
            src = fileContents.get()
            inFd = None

            if fileContents.isCompressed():
                if hasattr(src, '_fdInfo'):
                    # inFd is None if we can't figure this information out
                    # (for _LazyFile for instance)
                    (inFd, inStart, inSize) = src._fdInfo()
                else:
                    src = gzip.GzipFile(mode = "r", fileobj = src)

            name = os.path.basename(target)
            path = os.path.dirname(target)
            if not os.path.isdir(path):
                util.mkdirChain(path)

            if inFd is not None:
                if keepTempfile:
                    tmpfd, destTarget = tempfile.mkstemp(name, '.ct', path)
                    os.close(tmpfd)
                    destName = os.path.basename(destTarget)
                else:
                    destName, destTarget = name, target
                actualSha1 = util.sha1Uncompress((inFd, inStart, inSize),
                                                 path, destName, destTarget)
                if keepTempfile:
                    # Set up the second temp file here. This makes
                    # sure we get through the next if branch.
                    inFd = None
                    src = file(destTarget)
            elif keepTempfile:
                tmpfd, destTarget = tempfile.mkstemp(name, '.ct', path)
                f = os.fdopen(tmpfd, 'w')
                util.copyfileobj(src, f)
                f.close()
                src = file(destTarget)
            else:
                destTarget = target

            if inFd is None:
                tmpfd, tmpname = tempfile.mkstemp(name, '.ct', path)
                try:
                    d = digestlib.sha1()
                    f = os.fdopen(tmpfd, 'w')
                    util.copyfileobj(src, f, digest = d)
                    f.close()
                    actualSha1 = d.digest()

                    # would be nice if util could do this w/ a single
                    # system call, but exists is better than an exception
                    # when the file doesn't already exist
                    if (os.path.exists(target) and
                            stat.S_ISDIR(os.lstat(target).st_mode)):
                        os.rmdir(target)
                    os.rename(tmpname, target)
                except:
                    # we've not renamed tmpname to target yet, we should
                    # clean up instead of leaving temp files around
                    os.unlink(tmpname)
                    if keepTempfile:
                        os.unlink(destTarget)
                    raise

            if (sha1 is not None and sha1 != actualSha1):
                raise Sha1Exception(target)

            File.restore(self, root, target, journal=journal,
                nameLookup=nameLookup, **kwargs)
        else:
            destTarget = target
            File.restore(self, root, target, journal=journal,
                nameLookup=nameLookup, **kwargs)
        return destTarget
Example #37
    def createCertificate(self, purpose, desc, issuer=None, common=None):
        """Create and store one certificate.

        @param purpose: Machine-readable string identifying the purpose of this
                certificate.
        @param desc: Human-readable description to put into the certificate.
        @param issuer: Optional tuple C{(x509, pkey)} of issuer cert pair.
        @param common: Optional common name (hostname) for subject.
        """
        # Don't let anyone else even read the table while we work. We wouldn't
        # want two processes to read in the same CA serial index, generate
        # different certificates using the same serial, write them out to disk,
        # then blow up when they try to commit.
        cu = self.db.cursor()
        cu.execute("LOCK TABLE pki_certificates")

        subject = X509.X509_Name()
        subject.O = desc
        subject.OU = 'Created at ' + time.strftime('%F %T%z')
        if common is not None:
            subject.CN = common

        issuer_pkey = issuer_subject = issuer_fingerprint = serial = None
        if issuer is None:
            isCA = True
        else:
            isCA = False
            if isinstance(issuer, basestring):
                # Look up CA by purpose
                issuer_x509, issuer_pkey = self.getCertificatePair(issuer)
            else:
                # Tuple provided
                issuer_x509, issuer_pkey = issuer
            issuer_x509 = X509.load_cert_string(issuer_x509)
            issuer_pkey = EVP.load_key_string(issuer_pkey)
            issuer_subject = issuer_x509.get_subject()
            issuer_fingerprint = digestlib.sha1(
                    issuer_x509.as_der()).hexdigest()

            cu.execute("""UPDATE pki_certificates
                SET ca_serial_index = ca_serial_index + 1
                WHERE fingerprint = ?
                RETURNING ca_serial_index
                """, issuer_fingerprint)
            serial, = cu.fetchone()

        # Create certificates with a 'not before' date 1 day in the past, just
        # in case initial setup sets the clock backwards.
        rsa, x509 = gencert.new_cert(KEY_LENGTH, subject, EXPIRY,
                issuer=issuer_subject, issuer_evp=issuer_pkey, isCA=isCA,
                serial=serial, timestamp_offset=-86400)

        fingerprint = digestlib.sha1(x509.as_der()).hexdigest()
        pkey_pem = rsa.as_pem(None)
        x509_pem = x509.as_pem()

        cu.execute("""INSERT INTO pki_certificates (
                fingerprint, purpose, is_ca, x509_pem, pkey_pem,
                issuer_fingerprint, ca_serial_index, time_issued, time_expired
                )
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""",
            fingerprint, purpose, isCA, x509_pem, pkey_pem,
            issuer_fingerprint, 0,
            str(x509.get_not_before()), str(x509.get_not_after()),
            )

        log.info("Created certificate %s for purpose %r%s",
                fingerprint, purpose,
                (issuer_fingerprint and (" (issuer %s)" % issuer_fingerprint)
                    or ""))

        return x509_pem, pkey_pem
Example #38
 def seek(self, where):
     # This allows the conary http client to rewind the body file, and
     # resets the digest along with it.
     assert where == 0
     self.digest = digestlib.sha1()
     self.fobj.seek(0)
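The __init__ in Example #31 and the seek here in Example #38 are two methods of the same body-file wrapper: the digest tracks whatever the HTTP client reads, and rewinding the body resets it so a retried request hashes identically. A self-contained sketch assembled from the two excerpts, with an assumed read method and hashlib.sha1 standing in for digestlib.sha1:

import hashlib

class DigestingBody(object):
    # Illustrative reconstruction; the class name and read method are
    # assumptions, not shown in the original excerpts.
    def __init__(self, fobj):
        self.fobj = fobj
        self.digest = hashlib.sha1()

    def read(self, size=-1):
        # Hash the body as the client consumes it.
        buf = self.fobj.read(size)
        self.digest.update(buf)
        return buf

    def seek(self, where):
        # The client may rewind the body to retry a request; reset the
        # digest so it tracks the stream from the start again.
        assert where == 0
        self.digest = hashlib.sha1()
        self.fobj.seek(0)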