def _unzipIterChunkyTest(self, compression, chunksize, lower, upper):
    """
    unzipIterChunky should unzip the given number of bytes per iteration.
    """
    junk = ' '.join([str(random.random()) for n in xrange(1000)])
    junkmd5 = md5(junk).hexdigest()

    tempdir = filepath.FilePath(self.mktemp())
    tempdir.makedirs()
    zfpath = tempdir.child('bigfile.zip').path
    self._makebigfile(zfpath, compression, junk)
    uziter = zipstream.unzipIterChunky(zfpath, tempdir.path,
                                       chunksize=chunksize)
    r = uziter.next()
    # test that the number of chunks is in the right ballpark;
    # this could theoretically be any number but statistically it
    # should always be in this range
    approx = lower < r < upper
    self.failUnless(approx)
    for r in uziter:
        pass
    self.assertEqual(r, 0)
    newmd5 = md5(
        tempdir.child("zipstreamjunk").open().read()).hexdigest()
    self.assertEqual(newmd5, junkmd5)
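# The helper above calls a _makebigfile method that is not shown in this
# excerpt. The sketch below is only a guess at its shape, not the original
# implementation: it assumes the stdlib zipfile module is imported at module
# level and writes the junk data into an archive entry named "zipstreamjunk",
# which is what the md5 comparison at the end of _unzipIterChunkyTest reads.
def _makebigfile(self, filename, compression, junk):
    """
    Hypothetical helper: build a zip archive at the given path with the
    given compression scheme, storing the junk data as "zipstreamjunk".
    """
    zf = zipfile.ZipFile(filename, 'w', compression)
    zf.writestr('zipstreamjunk', junk)
    zf.close()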
def setUp(self):
    """
    Create a test directory and a subdirectory into which files will be
    unzipped.
    """
    self.testdir = filepath.FilePath(self.mktemp())
    self.testdir.makedirs()
    self.unzipdir = self.testdir.child('unzipped')
    self.unzipdir.makedirs()
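# A concrete test drives _unzipIterChunkyTest with a particular compression
# scheme and chunk size. The method name, chunk size, and bounds below are
# illustrative only (they are not taken from the original suite); the bounds
# just need to bracket the number of chunks the archive should produce, and
# zipfile is assumed to be imported at module level.
def test_unzipIterChunkyStored(self):
    """
    unzipIterChunky should unzip the given number of bytes per iteration
    on a stored (uncompressed) archive.
    """
    self._unzipIterChunkyTest(zipfile.ZIP_STORED, 500, 35, 45)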
def setUp(self):
    """
    Create a directory to watch and an INotify instance whose connection is
    cleaned up when the test ends.
    """
    self.dirname = filepath.FilePath(self.mktemp())
    self.dirname.createDirectory()
    self.inotify = inotify.INotify()
    self.inotify.startReading()
    self.addCleanup(self.inotify.loseConnection)
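# The operation callbacks below are meant to be handed to a driver that
# watches self.dirname and fires a Deferred when the expected inotify event
# arrives. A minimal sketch of such a driver follows, assuming the
# twisted.internet.inotify and twisted.internet.defer APIs (defer is assumed
# to be imported); the name _notificationTest, the argument order, and the
# default expected path are assumptions, not the original code.
def _notificationTest(self, mask, operation, expectedPath=None):
    if expectedPath is None:
        expectedPath = self.dirname.child("foo.bar")
    notified = defer.Deferred()

    def cbNotified(result):
        # Callbacks registered with INotify.watch receive the Watch object,
        # the affected FilePath, and the event mask.
        (watch, filename, events) = result
        self.assertEqual(filename, expectedPath)
        self.assertTrue(events & mask)
    notified.addCallback(cbNotified)

    self.inotify.watch(
        self.dirname, mask=mask,
        callbacks=[lambda *args: notified.callback(args)])
    operation(expectedPath)
    return notified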
def operation(path):
    # Move the given path away to a fresh temporary location.
    path.moveTo(filepath.FilePath(self.mktemp()))
def operation(path):
    # Create a file at a temporary location, then move it to the given path.
    p = filepath.FilePath(self.mktemp())
    p.touch()
    p.moveTo(path)
def operation(path):
    # Create the file at the given path, then move it away to a temporary
    # location.
    fObj = path.open("w")
    fObj.close()
    path.moveTo(filepath.FilePath(self.mktemp()))
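# A test method would pass one of these operations to the assumed
# _notificationTest helper sketched earlier, together with the inotify mask
# it expects. For the operation just above, that would plausibly be
# IN_MOVED_FROM, since the file is moved out of the watched directory; the
# method name and wiring below are illustrative, not the original code.
def test_movedFrom(self):
    """
    Moving a file out of the watched directory should trigger an
    IN_MOVED_FROM event for that file.
    """
    def operation(path):
        fObj = path.open("w")
        fObj.close()
        path.moveTo(filepath.FilePath(self.mktemp()))

    return self._notificationTest(inotify.IN_MOVED_FROM, operation)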