def test_missing(self):
  """Archives two small files after the server reports both as missing.

  Queues the exact HTTP exchanges the archiver is expected to make
  (token fetch, contains-check answering "missing" for both entries,
  then one store per file) and asserts main() exits with 0.
  """
  files = [
      os.path.join(BASE_PATH, 'isolateserver_archive', f)
      for f in ('small_file.txt', 'empty_file.txt')
  ]
  sha1s = [isolateserver_archive.sha1_file(f) for f in files]
  # The contains-check request sends the raw (binary) digests concatenated.
  sha1encoded = ''.join(binascii.unhexlify(s) for s in sha1s)
  compressed = []
  for f in files:
    # Close each file deterministically instead of leaking the handle.
    with open(f, 'rb') as stream:
      compressed.append(
          zlib.compress(
              stream.read(), isolateserver_archive.compression_level(f)))
  path = 'http://random/'
  self._requests = [
    (path + 'content/get_token', {}, StringIO.StringIO('foo bar')),
    (
      path + 'content/contains/default-gzip?token=foo%20bar',
      {'data': sha1encoded, 'content_type': 'application/octet-stream'},
      # '\0\0' == both files missing, so both must be stored.
      StringIO.StringIO('\0\0'),
    ),
    (
      path + 'content/store/default-gzip/%s?token=foo%%20bar' % sha1s[0],
      {'data': compressed[0], 'content_type': 'application/octet-stream'},
      StringIO.StringIO('ok'),
    ),
    (
      path + 'content/store/default-gzip/%s?token=foo%%20bar' % sha1s[1],
      {'data': compressed[1], 'content_type': 'application/octet-stream'},
      StringIO.StringIO('ok'),
    ),
  ]
  result = isolateserver_archive.main(['--remote', path] + files)
  self.assertEqual(0, result)
def test_large(self):
  # NOTE(review): this method is dead code — a second method with the
  # same name `test_large` is defined later in this class and shadows
  # this one. This is the older variant that restores the monkey-patch
  # via try/finally instead of self.mock; consider deleting it.
  content = ''
  compressed = ''
  while (
      len(compressed) <= isolateserver_archive.MIN_SIZE_FOR_DIRECT_BLOBSTORE):
    # The goal here is to generate a file, once compressed, is at least
    # MIN_SIZE_FOR_DIRECT_BLOBSTORE.
    content += ''.join(chr(random.randint(0, 255)) for _ in xrange(20*1024))
    compressed = zlib.compress(
        content, isolateserver_archive.compression_level('foo.txt'))
  s = hashlib.sha1(content).hexdigest()
  infiles = {
    'foo.txt': {
      's': len(content),
      'h': s,
    },
  }
  path = 'http://random/'
  sha1encoded = binascii.unhexlify(s)
  content_type, body = isolateserver_archive.encode_multipart_formdata(
      [('token', 'foo bar')], [('content', s, compressed)])
  # Expected exchange: token fetch, contains-check answering "missing"
  # ('\0'), blobstore URL generation, then the multipart upload itself.
  self._requests = [
    (path + 'content/get_token', {}, StringIO.StringIO('foo bar')),
    (
      path + 'content/contains/default-gzip?token=foo%20bar',
      {'data': sha1encoded, 'content_type': 'application/octet-stream'},
      StringIO.StringIO('\0'),
    ),
    (
      path + 'content/generate_blobstore_url/default-gzip/%s' % s,
      {'data': [('token', 'foo bar')]},
      StringIO.StringIO('an_url/'),
    ),
    (
      'an_url/',
      {'data': body, 'content_type': content_type, 'retry_50x': False},
      StringIO.StringIO('ok'),
    ),
  ]
  old_read_and_compress = isolateserver_archive.read_and_compress
  try:
    # Bypass disk access: the pre-built compressed payload is returned
    # regardless of the arguments.
    isolateserver_archive.read_and_compress = lambda x, y: compressed
    result = isolateserver_archive.upload_sha1_tree(
        base_url=path,
        indir=os.getcwd(),
        infiles=infiles,
        namespace='default-gzip')
  finally:
    # Restore the module-level function even if the upload raises.
    isolateserver_archive.read_and_compress = old_read_and_compress
  self.assertEqual(0, result)
def test_large(self):
  """Uploads a file large enough to take the direct-blobstore path."""
  data = ''
  packed = ''
  # Grow random content until its compressed form crosses the
  # direct-blobstore threshold.
  while len(packed) <= isolateserver_archive.MIN_SIZE_FOR_DIRECT_BLOBSTORE:
    data += ''.join(chr(random.randint(0, 255)) for _ in xrange(20*1024))
    packed = zlib.compress(
        data, isolateserver_archive.compression_level('foo.txt'))
  digest = hashlib.sha1(data).hexdigest()
  infiles = {'foo.txt': {'s': len(data), 'h': digest}}
  url = 'http://random/'
  content_type, body = isolateserver_archive.encode_multipart_formdata(
      [('token', 'foo bar')], [('content', digest, packed)])
  # Scripted exchange: token fetch, contains-check reporting the entry
  # missing ('\0'), blobstore URL generation, then the upload itself.
  self._requests = [
    (url + 'content/get_token', {}, StringIO.StringIO('foo bar')),
    (
      url + 'content/contains/default-gzip?token=foo%20bar',
      {
        'data': binascii.unhexlify(digest),
        'content_type': 'application/octet-stream',
      },
      StringIO.StringIO('\0'),
    ),
    (
      url + 'content/generate_blobstore_url/default-gzip/%s' % digest,
      {'data': [('token', 'foo bar')]},
      StringIO.StringIO('an_url/'),
    ),
    (
      'an_url/',
      {'data': body, 'content_type': content_type, 'retry_50x': False},
      StringIO.StringIO('ok'),
    ),
  ]
  # Bypass disk access; the harness restores the patch on teardown.
  self.mock(
      isolateserver_archive, 'read_and_compress', lambda x, y: packed)
  result = isolateserver_archive.upload_sha1_tree(
      base_url=url,
      indir=os.getcwd(),
      infiles=infiles,
      namespace='default-gzip')
  self.assertEqual(0, result)