def _archive_given_files(self, files):
  """Archives the given files with isolateserver_archive.py, then verifies
  they are all retrievable from and listed as present on the server.

  Args:
    files: iterable of file names relative to TEST_DATA_DIR.
  """
  args = [
    sys.executable, os.path.join(ROOT_DIR, 'isolateserver_archive.py'),
    '--remote', ISOLATE_SERVER,
    '--namespace', self.namespace,
  ]
  if '-v' in sys.argv:
    args.append('--verbose')
  args.extend(os.path.join(TEST_DATA_DIR, filename) for filename in files)
  self.assertEqual(0, subprocess.call(args))

  # Try to download the files from the server.
  file_hashes = [
    isolateserver_archive.sha1_file(os.path.join(TEST_DATA_DIR, f))
    for f in files
  ]
  for filename, file_hash in zip(files, file_hashes):
    download_url = '%scontent/retrieve/%s/%s' % (
        ISOLATE_SERVER, self.namespace, file_hash)
    # BUG FIX: the original line ended with a stray trailing comma, which
    # wrapped the result in a 1-tuple; a tuple is never None, so the
    # assertion below could never fail.
    downloaded_file = isolateserver_archive.url_open(
        download_url, retry_404=True)
    self.assertTrue(
        downloaded_file is not None,
        'File %s was missing from the server' % filename)

  # Ensure the files are listed as present on the server.
  contains_hash_url = '%scontent/contains/%s?token=%s&from_smoke_test=1' % (
      ISOLATE_SERVER, self.namespace, self.token)
  body = ''.join(binascii.unhexlify(h) for h in file_hashes)
  expected = chr(1) * len(files)
  MAX_ATTEMPTS = 10
  for i in xrange(MAX_ATTEMPTS):
    # AppEngine's datastore is eventually consistent and isolateserver does
    # not use transactions for performance reasons, so even if one request
    # was able to retrieve a file, a subsequent request may not see it.
    # Retry a few times until the datastore becomes consistent with regard
    # to these entities.
    response = isolateserver_archive.url_open(
        contains_hash_url,
        data=body,
        content_type='application/octet-stream').read()
    if response == expected:
      break
    # GAE is exposing its internal data inconsistency.
    if i != (MAX_ATTEMPTS - 1):
      print('Visible datastore inconsistency, retrying.')
      time.sleep(0.1)
  self.assertEqual(expected, response)
def setUp(self):
  """Creates a unique per-run namespace and fetches an access token.

  The namespace must end in '-gzip' since all files are now compressed
  before being uploaded.
  """
  # Note: str(long(...)) never contains a '.', so the original
  # .split('.', 1)[0] was dead code and has been removed.
  self.namespace = 'temporary' + str(long(time.time())) + '-gzip'
  url = ISOLATE_SERVER + '/content/get_token?from_smoke_test=1'
  self.token = urllib.quote(isolateserver_archive.url_open(url).read())
def _archive_given_files(self, files):
    """Archives the given files with isolateserver_archive.py, then verifies
    they are all retrievable from and listed as present on the server.

    Args:
        files: iterable of file names relative to TEST_DATA_DIR.
    """
    args = [
        sys.executable,
        os.path.join(ROOT_DIR, "isolateserver_archive.py"),
        "--remote",
        ISOLATE_SERVER,
        "--namespace",
        self.namespace,
    ]
    if "-v" in sys.argv:
        args.append("--verbose")
    args.extend(os.path.join(TEST_DATA_DIR, filename) for filename in files)
    self.assertEqual(0, subprocess.call(args))

    # Try to download the files from the server.
    file_hashes = [isolateserver_archive.sha1_file(os.path.join(TEST_DATA_DIR, f)) for f in files]
    for filename, file_hash in zip(files, file_hashes):
        download_url = "%scontent/retrieve/%s/%s" % (ISOLATE_SERVER, self.namespace, file_hash)
        # BUG FIX: the original wrapped the result in a 1-tuple via a stray
        # trailing comma; a tuple is never None, so the assertion below could
        # never fail.
        downloaded_file = isolateserver_archive.url_open(download_url, retry_404=True)
        self.assertTrue(downloaded_file is not None, "File %s was missing from the server" % filename)

    # Ensure the files are listed as present on the server.
    contains_hash_url = "%scontent/contains/%s?token=%s&from_smoke_test=1" % (
        ISOLATE_SERVER,
        self.namespace,
        self.token,
    )
    body = "".join(binascii.unhexlify(h) for h in file_hashes)
    expected = chr(1) * len(files)
    MAX_ATTEMPTS = 10
    for attempt in xrange(MAX_ATTEMPTS):
        # AppEngine's datastore is eventually consistent and isolateserver
        # does not use transactions for performance reasons, so even if one
        # request was able to retrieve a file, a subsequent request may not
        # see it. Retry a few times until the datastore becomes consistent
        # with regard to these entities.
        response = isolateserver_archive.url_open(
            contains_hash_url, data=body, content_type="application/octet-stream"
        ).read()
        if response == expected:
            break
        # BUG FIX: the original retried in a tight loop with no delay, which
        # defeats the purpose of waiting out the datastore's eventual
        # consistency. Sleep briefly between attempts.
        if attempt != (MAX_ATTEMPTS - 1):
            time.sleep(0.1)
    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(expected, response)
def setUp(self):
    """Creates a unique per-run namespace and fetches an access token.

    The namespace must end in '-gzip' since all files are now compressed
    before being uploaded.
    """
    # Note: str(long(...)) never contains a '.', so the original
    # .split(".", 1)[0] was dead code and has been removed.
    self.namespace = "temporary" + str(long(time.time())) + "-gzip"
    url = ISOLATE_SERVER + "/content/get_token?from_smoke_test=1"
    self.token = urllib.quote(isolateserver_archive.url_open(url).read())