def _get_5_caches(self):
  """Builds 5 in-memory caches and scatters 100 items across them at random.

  Item sizes run from 1 through 100; self._now is bumped after every
  insertion so each cached item carries a distinct timestamp.

  Returns:
    List of 5 populated MemoryContentAddressedCache instances.
  """
  caches = [local_caching.MemoryContentAddressedCache() for _ in xrange(5)]
  for index in xrange(100):
    # Pick one of the caches uniformly at random for this item.
    target = caches[random.randint(0, len(caches) - 1)]
    self._add_one_item(target, index + 1)
    self._now += 1
  return caches
def _run_tha_test(self, isolated_hash=None, files=None, command=None):
  """Runs run_tha_test() with a fake storage and records tree-mode changes.

  Mocks out every file_path.make_tree_* helper so that instead of touching
  the filesystem, each invocation appends its own name to a list.

  Returns:
    List of make_tree_* helper names, in the order they were invoked.
  """
  files = files or {}
  recorded = []

  def record(name, _):
    # Ignore the tree path argument; only the helper name matters here.
    recorded.append(name)

  mocked_helpers = ('make_tree_read_only', 'make_tree_files_read_only',
                    'make_tree_deleteable', 'make_tree_writeable')
  for name in mocked_helpers:
    self.mock(file_path, name, functools.partial(record, name))

  data = run_isolated.TaskData(
      command=command or [],
      relative_cwd=None,
      extra_args=[],
      isolated_hash=isolated_hash,
      storage=StorageFake(files),
      isolate_cache=local_caching.MemoryContentAddressedCache(),
      outputs=None,
      install_named_caches=init_named_caches_stub,
      leak_temp_dir=False,
      root_dir=None,
      hard_timeout=60,
      grace_period=30,
      bot_file=None,
      switch_to_account=False,
      install_packages_fn=run_isolated.noop_install_packages,
      use_symlinks=False,
      env={},
      env_prefix={})
  ret = run_isolated.run_tha_test(data, None)
  self.assertEqual(0, ret)
  return recorded
def run_push_and_fetch_test(self, namespace):
  """Round-trips 10 items through the isolate server in |namespace|.

  Pushes 10 small buffers, fetches them back through a FetchQueue into an
  in-memory cache, and asserts the retrieved bytes match what was sent.
  """
  storage = isolateserver.get_storage(
      isolate_storage.ServerRef(self.server.url, namespace))

  # Push 10 small items to the server.
  items = []
  for i in range(10):
    items.append(
        isolateserver.BufferItem('item %d' % i, storage.server_ref.hash_algo))
  uploaded = storage.upload_items(items)
  self.assertEqual(set(items), set(uploaded))

  # Fetch them all back into a local in-memory cache.
  cache = local_caching.MemoryContentAddressedCache()
  queue = isolateserver.FetchQueue(storage, cache)
  pending = set()
  for item in items:
    pending.add(item.digest)
    queue.add(item.digest)
    queue.wait_on(item.digest)

  # Drain the queue until every requested digest has arrived.
  while pending:
    pending.discard(queue.wait())

  # The fetched bytes must equal the pushed bytes, item by item.
  fetched = []
  for item in items:
    with cache.getfileobj(item.digest) as fileobj:
      fetched.append(fileobj.read())
  self.assertEqual([''.join(i.content()) for i in items], fetched)
def send_and_receive(random_pool, storage, progress, size):
  """Sends a random file and gets it back, timing the fetch phase.

  Arguments:
    random_pool: pool object whose .gen(size) yields |size| bytes of
        pseudo-random content.
    progress: progress reporter; updated with |size| on success, without it
        on failure.
    size: number of bytes to generate for the item.

  Returns:
    (duration, size) tuple. |duration| is a float (seconds spent fetching)
    on success, or the MappingError message (str) on failure.
  """
  # TODO(maruel): Add a batching argument of value [1, 500] to batch requests.
  # NOTE(review): removed a dead `start = time.time()` here — it was always
  # overwritten by the post-upload assignment below before being read.
  batch = 1
  items = [
    isolateserver.BufferItem(random_pool.gen(size), False)
    for _ in xrange(batch)
  ]
  try:
    # len(_uploaded) may be < len(items) happen if the items is not random
    # enough or value of --mid-size is very low compared to --items.
    _uploaded = storage.upload_items(items)
    # Only the fetch phase below is timed; the upload above is deliberately
    # excluded from the measured duration.
    start = time.time()
    cache = local_caching.MemoryContentAddressedCache()
    queue = isolateserver.FetchQueue(storage, cache)
    for i in items:
      queue.add(i.digest, i.size)
    waiting = [i.digest for i in items]
    while waiting:
      waiting.remove(queue.wait(waiting))

    # Every fetched digest must map back to exactly the content pushed.
    expected = {i.digest: ''.join(i.content()) for i in items}
    for d in cache.cached_set():
      actual = cache.read(d)
      assert expected.pop(d) == actual
    assert not expected, expected
    duration = max(0, time.time() - start)
  except isolateserver.MappingError as e:
    # Report the failure as a string so the caller can tell it apart from a
    # float duration.
    duration = str(e)
  if isinstance(duration, float):
    progress.update_item('', index=1, data=size)
  else:
    progress.update_item('', index=1)
  return (duration, size)
def get_cache(self, policies):
  """Returns a fresh in-memory content-addressed cache using |policies|."""
  cache = local_caching.MemoryContentAddressedCache(policies)
  return cache
class RunIsolatedTestRun(RunIsolatedTestBase):
  # Runs the actual command requested.
  # NOTE(review): this chunk appears truncated — the try: below has no
  # matching except/finally visible here; only comments were added.
  def test_output(self):
    # Starts a full isolate server mock and have run_tha_test() uploads results
    # back after the task completed.
    server = isolateserver_mock.MockIsolateServer()
    try:
      # Script that writes "bar" into the output path given as argv[1].
      script = ('import sys\n'
                'open(sys.argv[1], "w").write("bar")\n')
      script_hash = isolateserver_mock.hash_content(script)
      # Minimal .isolated file describing the command and its one file.
      isolated = {
        u'algo': u'sha-1',
        u'command': [u'cmd.py', u'${ISOLATED_OUTDIR}/foo'],
        u'files': {
          u'cmd.py': {
            u'h': script_hash,
            u'm': 0700,
            u's': len(script),
          },
        },
        u'version': isolated_format.ISOLATED_FILE_VERSION,
      }
      # File modes are not tracked on Windows.
      if sys.platform == 'win32':
        isolated[u'files'][u'cmd.py'].pop(u'm')
      isolated_data = json_dumps(isolated)
      isolated_hash = isolateserver_mock.hash_content(isolated_data)
      server.add_content('default-store', script)
      server.add_content('default-store', isolated_data)
      store = isolateserver.get_storage(server.url, 'default-store')

      # Silence stdout during the run.
      self.mock(sys, 'stdout', StringIO.StringIO())
      data = run_isolated.TaskData(
          command=[],
          relative_cwd=None,
          extra_args=[],
          isolated_hash=isolated_hash,
          storage=store,
          isolate_cache=local_caching.MemoryContentAddressedCache(),
          outputs=None,
          install_named_caches=init_named_caches_stub,
          leak_temp_dir=False,
          root_dir=None,
          hard_timeout=60,
          grace_period=30,
          bot_file=None,
          switch_to_account=False,
          install_packages_fn=run_isolated.noop_install_packages,
          use_symlinks=False,
          env={},
          env_prefix={})
      ret = run_isolated.run_tha_test(data, None)
      self.assertEqual(0, ret)

      # It uploaded back. Assert the store has a new item containing foo.
      hashes = {isolated_hash, script_hash}
      output_hash = isolateserver_mock.hash_content('bar')
      hashes.add(output_hash)
      # Expected .isolated describing the single 3-byte output file "foo".
      isolated = {
        u'algo': u'sha-1',
        u'files': {
          u'foo': {
            u'h': output_hash,
            # TODO(maruel): Handle umask.
            u'm': 0640,
            u's': 3,
          },
        },
        u'version': isolated_format.ISOLATED_FILE_VERSION,
      }
class RunIsolatedTestOutputFiles(RunIsolatedTestBase):
  # Like RunIsolatedTestRun, but ensures that specific output files
  # (as opposed to anything in $(ISOLATED_OUTDIR)) are returned.
  # NOTE(review): this chunk appears truncated — the try: below has no
  # matching except/finally visible here; only comments were added.
  def _run_test(self, isolated, command, extra_args):
    # Starts a full isolate server mock and have run_tha_test() uploads results
    # back after the task completed.
    server = isolateserver_mock.MockIsolateServer()
    try:
      # Output the following structure:
      #
      # foo1
      # foodir --> foo2_sl (symlink to "foo2_content" file)
      # bardir --> bar1
      #
      # Create the symlinks only on Linux.
      script = ('import os\n'
                'import sys\n'
                'open(sys.argv[1], "w").write("foo1")\n'
                'bar1_path = os.path.join(sys.argv[3], "bar1")\n'
                'open(bar1_path, "w").write("bar1")\n'
                'if sys.platform.startswith("linux"):\n'
                ' foo_realpath = os.path.abspath("foo2_content")\n'
                ' open(foo_realpath, "w").write("foo2")\n'
                ' os.symlink(foo_realpath, sys.argv[2])\n'
                'else:\n'
                ' open(sys.argv[2], "w").write("foo2")\n')
      script_hash = isolateserver_mock.hash_content(script)
      # Register the script in the caller-provided .isolated dict.
      isolated['files']['cmd.py'] = {
        'h': script_hash,
        'm': 0700,
        's': len(script),
      }
      # File modes are not tracked on Windows.
      if sys.platform == 'win32':
        isolated['files']['cmd.py'].pop('m')
      isolated_data = json_dumps(isolated)
      isolated_hash = isolateserver_mock.hash_content(isolated_data)
      server.add_content('default-store', script)
      server.add_content('default-store', isolated_data)
      store = isolateserver.get_storage(server.url, 'default-store')

      # Silence stdout during the run.
      self.mock(sys, 'stdout', StringIO.StringIO())
      data = run_isolated.TaskData(
          command=command,
          relative_cwd=None,
          extra_args=extra_args,
          isolated_hash=isolated_hash,
          storage=store,
          isolate_cache=local_caching.MemoryContentAddressedCache(),
          outputs=['foo1', 'foodir/foo2_sl', 'bardir/'],
          install_named_caches=init_named_caches_stub,
          leak_temp_dir=False,
          root_dir=None,
          hard_timeout=60,
          grace_period=30,
          bot_file=None,
          switch_to_account=False,
          install_packages_fn=run_isolated.noop_install_packages,
          use_symlinks=False,
          env={},
          env_prefix={})
      ret = run_isolated.run_tha_test(data, None)
      self.assertEqual(0, ret)

      # It uploaded back. Assert the store has a new item containing foo.
      hashes = {isolated_hash, script_hash}
      foo1_output_hash = isolateserver_mock.hash_content('foo1')
      foo2_output_hash = isolateserver_mock.hash_content('foo2')
      bar1_output_hash = isolateserver_mock.hash_content('bar1')
      hashes.add(foo1_output_hash)
      hashes.add(foo2_output_hash)
      hashes.add(bar1_output_hash)
      # Expected .isolated listing the three explicitly requested outputs.
      isolated = {
        u'algo': u'sha-1',
        u'files': {
          u'foo1': {
            u'h': foo1_output_hash,
            # TODO(maruel): Handle umask.
            u'm': 0640,
            u's': 4,
          },
          u'foodir/foo2_sl': {
            u'h': foo2_output_hash,
            # TODO(maruel): Handle umask.
            u'm': 0640,
            u's': 4,
          },
          u'bardir/bar1': {
            u'h': bar1_output_hash,
            # TODO(maruel): Handle umask.
            u'm': 0640,
            u's': 4,
          },
        },
        u'version': isolated_format.ISOLATED_FILE_VERSION,
      }