def run_push_and_fetch_test(self, namespace):
  """Round-trips a batch of small buffers through the isolate server.

  Uploads ten items into |namespace|, pulls them back through a FetchQueue
  into an in-memory content-addressed cache, and asserts the fetched bytes
  match what was pushed.
  """
  server_ref = isolate_storage.ServerRef(self.server.url, namespace)
  storage = isolateserver.get_storage(server_ref)

  # Upload items.
  payloads = ['item %d' % i for i in range(10)]
  items = [
      isolateserver.BufferItem(p, storage.server_ref.hash_algo)
      for p in payloads
  ]
  self.assertEqual(set(items), set(storage.upload_items(items)))

  # Fetch them all back into local memory cache.
  cache = local_caching.MemoryContentAddressedCache()
  fetch_queue = isolateserver.FetchQueue(storage, cache)

  # Start fetching every digest and mark each as awaited.
  remaining = set()
  for entry in items:
    remaining.add(entry.digest)
    fetch_queue.add(entry.digest)
    fetch_queue.wait_on(entry.digest)

  # Drain the queue until every pending digest has been fetched.
  while remaining:
    remaining.discard(fetch_queue.wait())

  # Ensure fetched data is the same as was pushed.
  fetched = []
  for entry in items:
    with cache.getfileobj(entry.digest) as f:
      fetched.append(f.read())
  self.assertEqual([''.join(entry.content()) for entry in items], fetched)
def run_push_and_fetch_test(self, namespace):
  """Uploads a batch of buffers and fetches them back, checking contents."""
  storage = isolateserver.get_storage(self.server.url, namespace)

  # Upload items.
  items = [isolateserver.BufferItem('item %d' % i) for i in xrange(10)]
  self.assertEqual(set(items), set(storage.upload_items(items)))

  # Fetch them all back into local memory cache.
  cache = isolateserver.MemoryCache()
  fetch_queue = isolateserver.FetchQueue(storage, cache)

  # Start fetching every digest.
  remaining = set()
  for entry in items:
    remaining.add(entry.digest)
    fetch_queue.add(entry.digest)

  # Wait for all fetches to complete.
  while remaining:
    remaining.discard(fetch_queue.wait(remaining))

  # Ensure fetched data is the same as was pushed.
  self.assertEqual(
      [entry.buffer for entry in items],
      [cache.read(entry.digest) for entry in items])
def send_and_receive(random_pool, storage, progress, size): """Sends a random file and gets it back. # TODO(maruel): Add a batching argument of value [1, 500] to batch requests. Returns (delay, size) """ # Create a file out of the pool. start = time.time() batch = 1 items = [ isolateserver.BufferItem(random_pool.gen(size), False) for _ in xrange(batch) ] try: # len(_uploaded) may be < len(items) happen if the items is not random # enough or value of --mid-size is very low compared to --items. _uploaded = storage.upload_items(items) start = time.time() cache = isolateserver.MemoryCache() queue = isolateserver.FetchQueue(storage, cache) for i in items: queue.add(i.digest, i.size) waiting = [i.digest for i in items] while waiting: waiting.remove(queue.wait(waiting)) expected = {i.digest: ''.join(i.content()) for i in items} for d in cache.cached_set(): actual = cache.read(d) assert expected.pop(d) == actual assert not expected, expected duration = max(0, time.time() - start) except isolateserver.MappingError as e: duration = str(e) if isinstance(duration, float): progress.update_item('', index=1, data=size) else: progress.update_item('', index=1) return (duration, size)