def test_cas_batch_update_blobs(mocked, instance):
    storage = SimpleStorage()
    cas_instance = ContentAddressableStorageInstance(storage)
    servicer = ContentAddressableStorageService(server)
    servicer.add_instance(instance, cas_instance)

    update_requests = [
        re_pb2.BatchUpdateBlobsRequest.Request(
            digest=re_pb2.Digest(hash=HASH(b'abc').hexdigest(), size_bytes=3),
            data=b'abc'),
        re_pb2.BatchUpdateBlobsRequest.Request(
            digest=re_pb2.Digest(hash="invalid digest!", size_bytes=1000),
            data=b'wrong data')
    ]

    request = re_pb2.BatchUpdateBlobsRequest(
        instance_name=instance, requests=update_requests)

    response = servicer.BatchUpdateBlobs(request, context)

    assert len(response.responses) == 2

    for blob_response in response.responses:
        if blob_response.digest == update_requests[0].digest:
            assert blob_response.status.code == 0
        elif blob_response.digest == update_requests[1].digest:
            assert blob_response.status.code != 0
        else:
            raise Exception("Unexpected blob response")

    # Only the valid blob should have been written to storage.
    assert len(storage.data) == 1
    assert (update_requests[0].digest.hash, 3) in storage.data
    assert storage.data[(update_requests[0].digest.hash, 3)] == b'abc'
def test_cas_capabilities(instance):
    cas = ContentAddressableStorageInstance(None)

    with serve_capabilities_service([instance], cas_instance=cas) as server:
        server_interface = ServerInterface(server.remote)
        response = server_interface.get_capabilities(instance)

    assert len(response.cache_capabilities.digest_function) == 1
    assert response.cache_capabilities.digest_function[0]
    assert response.cache_capabilities.symlink_absolute_path_strategy
    assert response.cache_capabilities.max_batch_total_size_bytes
def test_cas_find_missing_blobs(mocked, instance):
    storage = SimpleStorage([b'abc', b'def'])
    cas_instance = ContentAddressableStorageInstance(storage)
    servicer = ContentAddressableStorageService(server)
    servicer.add_instance(instance, cas_instance)

    digests = [
        re_pb2.Digest(hash=HASH(b'def').hexdigest(), size_bytes=3),
        re_pb2.Digest(hash=HASH(b'ghij').hexdigest(), size_bytes=4)
    ]
    request = re_pb2.FindMissingBlobsRequest(
        instance_name=instance, blob_digests=digests)

    response = servicer.FindMissingBlobs(request, context)

    assert len(response.missing_blob_digests) == 1
    assert response.missing_blob_digests[0] == digests[1]
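# The tests in this module build Digest messages inline from raw blobs. As a
# hedged convenience sketch (not part of the original suite), the pattern can
# be factored into a small helper; `make_test_digest` is a hypothetical name,
# while HASH and re_pb2 are the same names already used above.
def make_test_digest(blob):
    """Return a Digest proto for `blob`, hashed with the module's HASH function."""
    return re_pb2.Digest(hash=HASH(blob).hexdigest(), size_bytes=len(blob))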
def serve(cls, queue, instances, storage_path):
    pytest_cov.embed.cleanup_on_sigterm()

    # Use max_workers default from Python 3.5+
    max_workers = (os.cpu_count() or 1) * 5
    server = grpc.server(futures.ThreadPoolExecutor(max_workers))
    port = server.add_insecure_port('localhost:0')

    storage = DiskStorage(storage_path)

    bs_service = ByteStreamService(server)
    cas_service = ContentAddressableStorageService(server)
    for name in instances:
        bs_service.add_instance(name, ByteStreamInstance(storage))
        cas_service.add_instance(
            name, ContentAddressableStorageInstance(storage))

    server.start()

    # Publish the dynamically assigned port so the parent process can connect,
    # then block until the test harness terminates this process.
    queue.put(port)
    signal.pause()
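# Hedged sketch of how a fixture might drive the serve() helper above: spawn it
# in a child process and read the dynamically chosen port back through the
# queue. The multiprocessing usage, the `_launch_cas_server` name and the
# `server_cls` parameter are assumptions for illustration; only the
# queue.put(port) / signal.pause() behaviour is taken from the code above.
import multiprocessing  # would normally sit with the other imports at the top


def _launch_cas_server(server_cls, instances, storage_path):
    queue = multiprocessing.Queue()
    process = multiprocessing.Process(
        target=server_cls.serve,
        args=(queue, instances, storage_path))
    process.start()

    port = queue.get()  # blocks until the child publishes its port
    return process, 'localhost:{}'.format(port)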
def test_cas_get_tree(mocked, instance):
    '''Directory Structure:

    |--root
       |--subEmptyDir
       |--subParentDir
          |--subChildDir
    '''
    root = re_pb2.Digest(hash=HASH(b'abc').hexdigest(), size_bytes=3)
    rootDir = re_pb2.DirectoryNode(name=b'abc', digest=root)
    digest1 = re_pb2.Digest(hash=HASH(b'def').hexdigest(), size_bytes=3)
    subEmptyDir = re_pb2.DirectoryNode(name=b'def', digest=digest1)
    digest2 = re_pb2.Digest(hash=HASH(b'ghi').hexdigest(), size_bytes=3)
    subParentDir = re_pb2.DirectoryNode(name=b'ghi', digest=digest2)
    digest3 = re_pb2.Digest(hash=HASH(b'xyz').hexdigest(), size_bytes=3)
    subChildDir = re_pb2.DirectoryNode(name=b'xyz', digest=digest3)

    storage = SimpleStorage({
        b'abc': [subEmptyDir, subParentDir],
        b'def': [],
        b'ghi': [subChildDir],
        b'xyz': []
    })
    cas_instance = ContentAddressableStorageInstance(storage)
    servicer = ContentAddressableStorageService(server)
    servicer.add_instance(instance, cas_instance)

    request = re_pb2.GetTreeRequest(
        instance_name=instance, root_digest=root)

    result = []
    for response in servicer.GetTree(request, context):
        result.extend(response.directories)

    expectedRoot = re_pb2.Directory()
    expectedRoot.directories.extend([subEmptyDir, subParentDir])
    expectedEmpty = re_pb2.Directory()
    expectedParent = re_pb2.Directory()
    expectedParent.directories.extend([subChildDir])
    expectedChild = re_pb2.Directory()

    expected = [expectedRoot, expectedEmpty, expectedParent, expectedChild]

    assert result == expected
def test_cas_batch_read_blobs(mocked, instance):
    data = set([b'abc', b'defg', b'hij', b'klmnop'])
    storage = SimpleStorage(data)
    cas_instance = ContentAddressableStorageInstance(storage)
    servicer = ContentAddressableStorageService(server)
    servicer.add_instance(instance, cas_instance)

    bloblists_to_request = [
        [b'abc', b'defg'],
        [b'defg', b'missing_blob'],
        [b'missing_blob']
    ]

    digest_lists = [
        [
            re_pb2.Digest(hash=HASH(blob).hexdigest(), size_bytes=len(blob))
            for blob in bloblist
        ]
        for bloblist in bloblists_to_request
    ]

    read_requests = [
        re_pb2.BatchReadBlobsRequest(
            instance_name=instance, digests=digest_list)
        for digest_list in digest_lists
    ]

    for request, bloblist in zip(read_requests, bloblists_to_request):
        batched_responses = servicer.BatchReadBlobs(request, context)

        for response, blob in zip(batched_responses.responses, bloblist):
            if blob in data:
                # Stored blobs come back verbatim with an OK status.
                assert response.status.code == code_pb2.OK
                assert response.data == blob
            else:
                # Unknown digests are reported as NOT_FOUND.
                assert response.status.code == code_pb2.NOT_FOUND
def __new__(cls, storage):
    return ContentAddressableStorageInstance(storage)
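# The __new__ above presumably belongs to a wrapper class whose body is not
# shown in this excerpt. As a hedged illustration of the factory pattern it
# uses (the class name FakeCAS below is hypothetical, not the original), a
# class written this way hands callers the underlying CAS instance directly:
class FakeCAS:
    def __new__(cls, storage):
        # Bypass normal construction and return the real CAS instance, so the
        # wrapper name behaves as a drop-in factory.
        return ContentAddressableStorageInstance(storage)

# Usage: FakeCAS(SimpleStorage()) evaluates to a ContentAddressableStorageInstance,
# so isinstance(FakeCAS(SimpleStorage()), ContentAddressableStorageInstance) holds.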