Example #1
def test_cas_batch_update_blobs(mocked, instance):
    storage = SimpleStorage()

    cas_instance = ContentAddressableStorageInstance(storage)
    servicer = ContentAddressableStorageService(server)
    servicer.add_instance(instance, cas_instance)

    update_requests = [
        re_pb2.BatchUpdateBlobsRequest.Request(
            digest=re_pb2.Digest(hash=HASH(b'abc').hexdigest(), size_bytes=3), data=b'abc'),
        re_pb2.BatchUpdateBlobsRequest.Request(
            digest=re_pb2.Digest(hash="invalid digest!", size_bytes=1000),
            data=b'wrong data')
    ]

    request = re_pb2.BatchUpdateBlobsRequest(
        instance_name=instance, requests=update_requests)
    response = servicer.BatchUpdateBlobs(request, context)
    assert len(response.responses) == 2

    for blob_response in response.responses:
        if blob_response.digest == update_requests[0].digest:
            assert blob_response.status.code == 0

        elif blob_response.digest == update_requests[1].digest:
            assert blob_response.status.code != 0

        else:
            raise Exception("Unexpected blob response")

    assert len(storage.data) == 1
    assert (update_requests[0].digest.hash, 3) in storage.data
    assert storage.data[(update_requests[0].digest.hash, 3)] == b'abc'
Example #2
def test_disabled_cache_failed_actions(cas, context):
    disabled_failed_actions = ActionCache(cas, 50, True, False)
    with mock.patch.object(service, 'remote_execution_pb2_grpc'):
        ac_service = ActionCacheService(server)
        ac_service.add_instance("", disabled_failed_actions)

    failure_action_digest = remote_execution_pb2.Digest(hash='failure', size_bytes=4)

    # Add a non-zero exit code ActionResult to the cache
    action_result = remote_execution_pb2.ActionResult(stdout_raw=b'Failed', exit_code=1)
    request = remote_execution_pb2.UpdateActionResultRequest(action_digest=failure_action_digest,
                                                             action_result=action_result)
    ac_service.UpdateActionResult(request, context)

    # Check that the failed ActionResult was not cached: fetching it should fail with NOT_FOUND
    request = remote_execution_pb2.GetActionResultRequest(instance_name="",
                                                          action_digest=failure_action_digest)
    ac_service.GetActionResult(request, context)
    context.set_code.assert_called_once_with(grpc.StatusCode.NOT_FOUND)

    success_action_digest = remote_execution_pb2.Digest(hash='success', size_bytes=4)

    # Now add a zero exit code Action result to the cache, and check that fetching
    # it is successful
    success_action_result = remote_execution_pb2.ActionResult(stdout_raw=b'Successful')
    request = remote_execution_pb2.UpdateActionResultRequest(action_digest=success_action_digest,
                                                             action_result=success_action_result)
    ac_service.UpdateActionResult(request, context)
    request = remote_execution_pb2.GetActionResultRequest(instance_name="",
                                                          action_digest=success_action_digest)
    fetched_result = ac_service.GetActionResult(request, context)
    assert fetched_result.stdout_raw == success_action_result.stdout_raw
Example #3
def test_checks_cas(acType, cas):
    if acType == 'memory':
        cache = ActionCache(cas, 50)
    elif acType == 's3':
        auth_args = {
            "aws_access_key_id": "access_key",
            "aws_secret_access_key": "secret_key"
        }
        boto3.resource('s3', **auth_args).create_bucket(Bucket='cachebucket')
        cache = S3ActionCache(cas,
                              allow_updates=True,
                              cache_failed_actions=True,
                              bucket='cachebucket',
                              access_key="access_key",
                              secret_key="secret_key")

    action_digest1 = remote_execution_pb2.Digest(hash='alpha', size_bytes=4)
    action_digest2 = remote_execution_pb2.Digest(hash='bravo', size_bytes=4)
    action_digest3 = remote_execution_pb2.Digest(hash='charlie', size_bytes=4)

    # Create a tree that references digests stored in CAS
    sample_digest = cas.put_message(
        remote_execution_pb2.Command(arguments=["sample"]))
    tree = remote_execution_pb2.Tree()
    tree.root.files.add().digest.CopyFrom(sample_digest)
    tree.children.add().files.add().digest.CopyFrom(sample_digest)
    tree_digest = cas.put_message(tree)

    # Add an ActionResult that references real digests to the cache
    action_result1 = remote_execution_pb2.ActionResult()
    action_result1.output_directories.add().tree_digest.CopyFrom(tree_digest)
    action_result1.output_files.add().digest.CopyFrom(sample_digest)
    action_result1.stdout_digest.CopyFrom(sample_digest)
    action_result1.stderr_digest.CopyFrom(sample_digest)
    cache.update_action_result(action_digest1, action_result1)

    # Add ActionResults that reference fake digests to the cache
    action_result2 = remote_execution_pb2.ActionResult()
    action_result2.output_directories.add().tree_digest.hash = "nonexistent"
    action_result2.output_directories[0].tree_digest.size_bytes = 8
    cache.update_action_result(action_digest2, action_result2)

    action_result3 = remote_execution_pb2.ActionResult()
    action_result3.stdout_digest.hash = "nonexistent"
    action_result3.stdout_digest.size_bytes = 8
    cache.update_action_result(action_digest3, action_result3)

    # Verify we can get the first ActionResult but not the others
    fetched_result1 = cache.get_action_result(action_digest1)
    assert fetched_result1.output_directories[0].tree_digest.hash == tree_digest.hash
    # Each missing digest needs its own raises block; once the first call
    # raises, a second call inside the same block would never run.
    with pytest.raises(NotFoundError):
        cache.get_action_result(action_digest2)
    with pytest.raises(NotFoundError):
        cache.get_action_result(action_digest3)
Example #4
def test_cas_find_missing_blobs(mocked, instance):
    storage = SimpleStorage([b'abc', b'def'])
    cas_instance = ContentAddressableStorageInstance(storage)
    servicer = ContentAddressableStorageService(server)
    servicer.add_instance(instance, cas_instance)

    digests = [
        re_pb2.Digest(hash=HASH(b'def').hexdigest(), size_bytes=3),
        re_pb2.Digest(hash=HASH(b'ghij').hexdigest(), size_bytes=4)
    ]
    request = re_pb2.FindMissingBlobsRequest(
        instance_name=instance, blob_digests=digests)
    response = servicer.FindMissingBlobs(request, context)
    assert len(response.missing_blob_digests) == 1
    assert response.missing_blob_digests[0] == digests[1]
Example #5
    def write(self, digest_hash, digest_size, first_block, other_blocks):
        if len(digest_hash) != HASH_LENGTH or not digest_size.isdigit():
            raise InvalidArgumentError("Invalid digest [{}/{}]"
                                       .format(digest_hash, digest_size))

        digest = re_pb2.Digest(hash=digest_hash, size_bytes=int(digest_size))

        write_session = self.__storage.begin_write(digest)

        # Start the write session and write the first request's data.
        write_session.write(first_block)

        computed_hash = HASH(first_block)
        bytes_written = len(first_block)

        # Handle subsequent write requests.
        for next_block in other_blocks:
            write_session.write(next_block)

            computed_hash.update(next_block)
            bytes_written += len(next_block)

        # Check that the data matches the provided digest.
        if bytes_written != digest.size_bytes:
            raise NotImplementedError(
                "Cannot close stream before finishing write")

        elif computed_hash.hexdigest() != digest.hash:
            raise InvalidArgumentError("Data does not match hash")

        self.__storage.commit_write(digest, write_session)

        return bytestream_pb2.WriteResponse(committed_size=bytes_written)
Example #6
def test_upload_tree(instance, directory_paths):
    # Actual test function, to be run in a subprocess:
    def __test_upload_tree(queue, remote, instance, directory_paths):
        # Open a channel to the remote CAS server:
        channel = grpc.insecure_channel(remote)

        digests = []
        with upload(channel, instance) as uploader:
            if len(directory_paths) > 1:
                for directory_path in directory_paths:
                    digest = uploader.upload_tree(directory_path, queue=True)
                    digests.append(digest.SerializeToString())
            else:
                digest = uploader.upload_tree(directory_paths[0], queue=False)
                digests.append(digest.SerializeToString())

        queue.put(digests)

    # Start a minimal CAS server in a subprocess:
    with serve_cas([instance]) as server:
        digests = run_in_subprocess(__test_upload_tree, server.remote,
                                    instance, directory_paths)

        for directory_path, digest_blob in zip(directory_paths, digests):
            digest = remote_execution_pb2.Digest()
            digest.ParseFromString(digest_blob)

            assert server.has(digest)

            tree = remote_execution_pb2.Tree()
            tree.ParseFromString(server.get(digest))

            directory_digest = create_digest(tree.root.SerializeToString())

            assert server.compare_directories(directory_digest, directory_path)
Example #7
def test_upload_message(instance, messages):
    # Actual test function, to be run in a subprocess:
    def __test_upload_message(queue, remote, instance, messages):
        # Open a channel to the remote CAS server:
        channel = grpc.insecure_channel(remote)

        digests = []
        with upload(channel, instance) as uploader:
            if len(messages) > 1:
                for message in messages:
                    digest = uploader.put_message(message, queue=True)
                    digests.append(digest.SerializeToString())
            else:
                digest = uploader.put_message(messages[0], queue=False)
                digests.append(digest.SerializeToString())

        queue.put(digests)

    # Start a minimal CAS server in a subprocess:
    with serve_cas([instance]) as server:
        digests = run_in_subprocess(__test_upload_message, server.remote,
                                    instance, messages)

        for message, digest_blob in zip(messages, digests):
            digest = remote_execution_pb2.Digest()
            digest.ParseFromString(digest_blob)

            assert server.has(digest)
            assert server.compare_messages(digest, message)
Example #8
def test_simple_action_result(cache_instances, context):
    with mock.patch.object(service, 'remote_execution_pb2_grpc'):
        ac_service = ActionCacheService(server)

    for k, v in cache_instances.items():
        ac_service.add_instance(k, v)

    action_digest = remote_execution_pb2.Digest(hash='sample', size_bytes=4)

    # Check that before adding the ActionResult, attempting to fetch it fails
    request = remote_execution_pb2.GetActionResultRequest(instance_name="",
                                                          action_digest=action_digest)
    ac_service.GetActionResult(request, context)
    context.set_code.assert_called_once_with(grpc.StatusCode.NOT_FOUND)

    # Add an ActionResult to the cache
    action_result = remote_execution_pb2.ActionResult(stdout_raw=b'example output')
    request = remote_execution_pb2.UpdateActionResultRequest(action_digest=action_digest,
                                                             action_result=action_result)
    ac_service.UpdateActionResult(request, context)

    # Check that fetching it now works
    request = remote_execution_pb2.GetActionResultRequest(action_digest=action_digest)
    fetched_result = ac_service.GetActionResult(request, context)
    assert fetched_result.stdout_raw == action_result.stdout_raw
Example #9
    def read(self, digest_hash, digest_size, read_offset, read_limit):
        if len(digest_hash) != HASH_LENGTH or not digest_size.isdigit():
            raise InvalidArgumentError("Invalid digest [{}/{}]"
                                       .format(digest_hash, digest_size))

        digest = re_pb2.Digest(hash=digest_hash, size_bytes=int(digest_size))

        # Check the given read offset and limit.
        if read_offset < 0 or read_offset > digest.size_bytes:
            raise OutOfRangeError("Read offset out of range")

        elif read_limit == 0:
            bytes_remaining = digest.size_bytes - read_offset

        elif read_limit > 0:
            bytes_remaining = read_limit

        else:
            raise InvalidArgumentError("Negative read_limit is invalid")

        # Read the blob from storage and send its contents to the client.
        result = self.__storage.get_blob(digest)
        if result is None:
            raise NotFoundError("Blob not found")

        elif result.seekable():
            result.seek(read_offset)

        else:
            result.read(read_offset)

        while bytes_remaining > 0:
            yield bytestream_pb2.ReadResponse(
                data=result.read(min(self.BLOCK_SIZE, bytes_remaining)))
            bytes_remaining -= self.BLOCK_SIZE
Example #10
def test_update_leases_with_work(bot_session, context, instance):
    request = bots_pb2.CreateBotSessionRequest(parent='',
                                               bot_session=bot_session)

    action_digest = remote_execution_pb2.Digest(hash='gaff')
    _inject_work(instance._instances[""]._scheduler,
                 action_digest=action_digest)

    response = instance.CreateBotSession(request, context)

    assert len(response.leases) == 1
    response_action = remote_execution_pb2.Digest()
    response.leases[0].payload.Unpack(response_action)

    assert isinstance(response, bots_pb2.BotSession)
    assert response.leases[0].state == LeaseState.PENDING.value
    assert response_action == action_digest
Example #11
def _digestified_range(max):
    """ Generator for digests for bytestring representations of all numbers in
    the range [0, max)
    """
    for i in range(max):
        blob = bytes(i)
        yield remote_execution_pb2.Digest(hash=HASH(blob).hexdigest(),
                                          size_bytes=len(blob))
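
A brief consumption sketch for the generator above (not part of the original listing), assuming HASH is a hashlib constructor such as hashlib.sha256 and remote_execution_pb2 is importable; it only shows that each yielded Digest pairs a hex hash with the blob's size in bytes.

for digest in _digestified_range(3):
    # Each Digest carries the hex hash and the blob length in bytes.
    print(digest.hash, digest.size_bytes)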
Example #12
def test_null_cas_action_cache(cas):
    cache = ActionCache(cas, 0)

    action_digest1 = remote_execution_pb2.Digest(hash='alpha', size_bytes=4)
    dummy_result = remote_execution_pb2.ActionResult()

    cache.update_action_result(action_digest1, dummy_result)
    with pytest.raises(NotFoundError):
        cache.get_action_result(action_digest1)
Example #13
    def __test_get_missing():
        with serve_cache(['testing']) as server:
            channel = grpc.insecure_channel(server.remote)
            cache = RemoteActionCache(channel, 'testing')

            action_digest = remote_execution_pb2.Digest(hash='alpha',
                                                        size_bytes=4)
            with pytest.raises(NotFoundError):
                cache.get_action_result(action_digest)
Example #14
    def __test_update_disallowed():
        with serve_cache(['testing'], allow_updates=False) as server:
            channel = grpc.insecure_channel(server.remote)
            cache = RemoteActionCache(channel, 'testing')

            action_digest = remote_execution_pb2.Digest(hash='alpha',
                                                        size_bytes=4)
            result = remote_execution_pb2.ActionResult()
            with pytest.raises(NotImplementedError,
                               match='Updating cache not allowed'):
                cache.update_action_result(action_digest, result)
Example #15
def test_expiry(cas):
    cache = ActionCache(cas, 2)

    action_digest1 = remote_execution_pb2.Digest(hash='alpha', size_bytes=4)
    action_digest2 = remote_execution_pb2.Digest(hash='bravo', size_bytes=4)
    action_digest3 = remote_execution_pb2.Digest(hash='charlie', size_bytes=4)
    dummy_result = remote_execution_pb2.ActionResult()

    cache.update_action_result(action_digest1, dummy_result)
    cache.update_action_result(action_digest2, dummy_result)

    # Get digest 1 (making 2 the least recently used)
    assert cache.get_action_result(action_digest1) is not None
    # Add digest 3 (so 2 gets removed from the cache)
    cache.update_action_result(action_digest3, dummy_result)

    assert cache.get_action_result(action_digest1) is not None
    with pytest.raises(NotFoundError):
        cache.get_action_result(action_digest2)

    assert cache.get_action_result(action_digest3) is not None
Example #16
def test_unmet_platform_requirements(bot_session, context, instance):
    request = bots_pb2.CreateBotSessionRequest(parent='',
                                               bot_session=bot_session)

    action_digest = remote_execution_pb2.Digest(hash='gaff')
    _inject_work(instance._instances[""]._scheduler,
                 action_digest=action_digest,
                 platform_requirements={'OSFamily': set('wonderful-os')})

    response = instance.CreateBotSession(request, context)

    assert len(response.leases) == 0
Example #17
    def __test_update():
        with serve_cache(['testing']) as server:
            channel = grpc.insecure_channel(server.remote)
            cache = RemoteActionCache(channel, 'testing')

            action_digest = remote_execution_pb2.Digest(hash='alpha',
                                                        size_bytes=4)
            result = remote_execution_pb2.ActionResult()
            cache.update_action_result(action_digest, result)

            fetched = cache.get_action_result(action_digest)
            assert result == fetched
Example #18
def create_digest(bytes_to_digest):
    """Computes the :obj:`Digest` of a piece of data.

    The :obj:`Digest` of a piece of data is a function of its hash **and** its size.

    Args:
        bytes_to_digest (bytes): byte data to digest.

    Returns:
        :obj:`Digest`: The :obj:`Digest` for the given byte data.
    """
    return remote_execution_pb2.Digest(hash=HASH(bytes_to_digest).hexdigest(),
                                       size_bytes=len(bytes_to_digest))
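
A minimal usage sketch for create_digest(), not part of the original listing; it assumes HASH is the SHA-256 constructor from hashlib (the examples configure the hash function elsewhere) and that remote_execution_pb2 is importable.

import hashlib

HASH = hashlib.sha256  # assumption: the configured hash is SHA-256

digest = create_digest(b'abc')
# The Digest pairs the content hash with the content length in bytes.
assert digest.hash == hashlib.sha256(b'abc').hexdigest()
assert digest.size_bytes == 3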
Example #19
def _inject_work(scheduler,
                 action=None,
                 action_digest=None,
                 platform_requirements=None):
    if not action:
        action = remote_execution_pb2.Action()

    if not action_digest:
        action_digest = remote_execution_pb2.Digest()

    scheduler.queue_job_action(action,
                               action_digest,
                               platform_requirements,
                               skip_cache_lookup=True)
Example #20
def test_unhealthy_bot(bot_session, context, instance):
    # Set the bot status to unhealthy
    bot_session.status = BotStatus.UNHEALTHY.value
    request = bots_pb2.CreateBotSessionRequest(parent='',
                                               bot_session=bot_session)

    action_digest = remote_execution_pb2.Digest(hash='gaff')
    _inject_work(instance._instances[""]._scheduler,
                 action_digest=action_digest)

    response = instance.CreateBotSession(request, context)

    # No leases should be given
    assert len(response.leases) == 0
Example #21
def test_cas_get_tree(mocked, instance):
    '''Directory Structure:
        |--root
           |--subEmptyDir
           |--subParentDir
              |--subChildDir
    '''
    root = re_pb2.Digest(hash=HASH(b'abc').hexdigest(), size_bytes=3)
    rootDir = re_pb2.DirectoryNode(name=b'abc', digest=root)
    digest1 = re_pb2.Digest(hash=HASH(b'def').hexdigest(), size_bytes=3)
    subEmptyDir = re_pb2.DirectoryNode(name=b'def', digest=digest1)
    digest2 = re_pb2.Digest(hash=HASH(b'ghi').hexdigest(), size_bytes=3)
    subParentDir = re_pb2.DirectoryNode(name=b'ghi', digest=digest2)
    digest3 = re_pb2.Digest(hash=HASH(b'xyz').hexdigest(), size_bytes=3)
    subChildDir = re_pb2.DirectoryNode(name=b'xyz', digest=digest3)

    storage = SimpleStorage({b'abc': [subEmptyDir, subParentDir], b'def': [],
                            b'ghi': [subChildDir], b'xyz': []})
    cas_instance = ContentAddressableStorageInstance(storage)
    servicer = ContentAddressableStorageService(server)
    servicer.add_instance(instance, cas_instance)

    request = re_pb2.GetTreeRequest(
        instance_name=instance, root_digest=root)
    result = []
    for response in servicer.GetTree(request, context):
        result.extend(response.directories)

    expectedRoot = re_pb2.Directory()
    expectedRoot.directories.extend([subEmptyDir, subParentDir])
    expectedEmpty = re_pb2.Directory()
    expectedParent = re_pb2.Directory()
    expectedParent.directories.extend([subChildDir])
    expectedChild = re_pb2.Directory()

    expected = [expectedRoot, expectedEmpty, expectedParent, expectedChild]
    assert result == expected
Example #22
    def _send_blob(self, blob, digest=None):
        """Sends a memory block using ByteStream.Write()"""
        blob_digest = remote_execution_pb2.Digest()
        if digest is not None:
            blob_digest.CopyFrom(digest)
        else:
            blob_digest.hash = HASH(blob).hexdigest()
            blob_digest.size_bytes = len(blob)
        if self.instance_name:
            resource_name = '/'.join([
                self.instance_name, 'uploads', self.u_uid, 'blobs',
                blob_digest.hash,
                str(blob_digest.size_bytes)
            ])
        else:
            resource_name = '/'.join([
                'uploads', self.u_uid, 'blobs', blob_digest.hash,
                str(blob_digest.size_bytes)
            ])

        def __write_request_stream(resource, content):
            offset = 0
            finished = False
            remaining = len(content)
            while not finished:
                chunk_size = min(remaining, MAX_REQUEST_SIZE)
                remaining -= chunk_size

                request = bytestream_pb2.WriteRequest()
                request.resource_name = resource
                request.data = content[offset:offset + chunk_size]
                request.write_offset = offset
                request.finish_write = remaining <= 0

                yield request

                offset += chunk_size
                finished = request.finish_write

        write_requests = __write_request_stream(resource_name, blob)
        # TODO: Handle connection loss/recovery using QueryWriteStatus()
        try:
            write_response = self.__bytestream_stub.Write(write_requests)
        except grpc.RpcError as e:
            raise ConnectionError(e.details())

        assert write_response.committed_size == blob_digest.size_bytes

        return blob_digest
Example #23
    def __test_remote_nonexistent_read(queue, remote, serialized_digests):
        channel = grpc.insecure_channel(remote)
        remote_storage = RemoteStorage(channel, 'testing')
        digests = []

        for data in serialized_digests:
            digest = remote_execution_pb2.Digest()
            digest.ParseFromString(data)
            digests.append(digest)

        try:
            __test_nonexistent_read(remote_storage, digests)
        except AssertionError:
            queue.put(False)
        else:
            queue.put(True)
Example #24
def test_disabled_update_result(context):
    disabled_push = ReferenceCache(cas, 50, False)
    keys = ["rick", "roy", "rach"]

    with mock.patch.object(service, 'buildstream_pb2_grpc'):
        instance = ReferenceStorageService(server)
        instance.add_instance(instance_name, disabled_push)

    # Add a ReferenceResult to the cache
    reference_result = remote_execution_pb2.Digest(hash='deckard')
    request = buildstream_pb2.UpdateReferenceRequest(keys=keys,
                                                     digest=reference_result)
    instance.UpdateReference(request, context)

    request = buildstream_pb2.UpdateReferenceRequest()
    instance.UpdateReference(request, context)

    context.set_code.assert_called_once_with(grpc.StatusCode.UNIMPLEMENTED)
Example #25
    def bulk_update_blobs(self, blobs):
        sent_digests = []
        with upload(self.channel, instance=self.instance_name) as uploader:
            for digest, blob in blobs:
                if len(blob) != digest.size_bytes or HASH(
                        blob).hexdigest() != digest.hash:
                    sent_digests.append(remote_execution_pb2.Digest())
                else:
                    sent_digests.append(
                        uploader.put_blob(blob, digest=digest, queue=True))

        assert len(sent_digests) == len(blobs)

        return [
            status_pb2.Status(code=code_pb2.OK)
            if d.ByteSize() > 0 else status_pb2.Status(code=code_pb2.UNKNOWN)
            for d in sent_digests
        ]
Example #26
def test_simple_result(instance, context):
    keys = ["rick", "roy", "rach"]

    # Check that before adding the ReferenceResult, attempting to fetch it fails
    request = buildstream_pb2.GetReferenceRequest(key=keys[0])
    instance.GetReference(request, context)
    context.set_code.assert_called_once_with(grpc.StatusCode.NOT_FOUND)

    # Add a ReferenceResult to the cache
    reference_result = remote_execution_pb2.Digest(hash='deckard')
    request = buildstream_pb2.UpdateReferenceRequest(keys=keys,
                                                     digest=reference_result)
    instance.UpdateReference(request, context)

    # Check that fetching it now works
    for key in keys:
        request = buildstream_pb2.GetReferenceRequest(key=key)
        fetched_result = instance.GetReference(request, context)
        assert fetched_result.digest == reference_result
Example #27
def parse_digest(digest_string):
    """Creates a :obj:`Digest` from a digest string.

    A digest string should always be of the form ``{hash}/{size_bytes}``.

    Args:
        digest_string (str): the digest string.

    Returns:
        :obj:`Digest`: The :obj:`Digest` read from the string or None if
            `digest_string` is not a valid digest string.
    """
    digest_hash, digest_size = digest_string.split('/')

    if len(digest_hash) == HASH_LENGTH and digest_size.isdigit():
        return remote_execution_pb2.Digest(hash=digest_hash,
                                           size_bytes=int(digest_size))

    return None
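
A short round-trip sketch (illustrative, not part of the original listing) combining parse_digest() with create_digest() from Example #18; it assumes HASH_LENGTH matches the configured hash, i.e. 64 hex characters for SHA-256.

digest = create_digest(b'abc')
digest_string = '{}/{}'.format(digest.hash, digest.size_bytes)

# Parsing the string form recovers the same hash and size
# (assumes HASH_LENGTH == 64, the SHA-256 hex length).
parsed = parse_digest(digest_string)
assert parsed is not None
assert parsed.hash == digest.hash
assert parsed.size_bytes == digest.size_bytes

# Malformed strings (wrong hash length or non-numeric size) yield None.
assert parse_digest('not-a-valid-hash/abc') is None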
Example #28
def test_browser_url_initialization(instance, digest_hash, digest_size):
    # Initialize and generate a browser compatible URL:
    browser_url = BrowserURL(BASE_URL, instance)
    browser_digest = remote_execution_pb2.Digest(hash=digest_hash,
                                                 size_bytes=digest_size)

    assert browser_url.generate() is None
    assert browser_url.for_message('type', browser_digest)
    assert not browser_url.for_message(None, None)

    url = browser_url.generate()

    assert url is not None

    parsed_url = urlparse(url)

    if instance:
        assert parsed_url.path.find(instance)
    assert parsed_url.path.find('type') > 0
    assert parsed_url.path.find(digest_hash) > 0
    assert parsed_url.path.find(str(digest_size)) > 0
Example #29
def test_deletes(any_storage, blobs_digests):
    """ Test the functionality of deletes.

    Deleting a blob should cause has_blob to return False and
    get_blob to return None.
    """
    blobs, digests = blobs_digests
    # any_storage returns a string for remote storage. Since deletes
    # are not supported with remote storage, we ignore those
    if isinstance(any_storage, StorageABC):
        for blob, digest in zip(blobs, digests):
            write(any_storage, digest, blob)

        for blob, digest in zip(blobs, digests):
            assert any_storage.has_blob(digest)
            assert any_storage.get_blob(digest).read() == blob

        first_digest, *_ = digests

        any_storage.delete_blob(first_digest)

        for blob, digest in zip(blobs, digests):
            if digest != first_digest:
                assert any_storage.has_blob(digest)
                assert any_storage.get_blob(digest).read() == blob
            else:
                assert not any_storage.has_blob(digest)
                assert any_storage.get_blob(digest) is None

        # There shouldn't be any issue with deleting a blob that isn't there
        missing_digest = remote_execution_pb2.Digest(
            hash=HASH(b'missing_blob').hexdigest(),
            size_bytes=len(b'missing_blob'))
        assert not any_storage.has_blob(missing_digest)
        assert any_storage.get_blob(missing_digest) is None
        any_storage.delete_blob(missing_digest)
        assert not any_storage.has_blob(missing_digest)
        assert any_storage.get_blob(missing_digest) is None
Example #30
def test_cas_batch_read_blobs(mocked, instance):
    data = set([b'abc', b'defg', b'hij', b'klmnop'])
    storage = SimpleStorage(data)

    cas_instance = ContentAddressableStorageInstance(storage)
    servicer = ContentAddressableStorageService(server)
    servicer.add_instance(instance, cas_instance)

    bloblists_to_request = [
        [b'abc', b'defg'],
        [b'defg', b'missing_blob'],
        [b'missing_blob']
    ]

    digest_lists = [
        [
            re_pb2.Digest(hash=HASH(blob).hexdigest(), size_bytes=len(blob))
            for blob in bloblist
        ]
        for bloblist in bloblists_to_request
    ]

    read_requests = [
        re_pb2.BatchReadBlobsRequest(
            instance_name=instance, digests=digest_list
        )
        for digest_list in digest_lists
    ]

    for request, bloblist in zip(read_requests, bloblists_to_request):
        batched_responses = servicer.BatchReadBlobs(request, context)
        for response, blob in zip(batched_responses.responses, bloblist):
            if blob in data:
                assert response.status.code == code_pb2.OK
                assert response.data == blob
            else:
                assert response.status.code == code_pb2.NOT_FOUND