Example #1
 def __init__(self, name=None):
     """
     Creates a message builder that will create a named or anonymous
     message.
     Args:
         name: The name of the message to create. If not provided or
               set to None, the name is set to
               'anonymous_message_XXXXXXXX_XXXX_XXXX_XXXX_XXXXXXXXXXXX',
               where each X is a random hex digit.
     Returns:
         nothing
     """
     self._name = name or 'anonymous_message_{}'.format(generate_guid())
     self._file_descriptor_name = 'schema_containing_{}'.format(self._name)
     self._package = 'bonsai.proto'
     self._full_name = '{}.{}'.format(self._package, self._name)
     self._fields = {}
     self._current_field_name = ''
     self._current_field_type = None
     self._current_field_is_array = False
     self._factory = MessageFactory()
     inkling_file_descriptor = FileDescriptorProto()
     inkling_types_pb2.DESCRIPTOR.CopyToProto(inkling_file_descriptor)
     self._factory.pool.Add(inkling_file_descriptor)
     self._fields_to_resolve = {}
Example #2
def str_to_schema(raw_schema):
    file_descriptor_proto = descriptor_pb2.FileDescriptorProto.FromString(
        raw_schema)

    descriptor_pool = DescriptorPool()
    descriptor_pool.Add(file_descriptor_proto)

    name = file_descriptor_proto.name
    descriptor_person = descriptor_pool.FindMessageTypeByName(
        'tutorial.Person')
    descriptor_addressbook = descriptor_pool.FindMessageTypeByName(
        'tutorial.AddressBook')
    descriptor_phonenumber = descriptor_pool.FindMessageTypeByName(
        'tutorial.Person.PhoneNumber')

    person_class = MessageFactory().GetPrototype(descriptor_person)
    addressbook_class = MessageFactory().GetPrototype(descriptor_addressbook)
    phonenumber_class = MessageFactory().GetPrototype(descriptor_phonenumber)

    # A different way to extract types from the proto
    # messages_type_dic = message_factory.GetMessages([file_descriptor_proto])
    # # messages_type_dic only contains 'tutorial.Person' and 'tutorial.AddressBook'
    # assert len(messages_type_dic) == 2
    # assert 'tutorial.Person' in messages_type_dic
    # assert 'tutorial.AddressBook' in messages_type_dic
    # person_class = messages_type_dic['tutorial.Person']
    # addressbook_class = messages_type_dic['tutorial.AddressBook']
    # person_instance = person_class()
    # addressbook_instance = addressbook_class()
    # return person_class, addressbook_class

    return person_class, addressbook_class, phonenumber_class
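
The helper above expects serialized FileDescriptorProto bytes. A minimal sketch of the complementary schema_to_str helper used later in this listing (an assumption; its original definition is not shown) that produces those bytes from a FileDescriptor:

def schema_to_str(file_descriptor):
    # Assumed helper: copy the FileDescriptor back into a
    # FileDescriptorProto and serialize it, so that str_to_schema()
    # can parse it again with FromString().
    file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
    file_descriptor.CopyToProto(file_descriptor_proto)
    return file_descriptor_proto.SerializeToString()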
Example #3
 def __init__(self, path):
     self._log_reader = _LogReader(path)
     # header
     header_ = self._log_reader.header
     fd_set = FileDescriptorSet()
     fd_set.ParseFromString(header_.proto)
     self._header = Header(proto=fd_set, types=header_.types)
     # descriptors
     self._pool = DescriptorPool()
     for proto in self._header.proto.file:
         self._pool.Add(proto)
     self._factory = MessageFactory()
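
The header's proto field above is a serialized FileDescriptorSet. A hypothetical writer-side sketch (an assumption, not part of the original project) of how such header bytes could be produced from generated message classes:

def build_header_bytes(message_classes):
    # Pack each class's file descriptor into a FileDescriptorSet and
    # serialize it; the reader's __init__ above parses these bytes
    # back with FileDescriptorSet.ParseFromString().
    fd_set = FileDescriptorSet()
    for cls in message_classes:
        cls.DESCRIPTOR.file.CopyToProto(fd_set.file.add())
    return fd_set.SerializeToString()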
Example #4
    def __init__(self, message_type, conf=None):

        # Require use.deprecated.format to be explicitly configured
        # during a transitionary period since old/new format are
        # incompatible.
        if conf is None or 'use.deprecated.format' not in conf:
            raise RuntimeError(
                "ProtobufDeserializer: the 'use.deprecated.format' configuration "
                "property must be explicitly set due to backward incompatibility "
                "with older confluent-kafka-python Protobuf producers and consumers. "
                "See the release notes for more details")

        # handle configuration
        conf_copy = self._default_conf.copy()
        if conf is not None:
            conf_copy.update(conf)

        self._use_deprecated_format = conf_copy.pop('use.deprecated.format')
        if not isinstance(self._use_deprecated_format, bool):
            raise ValueError("use.deprecated.format must be a boolean value")
        if self._use_deprecated_format:
            warnings.warn("ProtobufDeserializer: the 'use.deprecated.format' "
                          "configuration property, and the ability to use the "
                          "old incorrect Protobuf serializer heading format "
                          "introduced in confluent-kafka-python v1.4.0, "
                          "will be removed in an upcoming release in 2022 Q2. "
                          "Please migrate your Python Protobuf producers and "
                          "consumers to 'use.deprecated.format':False as "
                          "soon as possible")

        descriptor = message_type.DESCRIPTOR
        self._msg_index = _create_msg_index(descriptor)
        self._msg_class = MessageFactory().GetPrototype(descriptor)
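
_create_msg_index is not shown in this listing. A simplified sketch of what such a helper might compute (an assumption based on the Confluent Protobuf framing, which prefixes the payload with the message's index path inside its .proto file):

def _create_msg_index_sketch(descriptor):
    # Walk from the target message up through its containing messages,
    # recording each descriptor's index within its enclosing scope,
    # then reverse to get the top-down path.
    path = []
    current = descriptor
    while current is not None:
        path.append(current.index)
        current = current.containing_type
    return list(reversed(path))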
Example #5
def update_message_classes():
    global message_classes, descriptor_path, method_info
    factory = MessageFactory()
    # Add well-known types first
    for file_descriptor in file_descriptors.values():
        file_proto = FileDescriptorProto()
        file_proto.ParseFromString(file_descriptor.serialized_pb)
        factory.pool.Add(file_proto)
    # Then add our types
    with open(descriptor_path, 'rb') as f:
        fileset = google.protobuf.descriptor_pb2.FileDescriptorSet.FromString(f.read())
    for file_proto in fileset.file:
        factory.pool.Add(file_proto)
    message_classes = factory.GetMessages([file_proto.name for file_proto in fileset.file])

    # HACK to add nested types. Is there an API for this?
    for desc in factory.pool._descriptors.values():
        if desc.full_name not in message_classes:
            message_classes[desc.full_name] = factory.GetPrototype(desc)

    method_info = {}

    for file_proto in fileset.file:
        for service in file_proto.service:
            for method in service.method:
                k = "{}.{}".format(service.name, method.name)
                input_type = method.input_type
                output_type = method.output_type
                if input_type.startswith('.'):
                    input_type = input_type[1:]
                if output_type.startswith('.'):
                    output_type = output_type[1:]
                if input_type not in message_classes or output_type not in message_classes:
                    print("WARNING: types for method {} not found".format(k))
                    continue
                input_type = message_classes[input_type]
                output_type = message_classes[output_type]

                method_info[k] = (method, input_type, output_type)
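
The loop over factory.pool._descriptors above relies on a private attribute. A sketch of a public-API alternative (an assumption about the intent) that collects nested message classes by recursing over each file's top-level messages:

def collect_message_classes(factory, file_protos):
    classes = {}

    def visit(descriptor):
        # Register this message and recurse into its nested messages.
        classes[descriptor.full_name] = factory.GetPrototype(descriptor)
        for nested in descriptor.nested_types:
            visit(nested)

    for file_proto in file_protos:
        file_descriptor = factory.pool.FindFileByName(file_proto.name)
        for descriptor in file_descriptor.message_types_by_name.values():
            visit(descriptor)
    return classes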
Example #6
    def test_get_request_with_different_copy_of_same_message_class(self):
        some_message_clone = MessageFactory(
            self._method.request_type.DESCRIPTOR.file.pool).GetPrototype(
                self._method.request_type.DESCRIPTOR)

        msg = some_message_clone()

        # Protobuf classes obtained with a MessageFactory may or may not be a
        # unique type, but will always use the same descriptor instance.
        self.assertIsInstance(msg, some_message_clone)
        self.assertIs(msg.DESCRIPTOR, self._method.request_type.DESCRIPTOR)

        result = self._method.get_request(msg, {})
        self.assertIs(result, msg)
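
A short illustration of the comment in this test (ExampleMessage is a placeholder for any generated message class, not a name from the original code):

factory = MessageFactory(ExampleMessage.DESCRIPTOR.file.pool)
clone_class = factory.GetPrototype(ExampleMessage.DESCRIPTOR)
# clone_class may or may not be ExampleMessage itself, but both share the
# same descriptor instance, so their serialized forms are interchangeable.
assert clone_class.DESCRIPTOR is ExampleMessage.DESCRIPTOR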
Example #7
 def __init__(self, message_type):
     descriptor = message_type.DESCRIPTOR
     self._msg_index = _create_msg_index(descriptor)
     self._msg_class = MessageFactory().GetPrototype(descriptor)
Example #8
class MessageBuilder:
    """
    Class used to build protobuf dynamic messages appropriate for
    Python. This builder is intended to be used while traversing
    an Inkling AST, allowing users to set parameters as they are
    discovered in the tree rather than caching names and values
    then calling a single add_field(name,type) operation. For
    testing and ad-hoc purposes, the builder can be used in the
    standard GoF Builder pattern-style popular with Java frameworks.
    >>> x = MessageBuilder('Test')
    >>> Test = (x.with_name('a').with_type(brain_pb2.uint8Type).add_field()
    ...          .with_name('b').with_type(brain_pb2.stringType).add_field()
    ...          .with_name('c').with_type(brain_pb2.doubleType).add_field()
    ...          .build())
    >>> tests = Test()
    >>> tests.a = 42
    >>> tests.b = 'Bonsai Rules!!!!'
    >>> tests.c = 3.14159
    >>> assert(tests.a == 42)
    >>> assert(tests.b == 'Bonsai Rules!!!!')
    >>> assert(tests.c == 3.14159)
    """
    def __init__(self, name=None):
        """
        Creates a message builder that will create a named or anonymous
        message.
        Args:
            name: The name of the message to create. If not provided or
                  set to None, the name is set to
                  'anonymous_message_XXXXXXXX_XXXX_XXXX_XXXX_XXXXXXXXXXXX',
                  where each X is a random hex digit.
        Returns:
            nothing
        """
        self._name = name or 'anonymous_message_{}'.format(generate_guid())
        self._file_descriptor_name = 'schema_containing_{}'.format(self._name)
        self._package = 'bonsai.proto'
        self._full_name = '{}.{}'.format(self._package, self._name)
        self._fields = {}
        self._current_field_name = ''
        self._current_field_type = None
        self._current_field_is_array = False
        self._factory = MessageFactory()
        inkling_file_descriptor = FileDescriptorProto()
        inkling_types_pb2.DESCRIPTOR.CopyToProto(inkling_file_descriptor)
        self._factory.pool.Add(inkling_file_descriptor)
        self._fields_to_resolve = {}

    def as_array(self):
        """
        Marks the current field being added as an array. In Protobuf,
        the field will be a REPEATED field.
        Returns:
            self
        """
        self._current_field_is_array = True
        return self

    def _resolve_composite_schemas(self, descriptor):
        """
        The DescriptorPool in MessageFactory doesn't resolve message
        types for composite schemas (i.e. a Luminance or Matrix schema
        type field in the message). build(), reconstitute(), and
        reconstitute_file() each flag fields in the descriptor marked
        as a TYPE_MESSAGE and caches the type names those fields are
        assigned. Then, after the Descriptor is created, it goes back
        and associates the appropriate structure with those fields.
        Args:
            descriptor: The Descriptor object for the message that
                        needs resolving.
        Returns:
            nothing.
        """
        for field in descriptor.fields:
            if field.type == FieldDescriptor.TYPE_MESSAGE:
                type_name = self._fields_to_resolve[field.name]
                message_type = self._factory.pool.FindMessageTypeByName(type_name)
                field.message_type = message_type

    def reconstitute_from_bytes(self, descriptor_proto_bytes):
        """
        Reconstitutes a Python protobuf class from a byte stream. The
        intended purpose of this function is to create a Protobuf
        Python class from a byte stream sent from another service. This
        way, services can define arbitrary data types and send schemas
        for those types to other services.
        Args:
            descriptor_proto_bytes: Serialized protocol buffer describing
                                    a single class
        Returns:
            A Python class for the message encoded in
            descriptor_proto_bytes.
        """
        descriptor_proto = DescriptorProto()
        descriptor_proto.ParseFromString(descriptor_proto_bytes)
        return self.reconstitute(descriptor_proto)

    def reconstitute(self, descriptor_proto):
        """
        Reconstitutes a Python protobuf class from a DescriptorProto
        message. Use this instead of reconstitute_from_bytes if you've
        already got a DescriptorProto message.
        """
        for field in descriptor_proto.field:
            if field.type == FieldDescriptorProto.TYPE_MESSAGE:
                self._fields_to_resolve[field.name] = field.type_name
        descriptor = MakeDescriptor(descriptor_proto, self._package)
        self._resolve_composite_schemas(descriptor)
        return self._factory.GetPrototype(descriptor)

    def reconstitute_file_from_bytes(self, file_descriptor_proto_bytes):
        """
        Reconstitutes one or more Python protobuf classes from a byte
        stream. The intended purpose of this function is to create a
        set of Protobuf Python classes from a byte stream file sent
        from another service. This way, services can define arbitrary
        data types and send schemas for those types to other services.
        Args:
            file_descriptor_proto_bytes: Serialized protocol buffer file
                                         containing one or more messages.

        Returns:
            An array containing each class contained in
            file_descriptor_proto_bytes.
        """
        file_descriptor_proto = FileDescriptorProto()
        file_descriptor_proto.ParseFromString(file_descriptor_proto_bytes)
        return self.reconstitute_file(file_descriptor_proto)

    def reconstitute_file(self, file_descriptor_proto):
        """
        Reconstitutes one or more Python protobuf classes from a
        FileDescriptorProto message. Use this instead of
        reconstitute_file_from_bytes if you've already got a
        FileDescriptorProto message.
        """
        classes = []
        for message_proto in file_descriptor_proto.message_type:
            for field in message_proto.field:
                if field.type == FieldDescriptorProto.TYPE_MESSAGE:
                    self._fields_to_resolve[field.name] = field.type_name
            descriptor = MakeDescriptor(message_proto, self._package)
            self._resolve_composite_schemas(descriptor)
            message_type = self._factory.GetPrototype(descriptor)
            classes.append(message_type)
        return classes
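
A hedged round-trip sketch for the builder above (SomeMessage stands in for any message class; ship_schema is a made-up helper name): the sender serializes a DescriptorProto, and the receiver rebuilds a usable class from the bytes.

from google.protobuf.descriptor_pb2 import DescriptorProto

def ship_schema(message_class):
    # Copy the class's Descriptor into a DescriptorProto and serialize it.
    descriptor_proto = DescriptorProto()
    message_class.DESCRIPTOR.CopyToProto(descriptor_proto)
    return descriptor_proto.SerializeToString()

# Receiving side:
# received_class = MessageBuilder().reconstitute_from_bytes(ship_schema(SomeMessage))
# instance = received_class()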
Example #9
 def __init__(self):
     self._message_factory = MessageFactory()
     inkling_file_desc = FileDescriptorProto()
     inkling_types_pb2.DESCRIPTOR.CopyToProto(inkling_file_desc)
     self._message_factory.pool.Add(inkling_file_desc)
Example #10
class InklingMessageFactory(object):
    def __init__(self):
        self._message_factory = MessageFactory()
        inkling_file_desc = FileDescriptorProto()
        inkling_types_pb2.DESCRIPTOR.CopyToProto(inkling_file_desc)
        self._message_factory.pool.Add(inkling_file_desc)

    def message_for_dynamic_message(self, dynamic_msg, desc_proto):
        if desc_proto is None:
            return None
        message = self.new_message_from_proto(desc_proto)
        message.ParseFromString(dynamic_msg)
        return message

    def new_message_from_proto(self, desc_proto):
        # TODO(oren.leiman): in sdk1, this happens once for each
        # DescriptorProto. Profile and consider refactoring.
        package = self._create_package_name(desc_proto)
        desc = self._find_descriptor(desc_proto, package)
        if desc is None:
            raise Exception(
                "new_message_from_proto: unable to find descriptor")

        message_cls = self._message_factory.GetPrototype(desc)
        if message_cls is None:
            raise Exception(
                "new_message_from_proto: unable to get prototype")

        return message_cls()

    def _create_package_name(self, desc_proto):
        if not desc_proto.name:
            desc_proto.name = '__INTERNAL_ANONYMOUS__'
        desc_proto_str = MessageToJson(desc_proto)
        signature = hash(desc_proto_str)
        return 'p{}'.format(signature).replace('-', '_')

    def _find_descriptor(self, desc_proto, package):
        if desc_proto is None:
            return None
        full_name = '{}.{}'.format(package, desc_proto.name)
        pool = self._message_factory.pool
        try:
            return pool.FindMessageTypeByName(full_name)
        except KeyError:
            pass

        proto_name = str(uuid.uuid4())
        proto_path = os.path.join(package, proto_name + '.proto')
        file_desc_proto = FileDescriptorProto()
        file_desc_proto.message_type.add().MergeFrom(desc_proto)
        file_desc_proto.name = proto_path
        file_desc_proto.package = package

        file_desc_proto.dependency.append('bonsai/proto/inkling_types.proto')

        file_desc_proto.public_dependency.append(0)

        pool.Add(file_desc_proto)
        result = pool.FindFileByName(proto_path)
        return result.message_types_by_name[desc_proto.name]
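
A hedged usage sketch for the factory above (the ExampleState field layout is invented for illustration, and it assumes the project's inkling_types module is importable):

from google.protobuf.descriptor_pb2 import DescriptorProto, FieldDescriptorProto

desc_proto = DescriptorProto()
desc_proto.name = 'ExampleState'
field = desc_proto.field.add()
field.name = 'speed'
field.number = 1
field.label = FieldDescriptorProto.LABEL_OPTIONAL
field.type = FieldDescriptorProto.TYPE_DOUBLE

factory = InklingMessageFactory()
msg = factory.new_message_from_proto(desc_proto)
msg.speed = 3.5
decoded = factory.message_for_dynamic_message(msg.SerializeToString(), desc_proto)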
Example #11
def str_to_schema(raw_schema):
    file_descriptor_proto = descriptor_pb2.FileDescriptorProto.FromString(
        raw_schema)

    descriptor_pool = DescriptorPool()
    descriptor_pool.Add(file_descriptor_proto)

    # message_descriptors = []
    # for message_type in file_descriptor_proto.message_type:
    #     print(message_type)
    #     # The following line would raise an error asking to import
    #     message_descriptors.append(descriptor.MakeDescriptor(message_type))

    name = file_descriptor_proto.name
    file_descriptor = descriptor_pool.FindFileByName(name)
    descriptor1 = descriptor_pool.FindMessageTypeByName('tutorial.Person')
    descriptor2 = descriptor_pool.FindMessageTypeByName('tutorial.AddressBook')
    descriptor3 = descriptor_pool.FindMessageTypeByName(
        'tutorial.Person.PhoneNumber')

    return descriptor1, descriptor2, descriptor3


protobuf_class = addressbook_syntax_proto3_pb2.AddressBook
assert protobuf_class.DESCRIPTOR.full_name == 'tutorial.AddressBook'

schema_str = schema_to_str(protobuf_class.DESCRIPTOR.file)
descriptor1, descriptor2, descriptor3 = str_to_schema(schema_str)

msg_class = MessageFactory().GetPrototype(descriptor1)

msg = msg_class()
print(type(msg))
Example #12
"""Defines a class for building dynamic protobuf messages.
"""
import uuid
import os

from google.protobuf.descriptor_pb2 import FileDescriptorProto
from google.protobuf.descriptor_pb2 import DescriptorProto
from google.protobuf.message_factory import MessageFactory

from bonsai.proto import inkling_types_pb2

# The message factory
_message_factory = MessageFactory()

# (Relying on Python module implementation being thread-safe here...)
# Add our custom inkling types into the message factory pool so
# they are available to the message factory.
_inkling_file_descriptor = FileDescriptorProto()
inkling_types_pb2.DESCRIPTOR.CopyToProto(_inkling_file_descriptor)
_message_factory.pool.Add(_inkling_file_descriptor)


def _create_package_from_fields(descriptor_proto):
    """
    This generates a "package" name from the fields in a descriptor proto.
    :param descriptor_proto: The DescriptorProto object to analyze.
    :return: Unique "hash" of the fields and field types in descriptor_proto.
    """
    elements = (tuple((f.name, f.number, f.label, f.type, f.type_name)
                      for f in descriptor_proto.field) if descriptor_proto else
                ())
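
The listing cuts off here; a plausible continuation (an assumption, mirroring the hashing approach used in Example #10) would be:

    # Hypothetical completion: hash the field layout into a stable,
    # identifier-safe package name.
    signature = hash(elements)
    return 'p{}'.format(signature).replace('-', '_')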
Example #13
class LogReader(object):
    """
        File-like interface for binary logs.

        >>> with LogReader("path/to/log/file.gz") as log:
        ...     for item in log.items():
        ...         print(item.type)
        ...         print(item.value.some.nested.object)
        ...         print()

        >>> with LogReader("path/to/log/file.gz") as log:
        ...     for value in log.values():
        ...         print(value.some.nested.object)
        ...         print()
    """
    def __init__(self, path):
        self._log_reader = _LogReader(path)
        # header
        header_ = self._log_reader.header
        fd_set = FileDescriptorSet()
        fd_set.ParseFromString(header_.proto)
        self._header = Header(proto=fd_set, types=header_.types)
        # descriptors
        self._pool = DescriptorPool()
        for proto in self._header.proto.file:
            self._pool.Add(proto)
        self._factory = MessageFactory()

    @property
    def path(self):
        """Log path."""
        return self._log_reader.path

    @property
    def header(self):
        """Log header."""
        return self._header

    def __repr__(self):
        return repr(self._log_reader)

    def items(self):
        """Return iterator to log items."""
        this = self

        class Iterator(object):
            def __iter__(self):
                return self

            def next(self):
                return this._next()

            # Alias so the iterator also works under Python 3.
            __next__ = next

        return Iterator()

    def values(self):
        """Return iterator to log values."""
        this = self

        class Iterator(object):
            def __iter__(self):
                return self

            def next(self):
                return this._next().value

            # Alias so the iterator also works under Python 3.
            __next__ = next

        return Iterator()

    def _next(self):
        next_ = self._log_reader.next()
        descriptor = self._pool.FindMessageTypeByName(next_.type)
        value = self._factory.GetPrototype(descriptor)()
        value.ParseFromString(next_.data)
        return LogItem(next_.type, value)

    def read(self):
        """Return None on EOF."""
        try:
            return self._next().value
        except StopIteration:
            return None

    def close(self):
        """Closes LogReader. LogReader will take EOF state."""
        self._log_reader.close()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()
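
A minimal usage sketch for the read() interface above (the log path is a placeholder):

with LogReader('path/to/log/file.gz') as log:
    while True:
        value = log.read()
        if value is None:
            break
        print(value)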