def test_address_subpackage_no_version():
    """The subpackage is the package portion after the proto package prefix."""
    address = metadata.Address(
        package=('foo', 'bar', 'baz', 'spam', 'eggs'),
        api_naming=naming.NewNaming(proto_package='foo.bar.baz'),
    )
    assert address.subpackage == ('spam', 'eggs')
def test_address_subpackage_empty():
    """When the package equals the proto package, the subpackage is empty."""
    address = metadata.Address(
        package=('foo', 'bar', 'baz', 'v1'),
        api_naming=naming.NewNaming(proto_package='foo.bar.baz.v1'),
    )
    assert address.subpackage == ()
def test_address_rel():
    """rel() quotes the bare name when the other address shares package and module."""
    target = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon')
    same_module = metadata.Address(package=('foo', 'bar'), module='baz')
    assert target.rel(same_module) == "'Bacon'"
def test_address_resolve():
    """resolve() qualifies bare names with the package; qualified names pass through."""
    address = metadata.Address(package=('foo', 'bar'), module='baz', name='Qux')
    cases = {
        'Bacon': 'foo.bar.Bacon',
        'foo.bar.Bacon': 'foo.bar.Bacon',
        'google.example.Bacon': 'google.example.Bacon',
    }
    for given, expected in cases.items():
        assert address.resolve(given) == expected
def test_address_proto():
    """proto is the fully-qualified proto name; proto_package is just the package."""
    address = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon')
    assert address.proto == 'foo.bar.Bacon'
    assert address.proto_package == 'foo.bar'
def test_address_child_no_parent():
    """A child of a top-level address gets the name and path but no parent chain."""
    top_level = metadata.Address(package=('foo', 'bar'), module='baz')
    child = top_level.child('Bacon', path=(4, 0))
    assert child.name == 'Bacon'
    assert child.parent == ()
    assert child.module_path == (4, 0)
def test_address_str():
    """str() renders a named address as module.Name."""
    address = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon')
    assert str(address) == 'baz.Bacon'
def test_address_str_parent():
    """str() interposes the parent chain between the module and the name."""
    address = metadata.Address(
        package=('foo', 'bar'),
        module='baz',
        name='Bacon',
        parent=('spam', 'eggs'),
    )
    assert str(address) == 'baz.spam.eggs.Bacon'
def test_address_rel_other():
    """rel() keeps the module-qualified name when package or module differs."""
    address = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon')
    other_package = metadata.Address(package=('foo', 'not_bar'), module='baz')
    other_module = metadata.Address(package=('foo', 'bar'), module='not_baz')
    assert address.rel(other_package) == 'baz.Bacon'
    assert address.rel(other_module) == 'baz.Bacon'
def test_address_str_no_parent():
    """str() of an unnamed address falls back to the dotted package."""
    address = metadata.Address(package=('foo', 'bar'), module='baz')
    assert str(address) == 'foo.bar'
def __init__(
        self,
        file_descriptor: descriptor_pb2.FileDescriptorProto,
        file_to_generate: bool,
        naming: api_naming.Naming,
        # NOTE(review): Options() is evaluated once at def time and shared
        # across calls — fine only if Options is immutable; confirm.
        opts: Options = Options(),
        prior_protos: Optional[Mapping[str, Proto]] = None,
        load_services: bool = True,
        all_resources: Optional[Mapping[str, wrappers.MessageType]] = None,
        ):
    """Ingest a FileDescriptorProto and wrap its contents.

    Populates ``proto_messages``, ``proto_enums``, and (when this file is a
    generation target and ``load_services`` is set) ``proto_services`` by
    walking the descriptor, and builds the ``docs`` comment index and the
    base ``address`` used by every child as it is loaded.

    Args:
        file_descriptor: The compiled descriptor of one ``.proto`` file.
        file_to_generate: Whether services in this file should be generated
            (imports-only files skip service loading entirely).
        naming: The API naming scheme attached to every address.
        opts: Generator options, stored on the instance.
        prior_protos: Previously-loaded protos; defaults to an empty mapping.
        load_services: Set to False to skip service loading even for a
            generation target.
        all_resources: Known resource messages passed down to child loaders.

    Raises:
        TypeError: If a field references a type name that resolves to
            neither a known message nor a known enum.
    """
    self.proto_messages: Dict[str, wrappers.MessageType] = {}
    self.proto_enums: Dict[str, wrappers.EnumType] = {}
    self.proto_services: Dict[str, wrappers.Service] = {}
    self.file_descriptor = file_descriptor
    self.file_to_generate = file_to_generate
    self.prior_protos = prior_protos or {}
    self.opts = opts

    # Iterate over the documentation and place it into a dictionary.
    #
    # The comments in protocol buffers are sorted by a concept called
    # the "path", which is a sequence of integers described in more
    # detail below; this code simply shifts from a list to a dict,
    # with tuples of paths as the dictionary keys.
    self.docs: Dict[Tuple[int, ...], descriptor_pb2.SourceCodeInfo.Location] = {}
    for location in file_descriptor.source_code_info.location:
        self.docs[tuple(location.path)] = location

    # Everything has an "address", which is the proto where the thing
    # was declared.
    #
    # We put this together by a baton pass of sorts: everything in
    # this file *starts with* this address, which is appended to
    # for each item as it is loaded.
    self.address = metadata.Address(
        api_naming=naming,
        # e.g. 'google/foo/bar.proto' -> 'bar'
        module=file_descriptor.name.split('/')[-1][:-len('.proto')],
        package=tuple(file_descriptor.package.split('.')),
    )

    # Now iterate over the FileDescriptorProto and pull out each of
    # the messages, enums, and services.
    #
    # The hard-coded path keys sent here are based on how descriptor.proto
    # works; it uses the proto message number of the pieces of each
    # message (e.g. the hard-code `4` for `message_type` immediately
    # below is because `repeated DescriptorProto message_type = 4;` in
    # descriptor.proto itself).
    self._load_children(file_descriptor.enum_type, self._load_enum,
                        address=self.address, path=(5, ),
                        resources=all_resources or {})
    self._load_children(file_descriptor.message_type, self._load_message,
                        address=self.address, path=(4, ),
                        resources=all_resources or {})

    # Edge case: Protocol buffers is not particularly picky about
    # ordering, and it is possible that a message will have had a field
    # referencing another message which appears later in the file
    # (or itself, recursively).
    #
    # In this situation, we would not have come across the message yet,
    # and the field would have its original textual reference to the
    # message (`type_name`) but not its resolved message wrapper.
    orphan_field_gen = (
        (field.type_name.lstrip('.'), field)
        for message in self.proto_messages.values()
        for field in message.fields.values()
        if field.type_name and not (field.message or field.enum))
    for key, field in orphan_field_gen:
        maybe_msg_type = self.proto_messages.get(key)
        maybe_enum_type = self.proto_enums.get(key)
        # Fields are frozen wrappers, so resolution is patched in with
        # object.__setattr__ rather than normal attribute assignment.
        if maybe_msg_type:
            object.__setattr__(field, 'message', maybe_msg_type)
        elif maybe_enum_type:
            object.__setattr__(field, 'enum', maybe_enum_type)
        else:
            raise TypeError(f"Unknown type referenced in "
                            f"{self.file_descriptor.name}: '{key}'")

    # Only generate the service if this is a target file to be generated.
    # This prevents us from generating common services (e.g. LRO) when
    # they are being used as an import just to get types declared in the
    # same files.
    if file_to_generate and load_services:
        self._load_children(file_descriptor.service, self._load_service,
                            address=self.address, path=(6, ),
                            resources=all_resources or {})
def test_address_str_different_proto_package():
    """Addresses outside the API's proto package render with a _pb2 module."""
    address = metadata.Address(
        package=('google', 'iam', 'v1'),
        module='options',
        name='GetPolicyOptions',
        api_naming=naming.NewNaming(proto_package='foo.bar.baz.v1'),
    )
    assert str(address) == 'options_pb2.GetPolicyOptions'
def __init__(self,
        file_descriptor: descriptor_pb2.FileDescriptorProto,
        file_to_generate: bool,
        prior_protos: Optional[Mapping[str, Proto]] = None):
    """Ingest a FileDescriptorProto and wrap its contents.

    Populates ``messages``, ``enums``, and (when this file is a generation
    target) ``services`` by walking the descriptor, and builds the ``docs``
    comment index and the base address passed to every child loader.

    Args:
        file_descriptor: The compiled descriptor of one ``.proto`` file.
        file_to_generate: Whether services in this file should be generated
            (imports-only files skip service loading entirely).
        prior_protos: Previously-loaded protos; defaults to an empty mapping.
    """
    self.messages = {}
    self.enums = {}
    self.services = {}
    self.file_descriptor = file_descriptor
    self.file_to_generate = file_to_generate
    self.prior_protos = prior_protos or {}

    # Iterate over the documentation and place it into a dictionary.
    #
    # The comments in protocol buffers are sorted by a concept called
    # the "path", which is a sequence of integers described in more
    # detail below; this code simply shifts from a list to a dict,
    # with tuples of paths as the dictionary keys.
    self.docs = {}
    for location in file_descriptor.source_code_info.location:
        self.docs[tuple(location.path)] = location

    # Everything has an "address", which is the proto where the thing
    # was declared.
    #
    # We put this together by a baton pass of sorts: everything in
    # this file *starts with* this address, which is appended to
    # for each item as it is loaded.
    address = metadata.Address(
        # e.g. 'google/foo/bar.proto' -> 'bar'
        module=file_descriptor.name.split('/')[-1][:-len('.proto')],
        package=file_descriptor.package.split('.'),
    )

    # Now iterate over the FileDescriptorProto and pull out each of
    # the messages, enums, and services.
    #
    # The hard-coded path keys sent here are based on how descriptor.proto
    # works; it uses the proto message number of the pieces of each
    # message (e.g. the hard-code `4` for `message_type` immediately
    # below is because `repeated DescriptorProto message_type = 4;` in
    # descriptor.proto itself).
    self._load_children(file_descriptor.enum_type, self._load_enum,
                        address=address, path=(5, ))
    self._load_children(file_descriptor.message_type, self._load_message,
                        address=address, path=(4, ))

    # Edge case: Protocol buffers is not particularly picky about
    # ordering, and it is possible that a message will have had a field
    # referencing another message which appears later in the file
    # (or itself, recursively).
    #
    # In this situation, we would not have come across the message yet,
    # and the field would have its original textual reference to the
    # message (`type_name`) but not its resolved message wrapper.
    #
    # NOTE(review): only self.messages is consulted here; a type_name
    # that refers to an enum declared later in the file would raise
    # KeyError rather than being resolved — confirm whether forward
    # references to enums can occur for this code path.
    for message in self.messages.values():
        for field in message.fields.values():
            if field.type_name and not any((field.message, field.enum)):
                # Fields are frozen wrappers, so resolution is patched in
                # with object.__setattr__ rather than normal assignment.
                object.__setattr__(
                    field,
                    'message',
                    self.messages[field.type_name.lstrip('.')],
                )

    # Only generate the service if this is a target file to be generated.
    # This prevents us from generating common services (e.g. LRO) when
    # they are being used as an import just to get types declared in the
    # same files.
    if file_to_generate:
        self._load_children(file_descriptor.service, self._load_service,
                            address=address, path=(6, ))