def test_mock_value_original_type_message():
    """A message-typed field mocks to a dict of its subfield mock values."""
    child_fields = collections.OrderedDict((
        ('foo', make_field(name='foo', type='TYPE_INT32')),
        ('bar', make_field(name='bar', type='TYPE_STRING')),
    ))
    msg_type = wrappers.MessageType(
        fields=child_fields,
        message_pb=descriptor_pb2.DescriptorProto(
            name='Message',
            field=[f.field_pb for f in child_fields.values()],
        ),
        meta=metadata.Metadata(address=metadata.Address(
            module='bogus',
            name='Message',
        )),
        nested_enums={},
        nested_messages={},
    )
    message_field = make_field(
        type='TYPE_MESSAGE',
        type_name='bogus.Message',
        message=msg_type,
    )
    assert message_field.mock_value_original_type == {
        "foo": 324,
        "bar": "bar_value",
    }

    # Messages by definition aren't primitive
    with pytest.raises(TypeError):
        message_field.primitive_mock()

    # Special case for map entries
    entry_msg = make_message(
        name='MessageEntry',
        fields=(
            make_field(name='key', type='TYPE_STRING'),
            make_field(name='value', type='TYPE_STRING'),
        ),
        options=descriptor_pb2.MessageOptions(map_entry=True),
    )
    entry_field = make_field(
        name="messages",
        type_name="stuff.MessageEntry",
        message=entry_msg,
        label=3,
        type='TYPE_MESSAGE',
    )
    assert entry_field.mock_value_original_type == {}
def test_type_message():
    """A message-typed field's `type` resolves to its message wrapper."""
    field_type = descriptor_pb2.FieldDescriptorProto.Type
    msg = wrappers.MessageType(
        fields={},
        nested_messages={},
        nested_enums={},
        message_pb=descriptor_pb2.DescriptorProto(),
    )
    field = make_field(
        type=field_type.Value('TYPE_MESSAGE'),
        type_name='bogus.Message',
        message=msg,
    )
    assert field.type == msg
def testCopyToProto_TypeError(self):
    """CopyToProto raises TypeError when given a mismatched target proto."""
    # A FileDescriptorProto is the wrong destination for message, enum,
    # and service descriptors alike.
    file_proto = descriptor_pb2.FileDescriptorProto()
    wrong_sources = (
        unittest_pb2.TestEmptyMessage.DESCRIPTOR,
        unittest_pb2.ForeignEnum.DESCRIPTOR,
        unittest_pb2.TestService.DESCRIPTOR,
    )
    for source in wrong_sources:
        self.assertRaises(TypeError, source.CopyToProto, file_proto)
    # Conversely, a DescriptorProto is the wrong destination for a file
    # descriptor.
    proto = descriptor_pb2.DescriptorProto()
    self.assertRaises(
        TypeError, unittest_import_pb2.DESCRIPTOR.CopyToProto, proto)
def test_field_not_primitive():
    """A message-typed field is not considered primitive."""
    field_type = descriptor_pb2.FieldDescriptorProto.Type
    msg = wrappers.MessageType(
        fields={},
        nested_messages={},
        nested_enums={},
        message_pb=descriptor_pb2.DescriptorProto(),
    )
    field = make_field(
        type=field_type.Value('TYPE_MESSAGE'),
        type_name='bogus.Message',
        message=msg,
    )
    assert not field.is_primitive
def test_operation_meta():
    """OperationType docs mention the LRO and link the response type."""
    response_type = wrappers.MessageType(
        fields={},
        nested_messages={},
        nested_enums={},
        message_pb=descriptor_pb2.DescriptorProto(name='LroResponse'),
        meta=metadata.Metadata(address=metadata.Address(
            name='LroResponse',
            module='foo',
        )),
    )
    operation = wrappers.OperationType(lro_response=response_type)
    doc = operation.meta.doc
    assert 'representing a long-running operation' in doc
    assert ':class:`~.foo.LroResponse`' in doc
def testAdjustReservedRange(self):
    """AdjustReservedRange removes specified skip_reserved_numbers."""
    # Source message with four reserved ranges: [41,41], [42,42],
    # [43,44], and [50,51].
    desc_pb_text = """ reserved_range { start: 41 end: 41 } reserved_range { start: 42 end: 42 } reserved_range { start: 43 end: 44 } reserved_range { start: 50 end: 51 } """
    desc = descriptor_pb2.DescriptorProto()
    text_format.Merge(desc_pb_text, desc)
    target = descriptor_pb2.DescriptorProto()
    # Skipping 42 and 43 should drop both the [42,42] range and the
    # [43,44] range (which contains 43) from the copied ranges.
    merge_active_shadow.AdjustReservedRange(target, desc.reserved_range, [42, 43])
    target_pb_text = """ reserved_range { start: 41 end: 41 } reserved_range { start: 50 end: 51 } """
    self.assertTextProtoEq(target_pb_text, str(target))
def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz',
                 fields: Sequence[wrappers.Field] = (),
                 ) -> wrappers.MessageType:
    """Build a MessageType test fixture from the given fields.

    The package string is split into its dotted components for the
    metadata address.
    """
    address = metadata.Address(
        package=tuple(package.split('.')),
        module=module,
    )
    message_pb = descriptor_pb2.DescriptorProto(
        name=name,
        field=[f.field_pb for f in fields],
    )
    return wrappers.MessageType(
        message_pb=message_pb,
        fields=collections.OrderedDict((f.name, f) for f in fields),
        meta=metadata.Metadata(address=address),
    )
def make_message_pb2(
        name: str,
        fields: tuple = (),
        nested_type: tuple = (),
        enum_type: tuple = (),
        options: desc.MessageOptions = None,
        **kwargs,
) -> desc.DescriptorProto:
    """Build a DescriptorProto, forwarding every argument verbatim."""
    message_pb = desc.DescriptorProto(
        name=name,
        field=fields,
        nested_type=nested_type,
        enum_type=enum_type,
        options=options,
        **kwargs,
    )
    return message_pb
def testJsonName(self):
    """MakeDescriptor derives camelCase json_name; explicit values win."""
    descriptor_proto = descriptor_pb2.DescriptorProto()
    descriptor_proto.name = 'TestJsonName'
    names = ['field_name', 'fieldName', 'FieldName',
             '_field_name', 'FIELD_NAME', 'json_name']
    json_names = ['fieldName', 'fieldName', 'FieldName',
                  'FieldName', 'FIELDNAME', '@type']
    for number, field_name in enumerate(names, start=1):
        field = descriptor_proto.field.add()
        field.number = number
        field.name = field_name
    # Only the last field ('json_name') carries an explicit override,
    # which should take precedence over the derived camelCase name.
    field.json_name = '@type'
    result = descriptor.MakeDescriptor(descriptor_proto)
    for field_descriptor, expected in zip(result.fields, json_names):
        self.assertEqual(field_descriptor.json_name, expected)
def get_message(dot_path: str) -> wrappers.MessageType:
    """Build a bare MessageType wrapper from a dotted path.

    Note: The `dot_path` here is distinct from the canonical proto path
    because it includes the module, which the proto path does not.

    So, if trying to test the DescriptorProto message here, the path
    would be google.protobuf.descriptor.DescriptorProto (whereas the
    proto path is just google.protobuf.DescriptorProto).
    """
    # Everything before the final two components is the package.
    *pkg, module, name = dot_path.split('.')
    return wrappers.MessageType(
        fields={},
        message_pb=descriptor_pb2.DescriptorProto(name=name),
        meta=metadata.Metadata(address=metadata.Address(
            package=pkg,
            module=module,
        )),
    )
def state():
    """Serve the slave's /state endpoint.

    Returns a JSON summary of this slave's version, resources, and
    frameworks; on any failure, returns a 500 response carrying the
    error text.
    """
    try:
        request_debug("Get slave state", request)
        mesos_handler = SlaveHttp.get_mesos_handler()
        state_json = json.dumps({
            'version': mesos_handler.MESOS_VERSION,
            'resources': {},
            'attributes': {},
            'flags': {},
            'frameworks': [],
            'completed_frameworks': [],
        })
        if True:  # if request.is_json:
            resp = Response(state_json, status=200,
                            mimetype="application/json")
        else:
            # Currently unreachable branch kept for reference: serialize
            # the state as a dynamically-built protobuf message instead
            # of JSON.
            try:
                descriptor_proto = descriptor_pb2.DescriptorProto()
                descriptor_proto.name = "state"
                descriptor_proto.field.add(
                    name='version',
                    number=1,
                    type=descriptor_pb2.FieldDescriptorProto.TYPE_STRING,
                    label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL)
                desc = descriptor.MakeDescriptor(descriptor_proto)
                clazz = reflection.MakeClass(desc)
                msg = clazz(version=mesos_handler.MESOS_VERSION)
                ser_msg = msg.SerializeToString()
                logger.info("state content=%s", ser_msg)
                resp = Response(ser_msg, status=200,
                                mimetype="application/x-protobuf")
            except Exception as se:
                msg = "Exception: %s" % se
                logger.error(msg)
                return Response(msg, status=500)
        return resp
    except Exception as e:
        # Bug fix: the original used a single %s with a two-item tuple
        # ("Exception handling: %s" % (request.url_rule, e)), which
        # itself raises TypeError while formatting and masks the real
        # error. Use one placeholder per value.
        msg = "Exception handling %s: %s" % (request.url_rule, e)
        logger.error(msg)
        return Response(msg, status=500)
def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz',
                 fields: typing.Sequence[wrappers.Field] = (),
                 meta: metadata.Metadata = None,
                 options: descriptor_pb2.MethodOptions = None,
                 ) -> wrappers.MessageType:
    """Build a MessageType test fixture with sensible defaults.

    If no metadata is supplied, an address is synthesized from the
    name, package, and module arguments.
    """
    default_meta = metadata.Metadata(address=metadata.Address(
        name=name,
        package=tuple(package.split('.')),
        module=module,
    ))
    message_pb = descriptor_pb2.DescriptorProto(
        name=name,
        field=[f.field_pb for f in fields],
        options=options,
    )
    return wrappers.MessageType(
        message_pb=message_pb,
        fields=collections.OrderedDict((f.name, f) for f in fields),
        nested_messages={},
        nested_enums={},
        meta=meta or default_meta,
    )
def test_mock_value_message():
    """mock_value for a message-typed field renders a constructor call."""
    child_fields = collections.OrderedDict([
        ('foo', make_field(name='foo', type='TYPE_INT32')),
        ('bar', make_field(name='bar', type='TYPE_STRING')),
    ])
    msg_type = wrappers.MessageType(
        fields=child_fields,
        message_pb=descriptor_pb2.DescriptorProto(
            name='Message',
            field=[f.field_pb for f in child_fields.values()],
        ),
        meta=metadata.Metadata(address=metadata.Address(
            module='bogus',
            name='Message',
        )),
        nested_enums={},
        nested_messages={},
    )
    field = make_field(
        type='TYPE_MESSAGE',
        type_name='bogus.Message',
        message=msg_type,
    )
    assert field.mock_value == 'bogus.Message(foo=324)'
def test_proto_builder_constructor():
    """The builder dispatches enums, messages, and services in order."""
    sentinel_message = descriptor_pb2.DescriptorProto()
    sentinel_enum = descriptor_pb2.EnumDescriptorProto()
    sentinel_service = descriptor_pb2.ServiceDescriptorProto()

    # Create a file descriptor proto. It does not matter that none
    # of the sentinels have actual data because this test just ensures
    # they are sent off to the correct methods unmodified.
    fdp = make_file_pb2(
        messages=(sentinel_message,),
        enums=(sentinel_enum,),
        services=(sentinel_service,),
    )

    # Test the load function.
    with mock.patch.object(api._ProtoBuilder, '_load_children') as lc:
        pb = api._ProtoBuilder(
            fdp,
            file_to_generate=True,
            naming=make_naming(),
        )

        # There should be three total calls to load the different types
        # of children, in the order: enums, messages, services.
        assert lc.call_count == 3
        expected = (
            (sentinel_enum, pb._load_enum),
            (sentinel_message, pb._load_message),
            (sentinel_service, pb._load_service),
        )
        for call, (sentinel, loader) in zip(lc.mock_calls, expected):
            _, args, _ = call
            assert args[0][0] == sentinel
            assert args[1] == loader
def test_mock_value_recursive(mock_method, expected):
    """Mock values for self-referential messages must not recurse forever."""
    # The elaborate setup is an unfortunate requirement.
    turtle_field_pb = descriptor_pb2.FieldDescriptorProto(
        name="turtle",
        type="TYPE_MESSAGE",
        type_name=".animalia.chordata.v2.Turtle",
        number=1,
    )
    # It's turtles all the way down ;)
    file_pb = descriptor_pb2.FileDescriptorProto(
        name="turtle.proto",
        package="animalia.chordata.v2",
        message_type=(descriptor_pb2.DescriptorProto(
            name="Turtle",
            field=(turtle_field_pb,),
        ),),
    )
    my_api = api.API.build([file_pb], package="animalia.chordata.v2")
    turtle_msg = my_api.messages["animalia.chordata.v2.Turtle"]
    turtle_field = turtle_msg.fields["turtle"]

    # If not handled properly, this will run forever and eventually OOM.
    assert getattr(turtle_field, mock_method) == expected
def test_map_field_name_disambiguation():
    """Map-entry value types share the import alias of singleton fields."""
    # One file declares the Mollusc message...
    squid_file_pb = descriptor_pb2.FileDescriptorProto(
        name="mollusc.proto",
        package="animalia.mollusca.v2",
        message_type=(descriptor_pb2.DescriptorProto(name="Mollusc", ), ),
    )
    # ...and a second file uses Mollusc both as a plain field type and
    # as the value type of a map field on the same request message.
    method_types_file_pb = descriptor_pb2.FileDescriptorProto(
        name="mollusc_service.proto",
        package="animalia.mollusca.v2",
        message_type=(
            descriptor_pb2.DescriptorProto(
                name="CreateMolluscRequest",
                field=(
                    descriptor_pb2.FieldDescriptorProto(
                        name="mollusc",
                        type="TYPE_MESSAGE",
                        type_name=".animalia.mollusca.v2.Mollusc",
                        number=1,
                    ),
                    descriptor_pb2.FieldDescriptorProto(
                        name="molluscs_map",
                        type="TYPE_MESSAGE",
                        number=2,
                        type_name=".animalia.mollusca.v2.CreateMolluscRequest.MolluscsMapEntry",
                        label="LABEL_REPEATED",
                    ),
                ),
                nested_type=(
                    descriptor_pb2.DescriptorProto(
                        name="MolluscsMapEntry",
                        field=(
                            descriptor_pb2.FieldDescriptorProto(
                                name="key",
                                type="TYPE_STRING",
                                number=1,
                            ),
                            descriptor_pb2.FieldDescriptorProto(
                                name="value",
                                type="TYPE_MESSAGE",
                                number=2,
                                # We use the same type for the map value as for
                                # the singleton above to better highlight the
                                # problem raised in
                                # https://github.com/googleapis/gapic-generator-python/issues/618.
                                # The module _is_ disambiguated for singleton
                                # fields but NOT for map fields.
                                type_name=".animalia.mollusca.v2.Mollusc"),
                        ),
                        options=descriptor_pb2.MessageOptions(map_entry=True),
                    ),
                ),
            ),
        ),
    )
    my_api = api.API.build(
        file_descriptors=[squid_file_pb, method_types_file_pb],
    )
    create = my_api.messages['animalia.mollusca.v2.CreateMolluscRequest']
    mollusc = create.fields['mollusc']
    molluscs_map = create.fields['molluscs_map']
    mollusc_ident = str(mollusc.type.ident)
    mollusc_map_ident = str(molluscs_map.message.fields['value'].type.ident)

    # The same module used in the same place should have the same import alias.
    # Because there's a "mollusc" name used, the import should be disambiguated.
    assert mollusc_ident == mollusc_map_ident == "am_mollusc.Mollusc"
def testCustomOptionsCopyTo(self):
    """Copying a descriptor keeps its custom message options."""
    source_descriptor = (
        unittest_custom_options_pb2.TestMessageWithCustomOptions.DESCRIPTOR)
    copied_proto = descriptor_pb2.DescriptorProto()
    source_descriptor.CopyToProto(copied_proto)
    # Both custom options set on the message should survive the copy.
    self.assertEqual(len(copied_proto.options.ListFields()), 2)
def testMergeActiveShadowMessage(self):
    """MergeActiveShadowMessage recovers shadow fields with oneofs."""
    # Active message: "bar" belongs to the third oneof ("some_oneof");
    # field name "wow" and number 2 are reserved (previously deleted).
    active_pb_text = """ field { number: 1 name: "foo" } field { number: 0 name: "bar" oneof_index: 2 } field { number: 3 name: "baz" } field { number: 4 name: "newbie" } reserved_name: "wow" reserved_range { start: 2 end: 3 } oneof_decl { name: "ign" } oneof_decl { name: "ign2" } oneof_decl { name: "some_oneof" } """
    active_proto = descriptor_pb2.DescriptorProto()
    text_format.Merge(active_pb_text, active_proto)
    # Shadow message still carries the deprecated "wow" field inside its
    # single oneof (index 0, i.e. "some_oneof" in the shadow).
    shadow_pb_text = """ field { number: 1 name: "foo" } field { number: 0 name: "bar" } field { number: 3 name: "baz" } field { number: 2 name: "hidden_envoy_deprecated_wow" oneof_index: 0 } oneof_decl { name: "some_oneof" } """
    shadow_proto = descriptor_pb2.DescriptorProto()
    text_format.Merge(shadow_pb_text, shadow_proto)
    target_proto = descriptor_pb2.DescriptorProto()
    merge_active_shadow.MergeActiveShadowMessage(active_proto, shadow_proto, target_proto)
    # Expected merge: the deprecated field is restored into the active
    # "some_oneof" (remapped to oneof_index 2); the reservations that
    # covered it are dropped.
    target_pb_text = """ field { name: "foo" number: 1 } field { name: "baz" number: 3 } field { name: "newbie" number: 4 } field { name: "bar" number: 0 oneof_index: 2 } field { name: "hidden_envoy_deprecated_wow" number: 2 oneof_index: 2 } oneof_decl { name: "ign" } oneof_decl { name: "ign2" } oneof_decl { name: "some_oneof" } """
    self.assertTextProtoEq(target_pb_text, str(target_proto))
def make_message_pb2(name: str, fields=()) -> descriptor_pb2.DescriptorProto:
    """Shortcut for building a named DescriptorProto with the given fields."""
    message_pb = descriptor_pb2.DescriptorProto(name=name, field=fields)
    return message_pb
def testmerge_active_shadow_message_comments(self):
    """merge_active_shadow_message preserves comment field correspondence."""
    # Active message: seven fields spread across four oneofs, with three
    # reserved names for fields that were deleted (and survive only in
    # the shadow message below).
    active_pb_text = """ field { number: 9 name: "oneof_1_0" oneof_index: 0 } field { number: 1 name: "simple_field_0" } field { number: 0 name: "oneof_2_0" oneof_index: 2 } field { number: 8 name: "oneof_2_1" oneof_index: 2 } field { number: 3 name: "oneof_0_0" oneof_index: 1 } field { number: 4 name: "newbie" } field { number: 7 name: "oneof_3_0" oneof_index: 3 } reserved_name: "missing_oneof_field_0" reserved_name: "missing_oneof_field_1" reserved_name: "missing_oneof_field_2" oneof_decl { name: "oneof_0" } oneof_decl { name: "oneof_1" } oneof_decl { name: "oneof_2" } oneof_decl { name: "oneof_3" } """
    active_proto = descriptor_pb2.DescriptorProto()
    text_format.Merge(active_pb_text, active_proto)
    # Source-code comments keyed by field index in the active message
    # (path [4, 1, 2, N] = message 1, field N). Entries under other
    # paths ("ignore_*") must pass through untouched.
    active_source_code_info_text = """ location { path: [4, 1, 2, 4] leading_comments: "field_4" } location { path: [4, 1, 2, 5] leading_comments: "field_5" } location { path: [4, 1, 2, 3] leading_comments: "field_3" } location { path: [4, 1, 2, 0] leading_comments: "field_0" } location { path: [4, 1, 2, 1] leading_comments: "field_1" } location { path: [4, 0, 2, 2] leading_comments: "ignore_0" } location { path: [4, 1, 2, 6] leading_comments: "field_6" } location { path: [4, 1, 2, 2] leading_comments: "field_2" } location { path: [3] leading_comments: "ignore_1" } """
    active_source_code_info = descriptor_pb2.SourceCodeInfo()
    text_format.Merge(active_source_code_info_text, active_source_code_info)
    # Shadow message holding the three deprecated fields; note its oneof
    # list includes "some_removed_oneof", which shifts the shadow's
    # oneof indices relative to the active message.
    shadow_pb_text = """ field { number: 10 name: "hidden_envoy_deprecated_missing_oneof_field_0" oneof_index: 0 } field { number: 11 name: "hidden_envoy_deprecated_missing_oneof_field_1" oneof_index: 3 } field { number: 11 name: "hidden_envoy_deprecated_missing_oneof_field_2" oneof_index: 2 } oneof_decl { name: "oneof_0" } oneof_decl { name: "oneof_1" } oneof_decl { name: "oneof_2" } oneof_decl { name: "some_removed_oneof" } oneof_decl { name: "oneof_3" } """
    shadow_proto = descriptor_pb2.DescriptorProto()
    text_format.Merge(shadow_pb_text, shadow_proto)
    target_proto = descriptor_pb2.DescriptorProto()
    source_code_info = api_type_context.SourceCodeInfo('fake', active_source_code_info)
    fake_type_context = api_type_context.TypeContext(source_code_info, 'fake_package')
    target_proto_dependencies = []
    merge_active_shadow.merge_active_shadow_message(
        fake_type_context.extend_message(1, "foo", False), active_proto,
        shadow_proto, target_proto, target_proto_dependencies)
    # Expected merge: deprecated fields are re-inserted immediately after
    # the active fields of their (remapped) oneofs.
    target_pb_text = """ field { name: "oneof_1_0" number: 9 oneof_index: 0 } field { name: "hidden_envoy_deprecated_missing_oneof_field_0" number: 10 oneof_index: 0 } field { name: "simple_field_0" number: 1 } field { name: "oneof_2_0" number: 0 oneof_index: 2 } field { name: "oneof_2_1" number: 8 oneof_index: 2 } field { name: "hidden_envoy_deprecated_missing_oneof_field_2" number: 11 oneof_index: 2 } field { name: "oneof_0_0" number: 3 oneof_index: 1 } field { name: "newbie" number: 4 } field { name: "oneof_3_0" number: 7 oneof_index: 3 } field { name: "hidden_envoy_deprecated_missing_oneof_field_1" number: 11 oneof_index: 4 } oneof_decl { name: "oneof_0" } oneof_decl { name: "oneof_1" } oneof_decl { name: "oneof_2" } oneof_decl { name: "oneof_3" } oneof_decl { name: "some_removed_oneof" } """
    # Expected comments: each "field_N" comment follows its field to the
    # field's new index in the merged message; "ignore_*" entries are
    # unchanged.
    target_source_code_info_text = """ location { path: 4 path: 1 path: 2 path: 6 leading_comments: "field_4" } location { path: 4 path: 1 path: 2 path: 7 leading_comments: "field_5" } location { path: 4 path: 1 path: 2 path: 4 leading_comments: "field_3" } location { path: 4 path: 1 path: 2 path: 0 leading_comments: "field_0" } location { path: 4 path: 1 path: 2 path: 2 leading_comments: "field_1" } location { path: 4 path: 0 path: 2 path: 2 leading_comments: "ignore_0" } location { path: 4 path: 1 path: 2 path: 8 leading_comments: "field_6" } location { path: 4 path: 1 path: 2 path: 3 leading_comments: "field_2" } location { path: 3 leading_comments: "ignore_1" } """
    self.maxDiff = None
    self.assert_text_proto_eq(target_pb_text, str(target_proto))
    self.assert_text_proto_eq(target_source_code_info_text,
                              str(fake_type_context.source_code_info.proto))
def __new__(mcls, name, bases, attrs):
    """Create a new proto-plus Message class.

    Builds the underlying DescriptorProto for the class body, decomposes
    map fields into entry messages, assigns oneof indices (including
    synthetic oneofs for ``optional`` fields), and registers the
    descriptor with the enclosing file's descriptor proto.
    """
    # Do not do any special behavior for Message itself.
    if not bases:
        return super().__new__(mcls, name, bases, attrs)

    # Get the essential information about the proto package, and where
    # this component belongs within the file.
    package, marshal = _package_info.compile(name, attrs)

    # Determine the local path of this proto component within the file.
    local_path = tuple(attrs.get("__qualname__", name).split("."))

    # Sanity check: We get the wrong full name if a class is declared
    # inside a function local scope; correct this.
    if "<locals>" in local_path:
        ix = local_path.index("<locals>")
        local_path = local_path[:ix - 1] + local_path[ix + 1:]

    # Determine the full name in protocol buffers.
    full_name = ".".join((package, ) + local_path).lstrip(".")

    # Special case: Maps. Map fields are special; they are essentially
    # shorthand for a nested message and a repeated field of that message.
    # Decompose each map into its constituent form.
    # https://developers.google.com/protocol-buffers/docs/proto3#maps
    map_fields = {}
    for key, field in attrs.items():
        if not isinstance(field, MapField):
            continue

        # Determine the name of the entry message.
        # (snake_case key -> PascalCase + "Entry", e.g. foo_bar -> FooBarEntry)
        msg_name = "{pascal_key}Entry".format(pascal_key=re.sub(
            r"_\w",
            lambda m: m.group()[1:].upper(),
            key,
        ).replace(key[0], key[0].upper(), 1), )

        # Create the "entry" message (with the key and value fields).
        #
        # Note: We instantiate an ordered dictionary here and then
        # attach key and value in order to ensure that the fields are
        # iterated in the correct order when the class is created.
        # This is only an issue in Python 3.5, where the order is
        # random (and the wrong order causes the pool to refuse to add
        # the descriptor because reasons).
        entry_attrs = collections.OrderedDict({
            "__module__": attrs.get("__module__", None),
            "__qualname__": "{prefix}.{name}".format(
                prefix=attrs.get("__qualname__", name),
                name=msg_name,
            ),
            "_pb_options": {
                "map_entry": True
            },
        })
        entry_attrs["key"] = Field(field.map_key_type, number=1)
        entry_attrs["value"] = Field(
            field.proto_type,
            number=2,
            enum=field.enum,
            message=field.message,
        )
        map_fields[msg_name] = MessageMeta(msg_name, (Message, ), entry_attrs)

        # Create the repeated field for the entry message.
        map_fields[key] = RepeatedField(
            ProtoType.MESSAGE,
            number=field.number,
            message=map_fields[msg_name],
        )

    # Add the new entries to the attrs
    attrs.update(map_fields)

    # Okay, now we deal with all the rest of the fields.
    # Iterate over all the attributes and separate the fields into
    # their own sequence.
    fields = []
    new_attrs = {}
    oneofs = collections.OrderedDict()
    proto_imports = set()
    index = 0
    for key, field in attrs.items():
        # Sanity check: If this is not a field, do nothing.
        if not isinstance(field, Field):
            # The field objects themselves should not be direct attributes.
            new_attrs[key] = field
            continue

        # Add data that the field requires that we do not take in the
        # constructor because we can derive it from the metaclass.
        # (The goal is to make the declaration syntax as nice as possible.)
        field.mcls_data = {
            "name": key,
            "parent_name": full_name,
            "index": index,
            "package": package,
        }

        # Add the field to the list of fields.
        fields.append(field)

        # If this field is part of a "oneof", ensure the oneof itself
        # is represented.
        if field.oneof:
            # Keep a running tally of the index of each oneof, and assign
            # that index to the field's descriptor.
            oneofs.setdefault(field.oneof, len(oneofs))
            field.descriptor.oneof_index = oneofs[field.oneof]

        # If this field references a message, it may be from another
        # proto file; ensure we know about the import (to faithfully
        # construct our file descriptor proto).
        if field.message and not isinstance(field.message, str):
            field_msg = field.message
            if hasattr(field_msg, "pb") and callable(field_msg.pb):
                field_msg = field_msg.pb()

            # Sanity check: The field's message may not yet be defined if
            # it was a Message defined in the same file, and the file
            # descriptor proto has not yet been generated.
            #
            # We do nothing in this situation; everything will be handled
            # correctly when the file descriptor is created later.
            if field_msg:
                proto_imports.add(field_msg.DESCRIPTOR.file.name)

        # Same thing, but for enums.
        elif field.enum and not isinstance(field.enum, str):
            field_enum = (field.enum._meta.pb if hasattr(
                field.enum, "_meta") else field.enum.DESCRIPTOR)

            if field_enum:
                proto_imports.add(field_enum.file.name)

        # Increment the field index counter.
        index += 1

    # As per descriptor.proto, all synthetic oneofs must be ordered after
    # 'real' oneofs.
    opt_attrs = {}
    for field in fields:
        if field.optional:
            field.oneof = "_{}".format(field.name)
            field.descriptor.oneof_index = oneofs[field.oneof] = len(
                oneofs)
            opt_attrs[field.name] = field.name

    # Generating a metaclass dynamically provides class attributes that
    # instances can't see. This provides idiomatically named constants
    # that enable the following pattern to check for field presence:
    #
    #     class MyMessage(proto.Message):
    #         field = proto.Field(proto.INT32, number=1, optional=True)
    #
    #     m = MyMessage()
    #     MyMessage.field in m
    if opt_attrs:
        mcls = type("AttrsMeta", (mcls, ), opt_attrs)

    # Determine the filename.
    # We determine an appropriate proto filename based on the
    # Python module.
    filename = _file_info._FileInfo.proto_file_name(
        new_attrs.get("__module__", name.lower()))

    # Get or create the information about the file, including the
    # descriptor to which the new message descriptor shall be added.
    file_info = _file_info._FileInfo.maybe_add_descriptor(
        filename, package)

    # Ensure any imports that would be necessary are assigned to the file
    # descriptor proto being created.
    for proto_import in proto_imports:
        if proto_import not in file_info.descriptor.dependency:
            file_info.descriptor.dependency.append(proto_import)

    # Retrieve any message options.
    opts = descriptor_pb2.MessageOptions(
        **new_attrs.pop("_pb_options", {}))

    # Create the underlying proto descriptor.
    desc = descriptor_pb2.DescriptorProto(
        name=name,
        field=[i.descriptor for i in fields],
        oneof_decl=[
            descriptor_pb2.OneofDescriptorProto(name=i)
            for i in oneofs.keys()
        ],
        options=opts,
    )

    # If any descriptors were nested under this one, they need to be
    # attached as nested types here.
    child_paths = [
        p for p in file_info.nested.keys() if local_path == p[:-1]
    ]
    for child_path in child_paths:
        desc.nested_type.add().MergeFrom(file_info.nested.pop(child_path))

    # Same thing, but for enums
    child_paths = [
        p for p in file_info.nested_enum.keys() if local_path == p[:-1]
    ]
    for child_path in child_paths:
        desc.enum_type.add().MergeFrom(
            file_info.nested_enum.pop(child_path))

    # Add the descriptor to the file if it is a top-level descriptor,
    # or to a "holding area" for nested messages otherwise.
    if len(local_path) == 1:
        file_info.descriptor.message_type.add().MergeFrom(desc)
    else:
        file_info.nested[local_path] = desc

    # Create the MessageInfo instance to be attached to this message.
    new_attrs["_meta"] = _MessageInfo(
        fields=fields,
        full_name=full_name,
        marshal=marshal,
        options=opts,
        package=package,
    )

    # Run the superclass constructor.
    cls = super().__new__(mcls, name, bases, new_attrs)

    # The info class and fields need a reference to the class just created.
    cls._meta.parent = cls
    for field in cls._meta.fields.values():
        field.parent = cls

    # Add this message to the _FileInfo instance; this allows us to
    # associate the descriptor with the message once the descriptor
    # is generated.
    file_info.messages[full_name] = cls

    # Generate the descriptor for the file if it is ready.
    if file_info.ready(new_class=cls):
        file_info.generate_file_pb(new_class=cls, fallback_salt=full_name)

    # Done; return the class.
    return cls
def make_message_pb2(name: str, fields: tuple = (), **kwargs) -> desc.DescriptorProto:
    """Build a named DescriptorProto, forwarding extra keyword arguments."""
    message_pb = desc.DescriptorProto(name=name, field=fields, **kwargs)
    return message_pb
def testmerge_active_shadow_message(self):
    """merge_active_shadow_message recovers shadow fields with oneofs."""
    # Active message: "bar" belongs to the third oneof ("some_oneof");
    # field name "wow" and number 2 are reserved (previously deleted).
    active_pb_text = """ field { number: 1 name: "foo" } field { number: 0 name: "bar" oneof_index: 2 } field { number: 3 name: "baz" } field { number: 4 name: "newbie" } reserved_name: "wow" reserved_range { start: 2 end: 3 } oneof_decl { name: "ign" } oneof_decl { name: "ign2" } oneof_decl { name: "some_oneof" } """
    active_proto = descriptor_pb2.DescriptorProto()
    text_format.Merge(active_pb_text, active_proto)
    # Shadow message still carries the deprecated "wow" field, complete
    # with validate/annotation extension options, inside its single oneof.
    shadow_pb_text = """ field { number: 1 name: "foo" } field { number: 0 name: "bar" } field { number: 3 name: "baz" } field { number: 5 name: "hidden_envoy_deprecated_wow" options { deprecated: true [validate.rules] { string { max_bytes: 1024 } } [envoy.annotations.disallowed_by_default]: true } oneof_index: 0 } oneof_decl { name: "some_oneof" } """
    shadow_proto = descriptor_pb2.DescriptorProto()
    text_format.Merge(shadow_pb_text, shadow_proto)
    target_proto = descriptor_pb2.DescriptorProto()
    target_proto_dependencies = []
    merge_active_shadow.merge_active_shadow_message(self.fake_type_context(), active_proto, shadow_proto, target_proto, target_proto_dependencies)
    # Expected merge: the deprecated field (options intact) is restored
    # into "some_oneof" (remapped to oneof_index 2); its field number 5
    # is not reserved, so the reserved_range survives. Using the
    # annotation also adds the deprecation proto as a dependency.
    target_pb_text = """ field { name: "foo" number: 1 } field { name: "bar" number: 0 oneof_index: 2 } field { name: "hidden_envoy_deprecated_wow" number: 5 options { deprecated: true [validate.rules] { string { max_bytes: 1024 } } [envoy.annotations.disallowed_by_default]: true } oneof_index: 2 } field { name: "baz" number: 3 } field { name: "newbie" number: 4 } oneof_decl { name: "ign" } oneof_decl { name: "ign2" } oneof_decl { name: "some_oneof" } reserved_range { start: 2 end: 3 } """
    self.assert_text_proto_eq(target_pb_text, str(target_proto))
    self.assertEqual(target_proto_dependencies[0], 'envoy/annotations/deprecation.proto')