def __new__(mcls, name, bases, attrs):
    """Create a new proto-plus message class.

    Translates the declarative class body (``Field`` / ``MapField`` /
    ``RepeatedField`` attributes) into a ``descriptor_pb2.DescriptorProto``,
    registers it with the per-module ``_FileInfo``, and attaches a
    ``_MessageInfo`` (as ``_meta``) to the resulting class.

    Args:
        mcls: This metaclass.
        name: The name of the message class being created.
        bases: Base classes; empty only for the ``Message`` base itself.
        attrs: The class namespace, containing field declarations.

    Returns:
        type: The newly created message class.
    """
    # Do not do any special behavior for Message itself.
    # (The abstract ``Message`` base is created with no bases.)
    if not bases:
        return super().__new__(mcls, name, bases, attrs)

    # Get the essential information about the proto package, and where
    # this component belongs within the file.
    package, marshal = _package_info.compile(name, attrs)

    # Determine the local path of this proto component within the file.
    local_path = tuple(attrs.get("__qualname__", name).split("."))

    # Sanity check: We get the wrong full name if a class is declared
    # inside a function local scope; correct this.
    # NOTE: this drops both the "<locals>" marker and the enclosing
    # function's name from the qualname path.
    if "<locals>" in local_path:
        ix = local_path.index("<locals>")
        local_path = local_path[:ix - 1] + local_path[ix + 1:]

    # Determine the full name in protocol buffers.
    # lstrip(".") handles the empty-package case.
    full_name = ".".join((package, ) + local_path).lstrip(".")

    # Special case: Maps. Map fields are special; they are essentially
    # shorthand for a nested message and a repeated field of that message.
    # Decompose each map into its constituent form.
    # https://developers.google.com/protocol-buffers/docs/proto3#maps
    map_fields = {}
    for key, field in attrs.items():
        if not isinstance(field, MapField):
            continue

        # Determine the name of the entry message.
        # Converts a snake_case field name to PascalCase and appends
        # "Entry", per the protobuf map-entry naming convention.
        msg_name = "{pascal_key}Entry".format(pascal_key=re.sub(
            r"_\w",
            lambda m: m.group()[1:].upper(),
            key,
        ).replace(key[0], key[0].upper(), 1), )

        # Create the "entry" message (with the key and value fields).
        #
        # Note: We instantiate an ordered dictionary here and then
        # attach key and value in order to ensure that the fields are
        # iterated in the correct order when the class is created.
        # This is only an issue in Python 3.5, where the order is
        # random (and the wrong order causes the pool to refuse to add
        # the descriptor because reasons).
        entry_attrs = collections.OrderedDict({
            "__module__": attrs.get("__module__", None),
            "__qualname__": "{prefix}.{name}".format(
                prefix=attrs.get("__qualname__", name),
                name=msg_name,
            ),
            "_pb_options": {
                "map_entry": True
            },
        })
        # Map entries always have exactly two fields: key (1) and value (2).
        entry_attrs["key"] = Field(field.map_key_type, number=1)
        entry_attrs["value"] = Field(
            field.proto_type,
            number=2,
            enum=field.enum,
            message=field.message,
        )
        # Recursive metaclass invocation builds the synthetic entry message.
        map_fields[msg_name] = MessageMeta(msg_name, (Message, ), entry_attrs)

        # Create the repeated field for the entry message.
        map_fields[key] = RepeatedField(
            ProtoType.MESSAGE,
            number=field.number,
            message=map_fields[msg_name],
        )

    # Add the new entries to the attrs
    # (replaces each MapField with its RepeatedField decomposition and
    # adds the synthetic entry message alongside it).
    attrs.update(map_fields)

    # Okay, now we deal with all the rest of the fields.
    # Iterate over all the attributes and separate the fields into
    # their own sequence.
    fields = []
    new_attrs = {}
    oneofs = collections.OrderedDict()
    proto_imports = set()
    index = 0
    for key, field in attrs.items():
        # Sanity check: If this is not a field, do nothing.
        if not isinstance(field, Field):
            # The field objects themselves should not be direct attributes.
            new_attrs[key] = field
            continue

        # Add data that the field requires that we do not take in the
        # constructor because we can derive it from the metaclass.
        # (The goal is to make the declaration syntax as nice as possible.)
        field.mcls_data = {
            "name": key,
            "parent_name": full_name,
            "index": index,
            "package": package,
        }

        # Add the field to the list of fields.
        fields.append(field)

        # If this field is part of a "oneof", ensure the oneof itself
        # is represented.
        if field.oneof:
            # Keep a running tally of the index of each oneof, and assign
            # that index to the field's descriptor.
            oneofs.setdefault(field.oneof, len(oneofs))
            field.descriptor.oneof_index = oneofs[field.oneof]

        # If this field references a message, it may be from another
        # proto file; ensure we know about the import (to faithfully
        # construct our file descriptor proto).
        if field.message and not isinstance(field.message, str):
            field_msg = field.message
            # Unwrap proto-plus messages to their raw pb2 counterpart.
            if hasattr(field_msg, "pb") and callable(field_msg.pb):
                field_msg = field_msg.pb()

            # Sanity check: The field's message may not yet be defined if
            # it was a Message defined in the same file, and the file
            # descriptor proto has not yet been generated.
            #
            # We do nothing in this situation; everything will be handled
            # correctly when the file descriptor is created later.
            if field_msg:
                proto_imports.add(field_msg.DESCRIPTOR.file.name)

        # Same thing, but for enums.
        elif field.enum and not isinstance(field.enum, str):
            field_enum = (field.enum._meta.pb if hasattr(
                field.enum, "_meta") else field.enum.DESCRIPTOR)

            if field_enum:
                proto_imports.add(field_enum.file.name)

        # Increment the field index counter.
        index += 1

    # As per descriptor.proto, all synthetic oneofs must be ordered after
    # 'real' oneofs.
    opt_attrs = {}
    for field in fields:
        if field.optional:
            # proto3 optional fields are represented as single-member
            # synthetic oneofs named "_<field name>".
            field.oneof = "_{}".format(field.name)
            field.descriptor.oneof_index = oneofs[field.oneof] = len(oneofs)
            opt_attrs[field.name] = field.name

    # Generating a metaclass dynamically provides class attributes that
    # instances can't see. This provides idiomatically named constants
    # that enable the following pattern to check for field presence:
    #
    # class MyMessage(proto.Message):
    #     field = proto.Field(proto.INT32, number=1, optional=True)
    #
    # m = MyMessage()
    # MyMessage.field in m
    if opt_attrs:
        mcls = type("AttrsMeta", (mcls, ), opt_attrs)

    # Determine the filename.
    # We determine an appropriate proto filename based on the
    # Python module.
    filename = _file_info._FileInfo.proto_file_name(
        new_attrs.get("__module__", name.lower()))

    # Get or create the information about the file, including the
    # descriptor to which the new message descriptor shall be added.
    file_info = _file_info._FileInfo.maybe_add_descriptor(filename, package)

    # Ensure any imports that would be necessary are assigned to the file
    # descriptor proto being created.
    for proto_import in proto_imports:
        if proto_import not in file_info.descriptor.dependency:
            file_info.descriptor.dependency.append(proto_import)

    # Retrieve any message options.
    opts = descriptor_pb2.MessageOptions(**new_attrs.pop("_pb_options", {}))

    # Create the underlying proto descriptor.
    desc = descriptor_pb2.DescriptorProto(
        name=name,
        field=[i.descriptor for i in fields],
        oneof_decl=[
            descriptor_pb2.OneofDescriptorProto(name=i) for i in oneofs.keys()
        ],
        options=opts,
    )

    # If any descriptors were nested under this one, they need to be
    # attached as nested types here.
    # (A child's local_path is this message's path plus one component.)
    child_paths = [p for p in file_info.nested.keys() if local_path == p[:-1]]
    for child_path in child_paths:
        desc.nested_type.add().MergeFrom(file_info.nested.pop(child_path))

    # Same thing, but for enums
    child_paths = [
        p for p in file_info.nested_enum.keys() if local_path == p[:-1]
    ]
    for child_path in child_paths:
        desc.enum_type.add().MergeFrom(file_info.nested_enum.pop(child_path))

    # Add the descriptor to the file if it is a top-level descriptor,
    # or to a "holding area" for nested messages otherwise.
    if len(local_path) == 1:
        file_info.descriptor.message_type.add().MergeFrom(desc)
    else:
        file_info.nested[local_path] = desc

    # Create the MessageInfo instance to be attached to this message.
    new_attrs["_meta"] = _MessageInfo(
        fields=fields,
        full_name=full_name,
        marshal=marshal,
        options=opts,
        package=package,
    )

    # Run the superclass constructor.
    cls = super().__new__(mcls, name, bases, new_attrs)

    # The info class and fields need a reference to the class just created.
    cls._meta.parent = cls
    for field in cls._meta.fields.values():
        field.parent = cls

    # Add this message to the _FileInfo instance; this allows us to
    # associate the descriptor with the message once the descriptor
    # is generated.
    file_info.messages[full_name] = cls

    # Generate the descriptor for the file if it is ready.
    if file_info.ready(new_class=cls):
        file_info.generate_file_pb(new_class=cls, fallback_salt=full_name)

    # Done; return the class.
    return cls
def make_oneof_pb2(name: str) -> desc.OneofDescriptorProto:
    """Build a ``OneofDescriptorProto`` carrying the given oneof name."""
    oneof_pb = desc.OneofDescriptorProto(name=name)
    return oneof_pb