def testCustomDescriptorPool(self):
    """Exercises lookups on a user-created DescriptorPool fed hand-built protos."""
    # Register a file declaring one message inside a package.
    pool = descriptor_pool.DescriptorPool()
    packaged_file = descriptor_pb2.FileDescriptorProto(
        name='some/file.proto', package='package')
    packaged_file.message_type.add(name='Message')
    pool.Add(packaged_file)
    self.assertEqual(pool.FindFileByName('some/file.proto').name,
                     'some/file.proto')
    self.assertEqual(pool.FindMessageTypeByName('package.Message').name,
                     'Message')

    # Now a file with no package: top-level message, enum and service.
    bare_file = descriptor_pb2.FileDescriptorProto(
        name='some/filename/container.proto')
    top_message = bare_file.message_type.add(name='TopMessage')
    top_message.field.add(
        name='bb',
        number=1,
        type=descriptor_pb2.FieldDescriptorProto.TYPE_INT32,
        label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL)
    top_enum = bare_file.enum_type.add(name='TopEnum')
    top_enum.value.add(name='FOREIGN_FOO', number=4)
    bare_file.service.add(name='TopService')

    pool = descriptor_pool.DescriptorPool()
    pool.Add(bare_file)
    self.assertEqual('TopMessage',
                     pool.FindMessageTypeByName('TopMessage').name)
    self.assertEqual('TopEnum', pool.FindEnumTypeByName('TopEnum').name)
    self.assertEqual('TopService',
                     pool.FindServiceByName('TopService').name)
def validate_fragment(type_name, fragment):
    """Validate a dictionary representing a JSON/YAML fragment against an Envoy API proto3 type.

    Throws Protobuf errors on parsing exceptions; successful validations
    produce no result.

    Args:
      type_name: a string providing the type name, e.g.
        envoy.config.bootstrap.v3.Bootstrap.
      fragment: a dictionary representing the parsed JSON/YAML configuration
        fragment.
    """
    serialized_fragment = json.dumps(fragment)

    # Load the descriptor set shipped as a runfile and register every file
    # descriptor with a dedicated pool.
    descriptor_path = runfiles.Create().Rlocation(
        'envoy/tools/type_whisperer/all_protos_with_ext_pb_text.pb_text')
    descriptor_set = descriptor_pb2.FileDescriptorSet()
    text_format.Parse(
        pathlib.Path(descriptor_path).read_text(),
        descriptor_set,
        allow_unknown_extension=True)
    pool = descriptor_pool.DescriptorPool()
    for file_proto in descriptor_set.file:
        pool.Add(file_proto)

    # Instantiate the target type dynamically and let json_format raise on
    # any mismatch between the fragment and the proto schema.
    msg_descriptor = pool.FindMessageTypeByName(type_name)
    msg = message_factory.MessageFactory(pool=pool).GetPrototype(msg_descriptor)()
    json_format.Parse(serialized_fragment, msg, descriptor_pool=pool)
def testParseDictAnyDescriptorPoolMissingType(self):
    """Any parsing succeeds with the default pool but fails with an empty one."""
    any_payload = {
        '@type': 'type.googleapis.com/proto3.MessageType',
        'value': 1234
    }
    # Confirm that ParseDict does not raise ParseError with default pool.
    json_format.ParseDict({'any_value': any_payload}, any_test_pb2.TestAny())
    # Check ParseDict raises ParseError with empty pool: the type_url cannot
    # be resolved there.
    empty_pool = descriptor_pool.DescriptorPool()
    with self.assertRaises(json_format.ParseError) as cm:
        json_format.ParseDict({'any_value': any_payload},
                              any_test_pb2.TestAny(),
                              descriptor_pool=empty_pool)
    self.assertEqual(
        str(cm.exception),
        'Failed to parse any_value field: Can not find message descriptor by'
        ' type_url: type.googleapis.com/proto3.MessageType..')
def __init__(self, pool=None):
    """Initializes a new factory.

    Args:
      pool: an optional descriptor_pool.DescriptorPool to draw descriptors
        from. When omitted, a fresh pool backed by a DescriptorDatabase is
        created.
    """
    # Explicit None check instead of `pool or ...`: a caller-supplied pool
    # object must be honored even if it were ever falsy.
    if pool is None:
        pool = descriptor_pool.DescriptorPool(
            descriptor_database.DescriptorDatabase())
    self.pool = pool
    # Local cache of all classes built from protobuf descriptors.
    self._classes = {}
def _resolve_proto_operator(self, op: placeholder_pb2.ProtoOperator) -> str:
    """Evaluates the proto operator."""
    raw_message = self.resolve(op.expression)

    # Build a dedicated pool from the schema shipped with the operator and
    # parse the raw JSON message into a dynamic message instance.
    pool = descriptor_pool.DescriptorPool()
    for file_descriptor in op.proto_schema.file_descriptors.file:
        pool.Add(file_descriptor)
    descriptor = pool.FindMessageTypeByName(op.proto_schema.message_type)
    value = message_factory.MessageFactory(pool).GetPrototype(descriptor)()
    json_format.Parse(raw_message, value, descriptor_pool=pool)

    # Walk the optional field path: ".name" reads an attribute, ["key"]
    # indexes a map field, and [N] indexes a repeated field.
    for field in op.proto_field_path or []:
        if field.startswith("."):
            value = getattr(value, field[1:])
            continue
        map_key = re.findall(r"\[['\"](.+)['\"]\]", field)
        if len(map_key) == 1:
            value = value[map_key[0]]
            continue
        index = re.findall(r"\[(\d+)\]", field)
        if len(index) == 1 and str.isdecimal(index[0]):
            value = value[int(index[0])]
            continue
        raise ValueError(f"Got unsupported proto field path: {field}")

    # Scalars pass through unchanged; messages render as text format.
    if isinstance(value, message.Message):
        return text_format.MessageToString(value)
    return value
def _TestEnum(self, prefix):
    """Checks AddEnumDescriptor lookups, optionally with a name prefix."""
    pool = descriptor_pool.DescriptorPool()
    pool.AddEnumDescriptor(unittest_pb2.ForeignEnum.DESCRIPTOR)
    self.assertEqual(
        'protobuf_unittest.ForeignEnum',
        pool.FindEnumTypeByName(
            prefix + 'protobuf_unittest.ForeignEnum').full_name)

    # AddEnumDescriptor is not recursive: nested enums stay unregistered.
    with self.assertRaises(KeyError):
        pool.FindEnumTypeByName(
            prefix + 'protobuf_unittest.ForeignEnum.NestedEnum')

    pool.AddEnumDescriptor(unittest_pb2.TestAllTypes.NestedEnum.DESCRIPTOR)
    self.assertEqual(
        'protobuf_unittest.TestAllTypes.NestedEnum',
        pool.FindEnumTypeByName(
            prefix + 'protobuf_unittest.TestAllTypes.NestedEnum').full_name)

    # Adding an enum implicitly indexes its containing file as well.
    self.assertEqual(
        'google/protobuf/unittest.proto',
        pool.FindFileByName('google/protobuf/unittest.proto').name)
    self.assertEqual(
        'google/protobuf/unittest.proto',
        pool.FindFileContainingSymbol(
            prefix + 'protobuf_unittest.TestAllTypes.NestedEnum').name)
def _write_header(self, fdescr):
    """Writes the file header: magic, protobuf version, and descriptor set.

    Args:
      fdescr: either a PBZReader (whose raw descriptor blob and descriptor
        pool are reused directly), or a path to a file containing a
        serialized FileDescriptorSet.
    """
    self._fobj.write(MAGIC)
    # Write protocol buffer version in header. Use the encoded byte length
    # so the recorded size always matches the bytes actually written.
    version = google.protobuf.__version__.encode("utf8")
    self._write_blob(T_PROTOBUF_VERSION, len(version), version)
    # isinstance (not an exact-type compare) so PBZReader subclasses work too.
    if isinstance(fdescr, PBZReader):
        self._write_blob(T_FILE_DESCRIPTOR, len(fdescr._raw_descriptor),
                         fdescr._raw_descriptor)
        self._dpool = fdescr._dpool
    else:
        # Read the serialized FileDescriptorSet from disk.
        with open(fdescr, "rb") as fi:
            fdset = fi.read()
        # Write the FileDescriptorSet blob; len(fdset) replaces the previous
        # redundant fi.tell() bookkeeping.
        self._write_blob(T_FILE_DESCRIPTOR, len(fdset), fdset)
        # Parse the descriptors so that messages can be resolved when the
        # serialized file is later read back.
        self._dpool = descriptor_pool.DescriptorPool()
        ds = FileDescriptorSet()
        ds.ParseFromString(fdset)
        for df in ds.file:
            self._dpool.Add(df)
def testProtoFileDescriptorIsGeneratedForDynamicAnyValueType(self):
    """Round-trips a dynamically emitted proto class through RDFProtoStruct."""
    # Emit a file descriptor (plus its dependencies) for the dynamic type and
    # load all of them into a fresh pool so the type can be resolved.
    test_pb_file_descriptor, deps = (
        DynamicAnyValueTypeTest.EmitProtoFileDescriptor("grr_export"))

    pool = descriptor_pool.DescriptorPool()
    for file_descriptor in deps + [test_pb_file_descriptor]:
        pool.Add(file_descriptor)
    proto_descriptor = pool.FindMessageTypeByName(
        "grr_export.DynamicAnyValueTypeTest")
    factory = message_factory.MessageFactory()
    proto_class = factory.GetPrototype(proto_descriptor)

    # Now let's define an RDFProtoStruct for the dynamically generated
    # proto_class.
    new_dynamic_class = type(
        "DynamicAnyValueTypeTestReversed",
        (rdf_structs.RDFProtoStruct, ),
        # TODO(user): We shouldn't need to specify Any here. Investigate.
        dict(protobuf=proto_class, rdf_deps=["Any"]),
    )
    new_dynamic_instance = new_dynamic_class(type="foo")
    self.assertEqual(new_dynamic_instance.type, "foo")

    # Test that a proto can be deserialized from serialized RDFValue
    # with a dynamic AnyValue field.
    test_pb = DynamicAnyValueTypeTest(type="TestStruct")
    test_pb.dynamic.foobar = "Hello"

    proto_value = proto_class()
    proto_value.ParseFromString(test_pb.SerializeToString())

    # The dynamic field must survive serialization: type marker, type_url
    # and packed payload all match the original.
    self.assertEqual(proto_value.type, "TestStruct")
    self.assertEqual(proto_value.dynamic.type_url, "TestStruct")
    self.assertEqual(proto_value.dynamic.value,
                     test_pb.dynamic.SerializeToString())
def testDuplicateExtensionNumber(self):
    """Two extensions claiming the same field number on one extendee fail."""
    pool = descriptor_pool.DescriptorPool()
    factory = message_factory.MessageFactory(pool=pool)

    # Add Container message with an open extension range [1, 10).
    f = descriptor_pb2.FileDescriptorProto()
    f.name = 'google/protobuf/internal/container.proto'
    f.package = 'google.protobuf.python.internal'
    msg = f.message_type.add()
    msg.name = 'Container'
    rng = msg.extension_range.add()
    rng.start = 1
    rng.end = 10
    pool.Add(f)
    msgs = factory.GetMessages([f.name])
    self.assertIn('google.protobuf.python.internal.Container', msgs)

    # Extend container: first extension at field number 2 is accepted.
    f = descriptor_pb2.FileDescriptorProto()
    f.name = 'google/protobuf/internal/extension.proto'
    f.package = 'google.protobuf.python.internal'
    f.dependency.append('google/protobuf/internal/container.proto')
    msg = f.message_type.add()
    msg.name = 'Extension'
    ext = msg.extension.add()
    ext.name = 'extension_field'
    ext.number = 2
    ext.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
    ext.type_name = 'Extension'
    ext.extendee = 'Container'
    pool.Add(f)
    msgs = factory.GetMessages([f.name])
    self.assertIn('google.protobuf.python.internal.Extension', msgs)

    # Add Duplicate extending the same field number; building its messages
    # must now fail.
    f = descriptor_pb2.FileDescriptorProto()
    f.name = 'google/protobuf/internal/duplicate.proto'
    f.package = 'google.protobuf.python.internal'
    f.dependency.append('google/protobuf/internal/container.proto')
    msg = f.message_type.add()
    msg.name = 'Duplicate'
    ext = msg.extension.add()
    ext.name = 'extension_field'
    ext.number = 2
    ext.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
    ext.type_name = 'Duplicate'
    ext.extendee = 'Container'
    pool.Add(f)

    with self.assertRaises(Exception) as cm:
        factory.GetMessages([f.name])

    # The exact error text differs between the pure-Python and C++
    # implementations, so accept either message.
    self.assertIn(str(cm.exception), [
        'Extensions '
        '"google.protobuf.python.internal.Duplicate.extension_field" and'
        ' "google.protobuf.python.internal.Extension.extension_field"'
        ' both try to extend message type'
        ' "google.protobuf.python.internal.Container"'
        ' with field number 2.',
        'Double registration of Extensions'
    ])
def _ValidateFleetspeakServiceConfig(self, config_path):
    """Validates a Fleetspeak service config.

    Checks that the given file is a valid TextFormat representation of
    a Fleetspeak service config proto.

    Args:
      config_path: Path to the config file.

    Raises:
      BuildError: If the config is not valid.
    """
    # A pool that knows the daemon Config type is needed so the Any payload
    # inside the service config can be resolved during parsing.
    pool = descriptor_pool.DescriptorPool()
    pool.AddDescriptor(fs_config_pb2.Config.DESCRIPTOR)
    with open(config_path, "rb") as f:
        parsed_config = text_format.Parse(
            f.read(),
            fs_system_pb2.ClientServiceConfig(),
            descriptor_pool=pool)
    if parsed_config.factory != "Daemon":
        raise BuildError(
            "Fleetspeak config does not have the expected factory type.")
    daemon_cfg = fs_config_pb2.Config()
    parsed_config.config.Unpack(daemon_cfg)
    if not daemon_cfg.argv:
        raise BuildError(
            "Fleetspeak daemon service config does not specify command line "
            "args.")
def _TestMessage(self, prefix):
    """Checks AddDescriptor lookups, optionally with a name prefix.

    Uses assertEqual: the assertEquals alias is deprecated and removed in
    Python 3.12, and the sibling helpers here already use assertEqual.
    """
    pool = descriptor_pool.DescriptorPool()
    pool.AddDescriptor(unittest_pb2.TestAllTypes.DESCRIPTOR)
    self.assertEqual(
        'protobuf_unittest.TestAllTypes',
        pool.FindMessageTypeByName(
            prefix + 'protobuf_unittest.TestAllTypes').full_name)

    # AddDescriptor is not recursive.
    with self.assertRaises(KeyError):
        pool.FindMessageTypeByName(
            prefix + 'protobuf_unittest.TestAllTypes.NestedMessage')

    pool.AddDescriptor(unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR)
    self.assertEqual(
        'protobuf_unittest.TestAllTypes.NestedMessage',
        pool.FindMessageTypeByName(
            prefix + 'protobuf_unittest.TestAllTypes.NestedMessage').full_name)

    # Files are implicitly also indexed when messages are added.
    self.assertEqual(
        'google/protobuf/unittest.proto',
        pool.FindFileByName('google/protobuf/unittest.proto').name)
    self.assertEqual(
        'google/protobuf/unittest.proto',
        pool.FindFileContainingSymbol(
            prefix + 'protobuf_unittest.TestAllTypes.NestedMessage').name)
def testService(self):
    """AddServiceDescriptor makes a service findable by its full name."""
    pool = descriptor_pool.DescriptorPool()
    # Unknown until explicitly registered.
    with self.assertRaises(KeyError):
        pool.FindServiceByName('protobuf_unittest.TestService')
    pool.AddServiceDescriptor(unittest_pb2._TESTSERVICE)
    self.assertEqual(
        'protobuf_unittest.TestService',
        pool.FindServiceByName('protobuf_unittest.TestService').full_name)
def setUp(self):
    """Creates a pool preloaded with both factory-test file descriptors."""
    self.factory_test1_fd = descriptor_pb2.FileDescriptorProto.FromString(
        factory_test1_pb2.DESCRIPTOR.serialized_pb)
    self.factory_test2_fd = descriptor_pb2.FileDescriptorProto.FromString(
        factory_test2_pb2.DESCRIPTOR.serialized_pb)
    self.pool = descriptor_pool.DescriptorPool()
    for file_proto in (self.factory_test1_fd, self.factory_test2_fd):
        self.pool.Add(file_proto)
def create_message_factory(descriptor_file_path, proto_type):
    """Returns a prototype class for proto_type from a descriptor file."""
    pool = descriptor_pool.DescriptorPool()
    # `file_proto` avoids shadowing the `file` builtin.
    for file_proto in read_descriptor(descriptor_file_path).file:
        pool.Add(file_proto)
    message_descriptor = pool.FindMessageTypeByName(proto_type)
    return message_factory.MessageFactory().GetPrototype(message_descriptor)
def create_pool(args):
    """Builds a DescriptorPool from the trace descriptor named in args."""
    pool = descriptor_pool.DescriptorPool()
    for file_proto in read_descriptor(args.trace_descriptor).file:
        pool.Add(file_proto)
    return pool
def testCustomDescriptorPool(self):
    """A hand-built FileDescriptorProto is findable after pool.Add."""
    pool = descriptor_pool.DescriptorPool()
    file_proto = descriptor_pb2.FileDescriptorProto(
        name='some/file.proto', package='package')
    file_proto.message_type.add(name='Message')
    pool.Add(file_proto)
    # Both the file and the packaged message resolve by name.
    self.assertEqual(pool.FindFileByName('some/file.proto').name,
                     'some/file.proto')
    self.assertEqual(pool.FindMessageTypeByName('package.Message').name,
                     'Message')
def testFile(self):
    """AddFileDescriptor indexes the file but not the symbols inside it."""
    pool = descriptor_pool.DescriptorPool()
    pool.AddFileDescriptor(unittest_pb2.DESCRIPTOR)
    self.assertEqual(
        'google/protobuf/unittest.proto',
        pool.FindFileByName('google/protobuf/unittest.proto').name)
    # AddFileDescriptor is not recursive; messages and enums within files
    # must be explicitly registered.
    with self.assertRaises(KeyError):
        pool.FindFileContainingSymbol('protobuf_unittest.TestAllTypes')
def testEmptyDescriptorPool(self):
    """A fresh pool contains nothing until a file descriptor is added."""
    pool = descriptor_pool.DescriptorPool()
    proto_file_name = descriptor_pb2.DESCRIPTOR.name
    self.assertRaises(KeyError, pool.FindFileByName, proto_file_name)
    # Register descriptor.proto itself with the pool.
    file_descriptor = descriptor_pb2.FileDescriptorProto()
    descriptor_pb2.DESCRIPTOR.CopyToProto(file_descriptor)
    pool.Add(file_descriptor)
    # The lookup now succeeds.
    self.assertTrue(pool.FindFileByName(proto_file_name))
def testAddSerializedFile(self):
    """AddSerializedFile behaves like Add for message-type lookups."""
    if isinstance(self, SecondaryDescriptorFromDescriptorDB):
        if api_implementation.Type() == 'cpp':
            # Cpp extension cannot call Add on a DescriptorPool
            # that uses a DescriptorDatabase.
            # TODO(jieluo): Fix python and cpp extension diff.
            return
    self.pool = descriptor_pool.DescriptorPool()
    for file_proto in (self.factory_test1_fd, self.factory_test2_fd):
        self.pool.AddSerializedFile(file_proto.SerializeToString())
    # Re-run the name-lookup test against the serialized-file-backed pool.
    self.testFindMessageTypeByName()
def testAddTypeError(self):
    """Every Add*Descriptor method rejects a non-descriptor argument."""
    pool = descriptor_pool.DescriptorPool()
    for add_method in (pool.AddDescriptor,
                       pool.AddEnumDescriptor,
                       pool.AddServiceDescriptor,
                       pool.AddExtensionDescriptor,
                       pool.AddFileDescriptor):
        with self.assertRaises(TypeError):
            add_method(0)
def testMakeSameProtoClassTwice(self):
    """Test that the DescriptorPool is used."""
    pool = descriptor_pool.DescriptorPool()
    full_name = 'net.proto2.python.public.proto_builder_test.Test'
    first_cls = proto_builder.MakeSimpleProtoClass(
        self._fields, full_name=full_name, pool=pool)
    second_cls = proto_builder.MakeSimpleProtoClass(
        self._fields, full_name=full_name, pool=pool)
    # Both builds must resolve to the very same descriptor from the pool.
    self.assertIs(first_cls.DESCRIPTOR, second_cls.DESCRIPTOR)
def testAnyMessageDescriptorPoolMissingType(self):
    """Serializing an Any fails if the pool cannot resolve its type_url."""
    payload = unittest_pb2.OneString()
    payload.data = 'string'
    wrapper = any_test_pb2.TestAny()
    wrapper.any_value.Pack(payload)
    empty_pool = descriptor_pool.DescriptorPool()
    with self.assertRaises(TypeError) as cm:
        json_format.MessageToJson(wrapper, True, descriptor_pool=empty_pool)
    self.assertEqual(
        'Can not find message descriptor by type_url:'
        ' type.googleapis.com/protobuf_unittest.OneString.',
        str(cm.exception))
def testGetPrototype(self):
    """GetPrototype builds a dynamic class and caches it per descriptor."""
    db = descriptor_database.DescriptorDatabase()
    db.Add(self.factory_test1_fd)
    db.Add(self.factory_test2_fd)
    pool = descriptor_pool.DescriptorPool(db)
    factory = message_factory.MessageFactory()
    descriptor = pool.FindMessageTypeByName(
        'google.protobuf.python.internal.Factory2Message')
    cls = factory.GetPrototype(descriptor)
    # The dynamic class is distinct from the statically generated one.
    self.assertFalse(cls is factory_test2_pb2.Factory2Message)
    self._ExerciseDynamicClass(cls)
    # A second lookup for the same descriptor returns the cached class.
    cls2 = factory.GetPrototype(pool.FindMessageTypeByName(
        'google.protobuf.python.internal.Factory2Message'))
    self.assertTrue(cls is cls2)
def pytest_runtest_setup(item):
    """Per-test setup: isolate descriptor-pool and symbol-database state.

    Swaps the default descriptor pool and symbol database for fresh ones so
    tests cannot pollute each other, then re-registers the pb2 modules the
    test's module imported.

    Args:
      item: the pytest test item being set up.
    """
    # Local import: the legacy `imp` module is deprecated and removed in
    # Python 3.12; importlib.reload is the direct replacement.
    import importlib

    _FileInfo.registry.clear()

    # Replace the descriptor pool and symbol database to avoid tests
    # polluting one another.
    pool = descriptor_pool.DescriptorPool()
    sym_db = symbol_database.SymbolDatabase(pool=pool)
    item._mocks = (
        mock.patch.object(descriptor_pool, 'Default', return_value=pool),
        mock.patch.object(symbol_database, 'Default', return_value=sym_db),
    )
    [i.start() for i in item._mocks]

    # Importing a pb2 module registers those messages with the pool.
    # However, our test harness is subbing out the default pool above,
    # which means that all the dependencies that messages may depend on
    # are now absent from the pool.
    #
    # Add any pb2 modules that may have been imported by the test's module to
    # the descriptor pool and symbol database.
    #
    # This is exceptionally tricky in the C implementation because there is
    # no way to add an existing descriptor to a pool; the only acceptable
    # approach is to add a file descriptor proto, which then creates *new*
    # descriptors. We therefore do that and then plop the replacement classes
    # onto the pb2 modules.
    reloaded = set()
    for name in dir(item.module):
        if name.endswith('_pb2') and not name.startswith('test_'):
            module = getattr(item.module, name)
            pool.AddSerializedFile(module.DESCRIPTOR.serialized_pb)
            fd = pool.FindFileByName(module.DESCRIPTOR.name)
            # Register all the messages to the symbol database and the
            # module. Do this recursively if there are nested messages.
            _register_messages(module, fd.message_types_by_name, sym_db)
            # Track which modules had new message classes loaded.
            # This is used below to wire the new classes into the marshal.
            reloaded.add(name)

    # If the marshal had previously registered the old message classes,
    # then reload the appropriate modules so the marshal is using the new
    # ones.
    if 'wrappers_pb2' in reloaded:
        importlib.reload(rules.wrappers)
    if 'struct_pb2' in reloaded:
        importlib.reload(rules.struct)
    if reloaded.intersection({'timestamp_pb2', 'duration_pb2'}):
        importlib.reload(rules.dates)
def setUp(self):
    """Backs the pool with a DescriptorDatabase holding all test files."""
    self.factory_test1_fd = descriptor_pb2.FileDescriptorProto.FromString(
        factory_test1_pb2.DESCRIPTOR.serialized_pb)
    self.factory_test2_fd = descriptor_pb2.FileDescriptorProto.FromString(
        factory_test2_pb2.DESCRIPTOR.serialized_pb)
    db = descriptor_database.DescriptorDatabase()
    db.Add(self.factory_test1_fd)
    db.Add(self.factory_test2_fd)
    # The unittest files are needed as transitive dependencies.
    for pb2 in (unittest_import_public_pb2, unittest_import_pb2,
                unittest_pb2):
        db.Add(descriptor_pb2.FileDescriptorProto.FromString(
            pb2.DESCRIPTOR.serialized_pb))
    self.pool = descriptor_pool.DescriptorPool(descriptor_db=db)
def get_descriptor_pool():
    """
    Generates a protocol buffer object descriptor pool which allows looking
    up info about our proto API, such as options for each servicer, method,
    or message.
    """
    # this needs to be imported so the annotations are available in the
    # generated pool...
    from proto import annotations_pb2  # noqa

    descriptors_path = Path(__file__).parent / ".." / "proto" / "descriptors.pb"
    with open(descriptors_path, "rb") as descriptor_set_f:
        file_set = descriptor_pb2.FileDescriptorSet.FromString(
            descriptor_set_f.read())
    pool = descriptor_pool.DescriptorPool()
    for file_descriptor in file_set.file:
        pool.Add(file_descriptor)
    return pool
def testCreatePrototypeOverride(self):
    """A MessageFactory subclass can hook CreatePrototype to decorate classes."""

    class MyMessageFactory(message_factory.MessageFactory):

        def CreatePrototype(self, descriptor):
            cls = super(MyMessageFactory, self).CreatePrototype(descriptor)
            cls.additional_field = 'Some value'
            return cls

    db = descriptor_database.DescriptorDatabase()
    db.Add(self.factory_test1_fd)
    db.Add(self.factory_test2_fd)
    pool = descriptor_pool.DescriptorPool(db)
    factory = MyMessageFactory()
    cls = factory.GetPrototype(pool.FindMessageTypeByName(
        'google.protobuf.python.internal.Factory2Message'))
    # The override ran and attached the extra attribute.
    self.assertTrue(hasattr(cls, 'additional_field'))
def testFileDescriptorOptionsWithCustomDescriptorPool(self):
    """File options with extension values survive in a custom pool."""
    # Create a descriptor pool, and add a new FileDescriptorProto to it.
    pool = descriptor_pool.DescriptorPool()
    file_name = 'file_descriptor_options_with_custom_descriptor_pool.proto'
    file_proto = descriptor_pb2.FileDescriptorProto(name=file_name)
    extension_id = file_options_test_pb2.foo_options
    file_proto.options.Extensions[extension_id].foo_name = 'foo'
    pool.Add(file_proto)
    # The options set on the FileDescriptorProto should be available in the
    # descriptor even if they contain extensions that cannot be deserialized
    # using the pool.
    file_descriptor = pool.FindFileByName(file_name)
    options = file_descriptor.GetOptions()
    self.assertEqual('foo', options.Extensions[extension_id].foo_name)
    # The object returned by GetOptions() is cached.
    self.assertIs(options, file_descriptor.GetOptions())
def setUp(self):
    """Populates a fresh pool with every file descriptor the tests rely on."""
    self.pool = descriptor_pool.DescriptorPool()
    self.factory_test1_fd = descriptor_pb2.FileDescriptorProto.FromString(
        factory_test1_pb2.DESCRIPTOR.serialized_pb)
    self.factory_test2_fd = descriptor_pb2.FileDescriptorProto.FromString(
        factory_test2_pb2.DESCRIPTOR.serialized_pb)
    self.pool.Add(self.factory_test1_fd)
    self.pool.Add(self.factory_test2_fd)
    # Dependencies and the no-package test file are registered too.
    for pb2 in (unittest_import_public_pb2, unittest_import_pb2,
                unittest_pb2, no_package_pb2):
        self.pool.Add(descriptor_pb2.FileDescriptorProto.FromString(
            pb2.DESCRIPTOR.serialized_pb))
def setUp(self):
    """Builds a serialized-proto-backed pool for the lookup tests."""
    # TODO(jieluo): Should make the pool which is created by
    # serialized_pb same with generated pool.
    # TODO(jieluo): More test coverage for the generated pool.
    self.pool = descriptor_pool.DescriptorPool()
    self.factory_test1_fd = descriptor_pb2.FileDescriptorProto.FromString(
        factory_test1_pb2.DESCRIPTOR.serialized_pb)
    self.factory_test2_fd = descriptor_pb2.FileDescriptorProto.FromString(
        factory_test2_pb2.DESCRIPTOR.serialized_pb)
    self.pool.Add(self.factory_test1_fd)
    self.pool.Add(self.factory_test2_fd)
    # Transitive dependencies of the factory test files.
    for pb2 in (unittest_import_public_pb2, unittest_import_pb2,
                unittest_pb2):
        self.pool.Add(descriptor_pb2.FileDescriptorProto.FromString(
            pb2.DESCRIPTOR.serialized_pb))