def __init__(self, pool=None):
    """Initializes a new factory.

    Args:
      pool: Optional descriptor pool to resolve descriptors from. When
        falsy/omitted, a fresh pool backed by a DescriptorDatabase is built.
    """
    self.pool = pool if pool else descriptor_pool.DescriptorPool(
        descriptor_database.DescriptorDatabase())
    # Cache of classes already built from protobuf descriptors, keyed by
    # the descriptor's full name, so repeated lookups reuse one class.
    self._classes = {}
def testAdd(self):
    """Adds one file descriptor proto and checks symbol lookups resolve to it.

    Fix: `assertEquals` is a deprecated alias of `assertEqual` (it warns on
    modern Python and was removed in Python 3.12); use `assertEqual`.
    """
    db = descriptor_database.DescriptorDatabase()
    file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
        factory_test2_pb2.DESCRIPTOR.serialized_pb)
    db.Add(file_desc_proto)

    # File lookup by path.
    self.assertEqual(
        file_desc_proto,
        db.FindFileByName('google/protobuf/internal/factory_test2.proto'))
    # Top-level message symbol.
    self.assertEqual(
        file_desc_proto,
        db.FindFileContainingSymbol(
            'google.protobuf.python.internal.Factory2Message'))
    # Nested message symbol.
    self.assertEqual(
        file_desc_proto,
        db.FindFileContainingSymbol(
            'google.protobuf.python.internal.Factory2Message.NestedFactory2Message'
        ))
    # Top-level enum symbol.
    self.assertEqual(
        file_desc_proto,
        db.FindFileContainingSymbol(
            'google.protobuf.python.internal.Factory2Enum'))
    # Nested enum symbol.
    self.assertEqual(
        file_desc_proto,
        db.FindFileContainingSymbol(
            'google.protobuf.python.internal.Factory2Message.NestedFactory2Enum'
        ))
    # Enum nested in a message that holds nothing else.
    self.assertEqual(
        file_desc_proto,
        db.FindFileContainingSymbol(
            'google.protobuf.python.internal.MessageWithNestedEnumOnly.NestedEnum'
        ))
def __init__(self, descriptor_db=None):
    """Initializes a Pool of proto buffs.

    The optional descriptor_db lets specialized file-descriptor-proto lookup
    code run on demand — for instance an implementation that reads and
    compiles the file named in a FindFileByName() call, so Add() never needs
    to be invoked. Results obtained from that database are cached here too.

    Args:
      descriptor_db: A secondary source of file descriptors.
    """
    self._internal_db = descriptor_database.DescriptorDatabase()
    self._descriptor_db = descriptor_db
    # Per-kind caches, keyed by full symbol name (or file name below).
    self._descriptors = {}
    self._enum_descriptors = {}
    self._service_descriptors = {}
    self._file_descriptors = {}
    self._toplevel_extensions = {}
    # TODO(jieluo): Remove _file_desc_by_toplevel_extension after
    # maybe year 2020 for compatibility issue (with 3.4.1 only).
    self._file_desc_by_toplevel_extension = {}
    # Extensions live in two two-level mappings: outer key is the descriptor
    # of the extended message; inner key is the extension's full name in one
    # map and its tag number in the other.
    self._extensions_by_name = collections.defaultdict(dict)
    self._extensions_by_number = collections.defaultdict(dict)
def testAdd(self):
    """Exercises FindFileContainingSymbol for every category of symbol.

    Fix: `assertRaisesRegexp` is a deprecated alias of `assertRaisesRegex`
    (it warns on modern Python and was removed in Python 3.12); use
    `assertRaisesRegex`.
    """
    db = descriptor_database.DescriptorDatabase()
    file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
        factory_test2_pb2.DESCRIPTOR.serialized_pb)
    file_desc_proto2 = descriptor_pb2.FileDescriptorProto.FromString(
        no_package_pb2.DESCRIPTOR.serialized_pb)
    db.Add(file_desc_proto)
    db.Add(file_desc_proto2)

    self.assertEqual(file_desc_proto, db.FindFileByName(
        'google/protobuf/internal/factory_test2.proto'))
    # Can find message type.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message'))
    # Can find nested message type.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message.NestedFactory2Message'))
    # Can find enum type.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Enum'))
    # Can find nested enum type.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message.NestedFactory2Enum'))
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.MessageWithNestedEnumOnly.NestedEnum'))
    # Can find field.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message.list_field'))
    # Can find enum value.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Enum.FACTORY_2_VALUE_0'))
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.FACTORY_2_VALUE_0'))
    self.assertEqual(file_desc_proto2, db.FindFileContainingSymbol(
        '.NO_PACKAGE_VALUE_0'))
    # Can find top level extension.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.another_field'))
    # Can find nested extension inside a message.
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message.one_more_field'))
    # Can find service.
    file_desc_proto2 = descriptor_pb2.FileDescriptorProto.FromString(
        unittest_pb2.DESCRIPTOR.serialized_pb)
    db.Add(file_desc_proto2)
    self.assertEqual(file_desc_proto2, db.FindFileContainingSymbol(
        'protobuf_unittest.TestService'))
    # Non-existent field under a valid top level symbol can also be
    # found. The behavior is the same with protobuf C++.
    self.assertEqual(file_desc_proto2, db.FindFileContainingSymbol(
        'protobuf_unittest.TestAllTypes.none_field'))
    with self.assertRaisesRegex(KeyError,
                                r'\'protobuf_unittest\.NoneMessage\''):
        db.FindFileContainingSymbol('protobuf_unittest.NoneMessage')
def testGetPrototype(self):
    """GetPrototype builds a dynamic class once and returns it from cache."""
    db = descriptor_database.DescriptorDatabase()
    pool = descriptor_pool.DescriptorPool(db)
    db.Add(self.factory_test1_fd)
    db.Add(self.factory_test2_fd)
    factory = message_factory.MessageFactory()
    msg_descriptor = pool.FindMessageTypeByName(
        'google.protobuf.python.internal.Factory2Message')
    cls = factory.GetPrototype(msg_descriptor)
    # The dynamically built class must be distinct from the generated one.
    self.assertFalse(cls is factory_test2_pb2.Factory2Message)
    self._ExerciseDynamicClass(cls)
    # Looking the type up again yields the exact same cached class object.
    cls2 = factory.GetPrototype(pool.FindMessageTypeByName(
        'google.protobuf.python.internal.Factory2Message'))
    self.assertTrue(cls is cls2)
def setUp(self):
    """Registers the factory/unittest test protos and builds the pool."""
    self.factory_test1_fd = descriptor_pb2.FileDescriptorProto.FromString(
        factory_test1_pb2.DESCRIPTOR.serialized_pb)
    self.factory_test2_fd = descriptor_pb2.FileDescriptorProto.FromString(
        factory_test2_pb2.DESCRIPTOR.serialized_pb)
    db = descriptor_database.DescriptorDatabase()
    db.Add(self.factory_test1_fd)
    db.Add(self.factory_test2_fd)
    # The unittest protos (and their import dependencies) are registered
    # straight from the generated modules' serialized descriptors.
    for module in (unittest_import_public_pb2, unittest_import_pb2,
                   unittest_pb2):
        db.Add(descriptor_pb2.FileDescriptorProto.FromString(
            module.DESCRIPTOR.serialized_pb))
    self.pool = descriptor_pool.DescriptorPool(descriptor_db=db)
def testCreatePrototypeOverride(self):
    """A MessageFactory subclass can hook CreatePrototype to decorate classes."""

    class MyMessageFactory(message_factory.MessageFactory):

        def CreatePrototype(self, descriptor):
            # Decorate every class the base factory builds.
            cls = super(MyMessageFactory, self).CreatePrototype(descriptor)
            cls.additional_field = 'Some value'
            return cls

    db = descriptor_database.DescriptorDatabase()
    pool = descriptor_pool.DescriptorPool(db)
    db.Add(self.factory_test1_fd)
    db.Add(self.factory_test2_fd)
    factory = MyMessageFactory()
    msg_descriptor = pool.FindMessageTypeByName(
        'google.protobuf.python.internal.Factory2Message')
    cls = factory.GetPrototype(msg_descriptor)
    self.assertTrue(hasattr(cls, 'additional_field'))
def __init__(self, descriptor_db=None):
    """Initializes a Pool of proto buffs.

    The optional descriptor_db allows specialized file-descriptor-proto
    lookup code to be triggered on demand — e.g. an implementation that
    reads and compiles the file named in a FindFileByName() call so Add()
    never has to be invoked. Its results are cached internally here too.

    Args:
      descriptor_db: A secondary source of file descriptors.
    """
    self._internal_db = descriptor_database.DescriptorDatabase()
    self._descriptor_db = descriptor_db
    # Caches keyed by full symbol name (or file name for the last one).
    self._descriptors = {}
    self._enum_descriptors = {}
    self._file_descriptors = {}
def testConflictRegister(self):
    """Re-adding the same content under a new file name emits a RuntimeWarning."""
    db = descriptor_database.DescriptorDatabase()
    unittest_fd = descriptor_pb2.FileDescriptorProto.FromString(
        unittest_pb2.DESCRIPTOR.serialized_pb)
    db.Add(unittest_fd)
    # Same serialized content, different file name -> registration conflict.
    conflict_fd = descriptor_pb2.FileDescriptorProto.FromString(
        unittest_pb2.DESCRIPTOR.serialized_pb)
    conflict_fd.name = 'other_file2'
    with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter('always')
        db.Add(conflict_fd)
        self.assertTrue(len(w))
        self.assertIs(w[0].category, RuntimeWarning)
        warning_text = str(w[0].message)
        self.assertIn('Conflict register for file "other_file2": ',
                      warning_text)
        self.assertIn('already defined in file '
                      '"google/protobuf/unittest.proto"',
                      warning_text)
def Test():
    """Round-trips generated messages through a DescriptorPool dynamic parse.

    Fixes:
      * `t2` was used (`t2.s2 = 'world'`) while its construction was
        commented out, so the function raised NameError at runtime; the
        instantiation is restored.
      * The local `id` shadowed the builtin; renamed to `file_desc`.
      * Large blocks of dead commented-out code removed.
    """
    # Build and serialize a Command with the generated class.
    command = pb.cp_comunication_pb2.Command()
    command.cmd = 'hello'
    command.params.append('world')
    command_out = command.SerializeToString()

    # Bug fix: construct t2 before assigning to its fields.
    t2 = pb.cp_comunication_pb2.test2()
    t2.s2 = 'world'
    t2.t2.s = 'hello'
    codec = ProtobufCodec()
    codec.PackMessage(t2)
    out2 = t2.SerializeToString()

    # Register the generated file descriptor in a database-backed pool so
    # message types can be found dynamically by full name.
    file_desc = descriptor_pb2.FileDescriptorProto.FromString(
        pb.cp_comunication_pb2.DESCRIPTOR.serialized_pb)
    db = descriptor_database.DescriptorDatabase()
    pool = descriptor_pool.DescriptorPool(db)
    db.Add(file_desc)

    d1 = pb.cp_comunication_pb2.test2.DESCRIPTOR

    ds_command = pool.FindMessageTypeByName('ctp.cp.Command')
    print(str(reflection.ParseMessage(ds_command, command_out)))

    # Parse with the generated class first as a reference.
    temp = pb.cp_comunication_pb2.test2()
    temp.ParseFromString(out2)
    print('temp :', str(temp))

    # Then parse the same bytes via the dynamically located descriptor;
    # both parses should print the same content.
    ds = pool.FindMessageTypeByName('ctp.cp.test2')
    print(isinstance(ds, descriptor.Descriptor))
    if ds:
        print('find ds', str(ds))
        print('find ds', str(d1))
        print(str(reflection.ParseMessage(d1, out2)))
        print(str(reflection.ParseMessage(ds, out2)))
def testUserDefinedDB(self):
    """A caller-supplied DescriptorDatabase feeds the pool's lookups."""
    db = descriptor_database.DescriptorDatabase()
    self.pool = descriptor_pool.DescriptorPool(db)
    for fd in (self.factory_test1_fd, self.factory_test2_fd):
        db.Add(fd)
    # Re-run the name lookup test against the freshly built pool.
    self.testFindMessageTypeByName()
for key, val in mysqlx_notice_pb2.SessionStateChanged.Parameter.items(): _MESSAGES["Mysqlx.Notice.SessionStateChanged.Parameter.{0}" "".format(key)] = val # Mysql.Prepare for key, val in mysqlx_prepare_pb2.Prepare.OneOfMessage.Type.items(): _MESSAGES["Mysqlx.Prepare.Prepare.OneOfMessage.Type.{0}" "".format(key)] = val # Mysql.Resultset for key, val in mysqlx_resultset_pb2.ColumnMetaData.FieldType.items(): _MESSAGES["Mysqlx.Resultset.ColumnMetaData.FieldType.{0}".format( key)] = val # Add messages to the descriptor pool _DESCRIPTOR_DB = descriptor_database.DescriptorDatabase() _DESCRIPTOR_POOL = descriptor_pool.DescriptorPool(_DESCRIPTOR_DB) _DESCRIPTOR_DB.Add( descriptor_pb2.FileDescriptorProto.FromString( mysqlx_connection_pb2.DESCRIPTOR.serialized_pb)) _DESCRIPTOR_DB.Add( descriptor_pb2.FileDescriptorProto.FromString( mysqlx_crud_pb2.DESCRIPTOR.serialized_pb)) _DESCRIPTOR_DB.Add( descriptor_pb2.FileDescriptorProto.FromString( mysqlx_datatypes_pb2.DESCRIPTOR.serialized_pb)) _DESCRIPTOR_DB.Add( descriptor_pb2.FileDescriptorProto.FromString( mysqlx_expect_pb2.DESCRIPTOR.serialized_pb)) _DESCRIPTOR_DB.Add(
def testAddSerializedFile(self):
    """AddSerializedFile populates the pool straight from serialized protos."""
    db = descriptor_database.DescriptorDatabase()
    self.pool = descriptor_pool.DescriptorPool(db)
    for fd in (self.factory_test1_fd, self.factory_test2_fd):
        self.pool.AddSerializedFile(fd.SerializeToString())
    # Re-run the name lookup test against the freshly built pool.
    self.testFindMessageTypeByName()
Returns: A class describing the passed in descriptor. """ if descriptor.full_name not in self._classes: result_class = reflection.GeneratedProtocolMessageType( descriptor.name.encode('ascii', 'ignore'), (message.Message, ), {'DESCRIPTOR': descriptor}) self._classes[descriptor.full_name] = result_class for field in descriptor.fields: if field.message_type: self.GetPrototype(field.message_type) return self._classes[descriptor.full_name] _DB = descriptor_database.DescriptorDatabase() _POOL = descriptor_pool.DescriptorPool(_DB) _FACTORY = MessageFactory() def GetMessages(file_protos): """Builds a dictionary of all the messages available in a set of files. Args: file_protos: A sequence of file protos to build messages out of. Returns: A dictionary containing all the message types in the files mapping the fully qualified name to a Message subclass for the descriptor. """