def create_trace():
  """Parses command-line args and builds an empty perfetto trace plus prototypes.

  Returns:
    A Trace wrapping a fresh perfetto.protos.Trace message and a Prototypes
    namedtuple of generated message classes (plus the ChromeRAILMode enum
    values) resolved from the descriptor pool.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument(
      'trace_descriptor', type=str, help='location of trace descriptor')
  args = parser.parse_args()

  pool = create_pool(args)
  factory = message_factory.MessageFactory(pool)
  ProtoTrace = factory.GetPrototype(
      pool.FindMessageTypeByName('perfetto.protos.Trace'))

  class EnumPrototype(object):
    """Simple namespace exposing an enum's value names as int attributes."""

    # Declared static: it is invoked through the class below, and the original
    # only worked because Python 3 treats the unbound function as plain.
    @staticmethod
    def from_descriptor(desc):
      res = EnumPrototype()
      # Distinct loop variable so the enum descriptor itself isn't shadowed.
      for value_desc in desc.values:
        setattr(res, value_desc.name, value_desc.number)
      return res

  Prototypes = namedtuple('Prototypes', [
      'TrackEvent',
      'ChromeRAILMode',
      'ThreadDescriptor',
      'ChromeProcessDescriptor',
      'CounterDescriptor',
  ])
  prototypes = Prototypes(
      TrackEvent=factory.GetPrototype(
          pool.FindMessageTypeByName('perfetto.protos.TrackEvent')),
      ChromeRAILMode=EnumPrototype.from_descriptor(
          pool.FindEnumTypeByName('perfetto.protos.ChromeRAILMode')),
      ThreadDescriptor=factory.GetPrototype(
          pool.FindMessageTypeByName('perfetto.protos.ThreadDescriptor')),
      ChromeProcessDescriptor=factory.GetPrototype(
          pool.FindMessageTypeByName(
              'perfetto.protos.ChromeProcessDescriptor')),
      CounterDescriptor=factory.GetPrototype(
          pool.FindMessageTypeByName('perfetto.protos.CounterDescriptor')),
  )
  return Trace(ProtoTrace(), prototypes)
def testProtoFileDescriptorIsGeneratedForDynamicType(self):
  """An RDFProtoStruct can wrap a message class built from an emitted descriptor."""
  test_pb_file_descriptor, deps = DynamicTypeTest.EmitProtoFileDescriptor(
      "grr_export")

  # Register the emitted descriptor and its dependencies in a fresh pool,
  # then materialize a concrete message class for it.
  pool = descriptor_pool.DescriptorPool()
  for file_descriptor in deps + [test_pb_file_descriptor]:
    pool.Add(file_descriptor)
  proto_descriptor = pool.FindMessageTypeByName("grr_export.DynamicTypeTest")
  factory = message_factory.MessageFactory()
  proto_class = factory.GetPrototype(proto_descriptor)

  # Now let's define an RDFProtoStruct for the dynamically generated
  # proto_class.
  new_dynamic_class = type(
      "DynamicTypeTestReversed",
      (rdf_structs.RDFProtoStruct,),
      dict(protobuf=proto_class, rdf_deps=[rdf_client.User]))
  # Fix: the constructed username must match the value asserted below; the
  # constructor argument had drifted out of sync with the assertion.
  new_dynamic_instance = new_dynamic_class(
      type="foo", nested=rdf_client.User(username="bar"))
  self.assertEqual(new_dynamic_instance.type, "foo")
  self.assertEqual(new_dynamic_instance.nested.username, "bar")
class _mysqlxpb_pure(object):
    """This class implements the methods in pure Python used by the
    _mysqlxpb C++ extension."""

    factory = message_factory.MessageFactory()

    @staticmethod
    def new_message(name):
        """Instantiate an empty message of the named protobuf type."""
        descriptor = _DESCRIPTOR_POOL.FindMessageTypeByName(name)
        message_cls = _mysqlxpb_pure.factory.GetPrototype(descriptor)
        return message_cls()

    @staticmethod
    def enum_value(key):
        """Look up the numeric value registered for a named enum constant."""
        return _MESSAGES[key]

    @staticmethod
    def serialize_message(msg):
        """Serialize a fully-initialized message to bytes."""
        return msg.SerializeToString()

    @staticmethod
    def serialize_partial_message(msg):
        """Serialize a message without requiring all fields to be set."""
        return msg.SerializePartialToString()

    @staticmethod
    def parse_message(msg_type_name, payload):
        """Decode payload into a fresh message of the given type name."""
        result = _mysqlxpb_pure.new_message(msg_type_name)
        result.ParseFromString(payload)
        return result

    @staticmethod
    def parse_server_message(msg_type, payload):
        """Decode a server frame, mapping its numeric type to a message name."""
        msg_type_name = SERVER_MESSAGES.get(msg_type)
        if not msg_type_name:
            raise ValueError("Unknown msg_type: {0}".format(msg_type))
        result = _mysqlxpb_pure.new_message(msg_type_name)
        result.ParseFromString(payload)
        return result
def testDefaultValueForCustomMessages(self):
  """Check the value returned by non-existent fields."""

  def _CheckValueAndType(value, expected_value, expected_type):
    self.assertEqual(value, expected_value)
    self.assertIsInstance(value, expected_type)

  def _CheckDefaultValues(msg):
    # Python 3 unified int/long and str/unicode. The old try/except
    # NameError shims were dead code here: both branches of each assigned
    # the very same builtin, so they are reduced to plain assignments.
    int64 = int
    unicode_type = str
    _CheckValueAndType(msg.optional_int32, 0, int)
    _CheckValueAndType(msg.optional_uint64, 0, (int64, int))
    _CheckValueAndType(msg.optional_float, 0, (float, int))
    _CheckValueAndType(msg.optional_double, 0, (float, int))
    _CheckValueAndType(msg.optional_bool, False, bool)
    _CheckValueAndType(msg.optional_string, '', unicode_type)
    _CheckValueAndType(msg.optional_bytes, b'', bytes)
    _CheckValueAndType(msg.optional_nested_enum, msg.FOO, int)

  # First for the generated message
  _CheckDefaultValues(unittest_pb2.TestAllTypes())

  # Then for a message built with from the DescriptorPool.
  pool = descriptor_pool.DescriptorPool()
  pool.Add(descriptor_pb2.FileDescriptorProto.FromString(
      unittest_import_public_pb2.DESCRIPTOR.serialized_pb))
  pool.Add(descriptor_pb2.FileDescriptorProto.FromString(
      unittest_import_pb2.DESCRIPTOR.serialized_pb))
  pool.Add(descriptor_pb2.FileDescriptorProto.FromString(
      unittest_pb2.DESCRIPTOR.serialized_pb))
  message_class = message_factory.MessageFactory(pool).GetPrototype(
      pool.FindMessageTypeByName(
          unittest_pb2.TestAllTypes.DESCRIPTOR.full_name))
  _CheckDefaultValues(message_class())
def get_message_definitions():
    """Build the message classes for the VES event proto.

    Also swaps in a fresh module-level factory afterwards, discarding any
    prototypes cached by the previous one.
    """
    ves_event_descriptor = VESProtobuf.create_ves_event()
    messages = message_factory.GetMessages((ves_event_descriptor,))
    message_factory._FACTORY = message_factory.MessageFactory()
    return messages
def _resolve_proto_operator(
    self, op: placeholder_pb2.ProtoOperator
) -> Union[int, float, str, bool, bytes]:
  """Evaluates the proto operator.

  Resolves the operator's expression, optionally walks a proto field path
  into the resulting message, and renders message-typed results using the
  operator's serialization format.

  Raises:
    NullDereferenceError: if the expression resolves to None.
    ValueError: on unsupported value types, field-path segments, or a
      message-typed result with no serialization format.
  """
  raw_message = self.resolve(op.expression)
  if raw_message is None:
    raise NullDereferenceError(op.expression)
  if isinstance(raw_message, str):
    # We need descriptor pool to parse encoded raw messages.
    pool = descriptor_pool.Default()
    for file_descriptor in op.proto_schema.file_descriptors.file:
      pool.Add(file_descriptor)
    message_descriptor = pool.FindMessageTypeByName(
        op.proto_schema.message_type)
    factory = message_factory.MessageFactory(pool)
    message_type = factory.GetPrototype(message_descriptor)
    value = message_type()
    json_format.Parse(raw_message, value, descriptor_pool=pool)
  elif isinstance(raw_message, message.Message):
    # Message such as platform config should not be encoded.
    value = raw_message
  else:
    raise ValueError(
        f"Got unsupported value type for proto operator: {type(raw_message)}."
    )
  if op.proto_field_path:
    # Each path segment is one of:
    #   ".name"   -> attribute access on the message,
    #   ['key']   -> map lookup (single-quoted or double-quoted key),
    #   [0]       -> repeated-field index.
    for field in op.proto_field_path:
      if field.startswith("."):
        value = getattr(value, field[1:])
        continue
      map_key = re.findall(r"\[['\"](.+)['\"]\]", field)
      if len(map_key) == 1:
        value = value[map_key[0]]
        continue
      index = re.findall(r"\[(\d+)\]", field)
      if index and str.isdecimal(index[0]):
        value = value[int(index[0])]
        continue
      raise ValueError(f"Got unsupported proto field path: {field}")
  # Non-message primitive values are returned directly.
  if isinstance(value, (int, float, str, bool, bytes)):
    return value
  if not isinstance(value, message.Message):
    raise ValueError(f"Got unsupported value type {type(value)} "
                     "from accessing proto field path.")
  # For message-typed values, we need to consider serialization format.
  if op.serialization_format:
    if op.serialization_format == placeholder_pb2.ProtoOperator.JSON:
      return json_format.MessageToJson(
          message=value, sort_keys=True, preserving_proto_field_name=True)
    if op.serialization_format == placeholder_pb2.ProtoOperator.TEXT_FORMAT:
      return text_format.MessageToString(value)
    if op.serialization_format == placeholder_pb2.ProtoOperator.BINARY:
      return value.SerializeToString()
  raise ValueError(
      "Proto operator resolves to a proto message value. A serialization "
      "format is needed to render it.")
# Python Txn Class import types import skull_capi as capi from google.protobuf import message from google.protobuf import message_factory from google.protobuf import descriptor_pool # Global message factory (Notes: Should not create it dynamically, or it will lead memleak) _MESSAGE_FACTORY = message_factory.MessageFactory(descriptor_pool.Default()) class Txn(): # Txn Status TXN_OK = 0 TXN_ERROR = 1 TXN_TIMEOUT = 2 # IO Status IO_OK = 0 IO_ERROR_SVCNAME = 1 IO_ERROR_APINAME = 2 IO_ERROR_STATE = 3 IO_ERROR_BIO = 4 IO_ERROR_SVCBUSY = 5 IO_ERROR_REQUEST = 6 def __init__(self, skull_txn): self._skull_txn = skull_txn self._msg = None
def create_message_factory(message_type):
    """Return the generated message class for a fully-qualified type name.

    Resolves the descriptor from the enclosing object's descriptor pool and
    materializes a concrete class for it via a fresh MessageFactory.
    """
    descriptor = self.descriptor_pool.FindMessageTypeByName(message_type)
    factory = message_factory.MessageFactory()
    return factory.GetPrototype(descriptor)
# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Utilities to make it easier to work with proto reflection.""" from typing import Any, Type, Union from google.protobuf import descriptor from google.protobuf import message from google.protobuf import message_factory _factory = message_factory.MessageFactory() def _field_descriptor_for_name(msg: message.Message, field_name: str) -> descriptor.FieldDescriptor: """Returns the FieldDescriptor corresponding to field_name.""" result = msg.DESCRIPTOR.fields_by_name.get(field_name) if result is None: raise ValueError('%s is not present on: %s.' % (repr(field_name), msg.DESCRIPTOR.name)) return result def are_same_message_type(descriptor_a: descriptor.Descriptor, descriptor_b: descriptor.Descriptor) -> bool: """Returns True if descriptor_a is the same type as descriptor_b."""
def __init__(self, proto_obj=None):
    """Wrap an Event proto, creating a fresh one when none is supplied.

    Args:
      proto_obj: optional existing proto.Event to wrap; a new empty Event
        is substituted when this is falsy.
    """
    self._proto = proto.Event() if not proto_obj else proto_obj
    # Caches start empty; populated lazily elsewhere — not shown in this view.
    self._entry_cache = {}
    self._factory = message_factory.MessageFactory()
    self._rev_type_lookup = {}
    self._dirty_tags = False
def testExtensionValueInDifferentFile(self):
  """Extensions set via one pool must survive a round-trip through a fresh pool."""
  # Add Container message.
  f1 = descriptor_pb2.FileDescriptorProto(
      name='google/protobuf/internal/container.proto',
      package='google.protobuf.python.internal')
  f1.message_type.add(name='Container').extension_range.add(start=1, end=10)
  # Add ValueType message.
  f2 = descriptor_pb2.FileDescriptorProto(
      name='google/protobuf/internal/value_type.proto',
      package='google.protobuf.python.internal')
  f2.message_type.add(name='ValueType').field.add(
      name='setting', number=1,
      label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL,
      type=descriptor_pb2.FieldDescriptorProto.TYPE_INT32,
      default_value='123')
  # Extend container with field of ValueType.
  f3 = descriptor_pb2.FileDescriptorProto(
      name='google/protobuf/internal/extension.proto',
      package='google.protobuf.python.internal',
      dependency=[f1.name, f2.name])
  f3.extension.add(
      name='top_level_extension_field', number=2,
      label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL,
      type_name='ValueType', extendee='Container')
  f3.message_type.add(name='Extension').extension.add(
      name='nested_extension_field', number=3,
      label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL,
      type_name='ValueType', extendee='Container')

  class SimpleDescriptorDB:
    # Minimal descriptor database serving FileDescriptorProtos by name, so
    # the pool can pull in f2 on demand as a dependency of f3.

    def __init__(self, files):
      self._files = files

    def FindFileByName(self, name):
      return self._files[name]

  db = SimpleDescriptorDB({f1.name: f1, f2.name: f2, f3.name: f3})

  pool = descriptor_pool.DescriptorPool(db)
  factory = message_factory.MessageFactory(pool=pool)
  msgs = factory.GetMessages([f1.name, f3.name])  # Deliberately not f2.
  msg = msgs['google.protobuf.python.internal.Container']
  desc = msgs['google.protobuf.python.internal.Extension'].DESCRIPTOR
  ext1 = desc.file.extensions_by_name['top_level_extension_field']
  ext2 = desc.extensions_by_name['nested_extension_field']
  m = msg()
  m.Extensions[ext1].setting = 234
  m.Extensions[ext2].setting = 345
  serialized = m.SerializeToString()

  # Second phase: rebuild pool/factory from scratch and parse the bytes;
  # both extension values must come back intact.
  pool = descriptor_pool.DescriptorPool(db)
  factory = message_factory.MessageFactory(pool=pool)
  msgs = factory.GetMessages([f1.name, f3.name])  # Deliberately not f2.
  msg = msgs['google.protobuf.python.internal.Container']
  desc = msgs['google.protobuf.python.internal.Extension'].DESCRIPTOR
  ext1 = desc.file.extensions_by_name['top_level_extension_field']
  ext2 = desc.extensions_by_name['nested_extension_field']
  m = msg.FromString(serialized)
  self.assertEqual(2, len(m.ListFields()))
  self.assertEqual(234, m.Extensions[ext1].setting)
  self.assertEqual(345, m.Extensions[ext2].setting)
def MakeFlatRDFClass(self, value):
  """Generates flattened RDFValue class definition for the given value.

  Builds a new FileDescriptorProto/message type whose field 1 is an
  ExportedMetadata message and whose remaining fields mirror the primitive
  and enum fields of `value` (nested and repeated fields are skipped), then
  returns a dynamically created RDFProtoStruct subclass for it.
  """
  file_descriptor = descriptor_pb2.FileDescriptorProto()
  file_descriptor.name = (self.ExportedClassNameForValue(value).lower() +
                          ".proto")
  file_descriptor.package = self.PACKAGE_NAME

  descriptors = dict()
  descriptors[
      "." + rdfvalue.ExportedMetadata.protobuf.DESCRIPTOR.full_name] = (
          rdfvalue.ExportedMetadata.protobuf.DESCRIPTOR)

  # Register import of a proto file containing ExportedMetadata definition.
  file_descriptor.dependency.append(
      rdfvalue.ExportedMetadata.protobuf.DESCRIPTOR.file.name)

  message_type = file_descriptor.message_type.add()
  message_type.name = self.ExportedClassNameForValue(value)

  # Field number 1 always carries the ExportedMetadata message.
  metadata_field = message_type.field.add()
  metadata_field.name = "metadata"
  metadata_field.number = 1
  metadata_field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
  metadata_field.type = descriptor_pb2.FieldDescriptorProto.TYPE_MESSAGE
  metadata_field.type_name = (
      "." + rdfvalue.ExportedMetadata.protobuf.DESCRIPTOR.full_name)

  for number, desc in sorted(value.type_infos_by_field_number.items()):
    # Name 'metadata' is reserved to store ExportedMetadata value.
    if desc.name == "metadata":
      continue

    field = None
    if isinstance(desc, type_info.ProtoEnum):
      field = message_type.field.add()
      field.type = descriptor_pb2.FieldDescriptorProto.TYPE_ENUM
      field.type_name = desc.enum_name

      # Copy the enum definition itself from the source protobuf.
      enum_type = message_type.enum_type.add()
      if value.protobuf:
        value.protobuf.DESCRIPTOR.enum_types_by_name[
            desc.enum_name].CopyToProto(enum_type)
      else:
        raise NotImplementedError("Enums are not supported in "
                                  "non-protobuf-based RDF values.")
    elif isinstance(desc, type_info.ProtoEmbedded):
      # We don't support nested protobufs in data agnostic export yet.
      pass
    elif isinstance(desc, type_info.ProtoList):
      # We don't support repeated fields in data agnostic export yet.
      pass
    else:
      field = message_type.field.add()
      field.type = self.PRIMITIVE_TYPE_MAPPING[desc.proto_type_name]

    if field:
      field.name = desc.name
      # Offset by one: field number 1 is taken by 'metadata' above.
      field.number = number + 1
      field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

      if value.protobuf:
        # Carry over any per-field options from the source descriptor.
        field_options = value.protobuf.DESCRIPTOR.fields_by_name[
            desc.name].GetOptions()
        if field_options:
          field.options.CopyFrom(field_options)

  result_descriptor = self.MakeDescriptor(message_type, file_descriptor,
                                          descriptors=descriptors)
  factory = message_factory.MessageFactory()
  proto_class = factory.GetPrototype(result_descriptor)

  def Flatten(self, metadata, value):
    # Shallow copy: assign each set field of the source value onto the
    # flattened instance; 'metadata' is stored directly.
    self.metadata = metadata
    for desc in value.type_infos:
      if desc.name == "metadata":
        continue
      if hasattr(self, desc.name) and value.HasField(desc.name):
        setattr(self, desc.name, getattr(value, desc.name))

  return type(utils.SmartStr(message_type.name),
              (rdfvalue.RDFProtoStruct, ),
              dict(protobuf=proto_class, Flatten=Flatten))
def __init__(self):
    """Initialize an empty per-file method registry.

    A dedicated DescriptorPool (rather than the process-wide default) backs
    the message factory, keeping descriptors registered here isolated.
    """
    self.methods_by_file = {}
    private_pool = descriptor_pool.DescriptorPool()
    self.pool = private_pool
    self._factory = message_factory.MessageFactory(private_pool)