def fetchProtoJSON(the_path):
    with open(the_path, 'rb') as f:
        raw_proto = FileDescriptorSet()
        raw_proto.ParseFromString(f.read())
    return MessageToJson(raw_proto)
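# Hedged usage sketch for fetchProtoJSON above. "messages.desc" is a
# hypothetical descriptor-set file, e.g. one produced by
# `protoc --descriptor_set_out=messages.desc ...`; the two imports are the
# ones the function body relies on.
from google.protobuf.descriptor_pb2 import FileDescriptorSet
from google.protobuf.json_format import MessageToJson

json_text = fetchProtoJSON("messages.desc")
print(json_text)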
def _write_header(self, fdescr):
    self._fobj.write(MAGIC)
    # Write protocol buffer version in header
    self._write_blob(T_PROTOBUF_VERSION,
                     len(google.protobuf.__version__),
                     google.protobuf.__version__.encode("utf8"))
    if isinstance(fdescr, PBZReader):
        self._write_blob(T_FILE_DESCRIPTOR,
                         len(fdescr._raw_descriptor),
                         fdescr._raw_descriptor)
        self._dpool = fdescr._dpool
    else:
        # Read FileDescriptorSet
        with open(fdescr, "rb") as fi:
            fdset = fi.read()
            sz = fi.tell()
        # Write FileDescriptorSet
        self._write_blob(T_FILE_DESCRIPTOR, sz, fdset)
        # Parse the descriptor so we can check that the messages serialized
        # into this file are defined in the descriptor set
        self._dpool = descriptor_pool.DescriptorPool()
        ds = FileDescriptorSet()
        ds.ParseFromString(fdset)
        for df in ds.file:
            self._dpool.Add(df)
def main(args=None):
    """Run CLI."""
    import argparse

    from . import LogReader

    parser = argparse.ArgumentParser(
        description="Extract google.protobuf source tree.")
    parser.add_argument('-o', '--destination', required=True,
                        help='Destination directory')
    parser.add_argument('-l', '--log', help='Log file')
    parser.add_argument('-f', '--descriptor_set',
                        help='File with FileDescriptorSet')
    args = parser.parse_args(args=args)

    if args.log:
        with LogReader(args.log) as log_reader:
            proto = log_reader.header.proto
    elif args.descriptor_set:
        proto = FileDescriptorSet()
        # The descriptor set is binary; ParseFromString needs bytes,
        # so open the file in binary mode
        with open(args.descriptor_set, 'rb') as f:
            proto.ParseFromString(f.read())
    else:
        raise ValueError('You should specify one of: LOG, DESCRIPTOR_SET')
    extract_source_tree(args.destination, proto)
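# Hypothetical invocations of the CLI above (file names are placeholders;
# the flags match the argparse definitions in main):
#
#     main(['--log', 'capture.log', '-o', 'out_tree/'])
#     main(['--descriptor_set', 'messages.desc', '-o', 'out_tree/'])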
def discover(self, request, context):
    logger.info("discovering.")
    pprint(request)
    descriptor_set = FileDescriptorSet()
    for entity in self.event_sourced_entities + self.action_protocol_entities:
        logger.info(f"entity: {entity.name()}")
        for descriptor in entity.file_descriptors:
            logger.info(f"discovering {descriptor.name}")
            logger.info(f"SD: {entity.service_descriptor.full_name}")
            descriptor_set.file.append(
                FileDescriptorProto.FromString(descriptor.serialized_pb))
    # Append the well-known and Cloudstate support protos from the default pool
    for file_name in (
            "google/protobuf/empty.proto",
            "cloudstate/entity_key.proto",
            "cloudstate/eventing.proto",
            "google/protobuf/descriptor.proto",
            "google/api/annotations.proto",
            "google/api/http.proto",
            "google/api/httpbody.proto",
            "google/protobuf/any.proto",
    ):
        descriptor_set.file.append(
            FileDescriptorProto.FromString(
                Default().FindFileByName(file_name).serialized_pb))
    spec = entity_pb2.EntitySpec(
        service_info=entity_pb2.ServiceInfo(
            service_name="",
            service_version="0.1.0",
            service_runtime="Python " + platform.python_version()
            + " [" + platform.python_implementation() + " "
            + platform.python_compiler() + "]",
            support_library_name="cloudstate-python-support",
            support_library_version="0.1.0",
        ),
        entities=[
            entity_pb2.Entity(
                entity_type=entity.entity_type(),
                service_name=entity.service_descriptor.full_name,
                persistence_id=entity.persistence_id,
            )
            for entity in self.event_sourced_entities
            + self.action_protocol_entities
        ],
        proto=descriptor_set.SerializeToString(),
    )
    return spec
def load_descriptor_set(protoc_bin, proto_path):
    with tempfile.TemporaryDirectory("lorris_temp_proto") as tmpdir:
        descr_path = os.path.join(tmpdir, "proto.descr")
        subprocess.check_call(
            [protoc_bin, "--descriptor_set_out=" + descr_path, proto_path])
        with open(descr_path, "rb") as f:
            s = FileDescriptorSet()
            s.ParseFromString(f.read())
            return s
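# Hedged usage sketch for load_descriptor_set above; "protoc" must be on
# the PATH and "example.proto" is a hypothetical input file.
fds = load_descriptor_set("protoc", "example.proto")
for file_proto in fds.file:
    print(file_proto.name, [m.name for m in file_proto.message_type])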
def __init__(self, path):
    self._log_reader = _LogReader(path)
    # header
    header_ = self._log_reader.header
    fd_set = FileDescriptorSet()
    fd_set.ParseFromString(header_.proto)
    self._header = Header(proto=fd_set, types=header_.types)
    # descriptors
    self._pool = DescriptorPool()
    for proto in self._header.proto.file:
        self._pool.Add(proto)
    self._factory = MessageFactory()
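# Hedged follow-on sketch: once the pool is populated as in __init__ above,
# a message class can be resolved by its fully qualified name.
# "my.package.MyMessage" is a hypothetical type name, and GetPrototype is
# the classic MessageFactory API (newer protobuf releases expose
# message_factory.GetMessageClass instead).
#
#     descriptor = self._pool.FindMessageTypeByName("my.package.MyMessage")
#     msg_cls = self._factory.GetPrototype(descriptor)
#     msg = msg_cls()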
def get_compiled_proto():
    # Compile trezor.proto to binary format
    pdir = os.path.abspath(TREZOR_PROTO_DIR)
    pfile = os.path.join(pdir, "messages.proto")
    cmd = ("protoc --include_imports -I" + PROTOBUF_PROTO_DIR
           + " -I" + pdir + " " + pfile + " -otrezor.bin")
    subprocess.check_call(cmd.split())

    # Load the compiled protocol description; it is binary, so read as bytes
    with open('trezor.bin', 'rb') as f:
        proto = f.read()
    os.unlink('trezor.bin')

    # Parse it into a FileDescriptorSet structure
    compiled = FileDescriptorSet()
    compiled.ParseFromString(proto)
    return compiled
def load_descriptor_set(
    proto_file: str, proto_path: Optional[List[str]] = None
) -> FileDescriptorSet:
    wkt_protos = pkg_resources.resource_filename("grpc_tools", "_proto")
    validate_protos = str(
        Path(
            pkg_resources.resource_filename("validate", "validate.proto")
        ).parent.parent
    )
    harness_protos = str(Path(__file__).parent.parent.parent.absolute())

    with tempfile.NamedTemporaryFile() as f:
        args = [
            "grpc_tools.protoc",
            "--include_imports",
            f"--proto_path={wkt_protos}",
            f"--proto_path={validate_protos}",
            f"--proto_path={harness_protos}",
            f"--proto_path={Path(proto_file).parent}",
            f"--descriptor_set_out={f.name}",
        ]
        if proto_path:
            args.extend(f"--proto_path={p}" for p in proto_path)
        args.append(proto_file)

        result = protoc.main(args)
        if result != 0:
            raise Exception("Failed to call protoc")

        content = f.read()

    return FileDescriptorSet.FromString(content)
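# Hedged usage sketch for the typed load_descriptor_set above;
# "harness/my_service.proto" is a hypothetical path relative to the
# harness proto root.
fds = load_descriptor_set("harness/my_service.proto")
print([f.name for f in fds.file])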
def register_descriptor_data(self, type_url, descriptor_data):
    if type_url not in self._type_url_to_descriptor_data:
        is_binary = (
            isinstance(descriptor_data, six.string_types) or
            isinstance(descriptor_data, six.binary_type) or
            isinstance(descriptor_data, bytearray))
        if is_binary:
            from google.protobuf.descriptor_pb2 import FileDescriptorSet
            fds = FileDescriptorSet()
            fds.ParseFromString(descriptor_data)
            descriptor_data = fds
        self._type_url_to_descriptor_data[type_url] = descriptor_data
        if self._dynamic_factory is None:
            self._dynamic_factory = DynamicMessageFactory()
        self._dynamic_factory.register_type(type_url, descriptor_data)
def discover(self, request, context):
    pprint(request)
    descriptor_set = FileDescriptorSet()
    for entity in self.event_sourced_entities:
        for descriptor in entity.file_descriptors:
            descriptor_set.file.append(
                FileDescriptorProto.FromString(descriptor.serialized_pb))
    # Append the well-known and Cloudstate support protos from the default pool
    for file_name in (
            'google/protobuf/empty.proto',
            'cloudstate/entity_key.proto',
            'google/protobuf/descriptor.proto',
            'google/api/annotations.proto',
            'google/api/http.proto',
    ):
        descriptor_set.file.append(
            FileDescriptorProto.FromString(
                Default().FindFileByName(file_name).serialized_pb))
    spec = entity_pb2.EntitySpec(
        service_info=entity_pb2.ServiceInfo(
            service_version='0.1.0',
            service_runtime='Python ' + platform.python_version()
            + ' [' + platform.python_implementation() + ' '
            + platform.python_compiler() + ']',
            support_library_name='cloudstate-python-support',
            support_library_version='0.1.0'
        ),
        entities=[
            entity_pb2.Entity(
                entity_type=entity.entity_type(),
                service_name=entity.service_descriptor.full_name,
                persistence_id=entity.persistence_id,
            )
            for entity in self.event_sourced_entities],
        proto=descriptor_set.SerializeToString()
    )
    return spec
def read_descriptor_pool(self):
    dpool = descriptor_pool.DescriptorPool()
    while True:
        vtype, data = self._read_next_obj()
        if vtype is None:
            raise Exception("Unexpected end of file")
        if vtype == T_FILE_DESCRIPTOR:
            ds = FileDescriptorSet()
            ds.ParseFromString(data)
            for df in ds.file:
                dpool.Add(df)
            return dpool, data
        elif vtype == T_PROTOBUF_VERSION:
            pbversion = data.decode("utf8")
            # Compare version components numerically; a plain string
            # comparison would order "10" before "9"
            local = tuple(int(x) for x in google.protobuf.__version__.split("."))
            stored = tuple(int(x) for x in pbversion.split("."))
            if local < stored:
                warnings.warn(
                    f"File uses a more recent version of protobuf "
                    f"({pbversion})")
        else:
            raise Exception(f"Unknown message type {vtype}")
def parse(self, descfilename, allmeta):
    """Parse the given desc file and convert it to ProtoMeta information.

    Args:
        descfilename: proto descriptor file generated by
            `protoc --descriptor_set_out --include_source_info`
        allmeta: collection mapping message names to ProtoMeta

    Returns:
        A dict mapping message name and ProtoMeta
    """
    with open(descfilename, "rb") as f:
        desc = FileDescriptorSet.FromString(f.read())
    for onefile in desc.file:
        allmeta.add(self.parse_file(onefile))
    # for proto_message_meta in proto_meta_mgr.metas:
    #     if proto_message_meta.name in self._reference_message_name:
    #         proto_message_meta.is_reference_message = True
    return allmeta
def build_fds_for_msg(msg):
    """
    Given a Protobuf message `msg` (or message class), build a
    `FileDescriptorSet` that can be used with `DynamicMessageFactory` below
    (or `protobag::DynamicMsgFactory` in C++) to dynamically deserialize
    instances of `msg` at runtime (when the Protobuf-generated code for
    `msg` is unavailable). See also `protobag::DynamicMsgFactory` in C++.

    We traverse `msg`'s descriptor and its dependencies to collect all data
    necessary to decode a `msg` instance. (NB: the current search is
    over-complete and pulls in unrelated types, too.) The algorithm below
    mirrors that in `protobag::BagIndexBuilder::Observe()`. We must run this
    collection in Python (and not C++) because we assume we only have the
    Protobuf Python-generated code available for `msg` in this code path.

    Args:
        msg (Protobuf message or class): Build a `FileDescriptorSet` based
            upon the `DESCRIPTOR` of this message.

    Returns:
        A `FileDescriptorSet` protobuf message instance.
    """
    from google.protobuf.descriptor_pb2 import FileDescriptorProto
    from google.protobuf.descriptor_pb2 import FileDescriptorSet

    q = [msg.DESCRIPTOR.file]
    visited = set()
    files = []
    while q:
        current = q.pop()
        if current.name not in visited:
            # Visit!
            visited.add(current.name)
            fd = FileDescriptorProto()
            current.CopyToProto(fd)
            files.append(fd)
            q.extend(current.dependencies)
    return FileDescriptorSet(file=files)
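# Hedged usage sketch for build_fds_for_msg above, using a stock well-known
# type so the snippet is self-contained.
from google.protobuf.timestamp_pb2 import Timestamp

fds = build_fds_for_msg(Timestamp())
print([f.name for f in fds.file])  # expect ['google/protobuf/timestamp.proto']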
def load_proto_msgs(proto_path, ret_source_info=False):
    # List imports that we need to specify to protoc for the necessary
    # *_pb2.py to be generated
    proto_dir = Path(proto_path).parent
    arg_proto_path = proto_dir
    arg_proto_files = []
    to_import = [str(proto_path)]

    while to_import:
        next_import = to_import.pop()
        while not exists(arg_proto_path / next_import) and \
                str(arg_proto_path.parent).startswith(str(BASE_PATH)):
            arg_proto_path = arg_proto_path.parent
        next_import = str(arg_proto_path / next_import)
        if next_import not in arg_proto_files:
            arg_proto_files.insert(0, next_import)
            with open(next_import) as fd:
                # Note: a raw string and `\s+(?:weak|public)` are needed for
                # `import public "..."` statements to be matched
                for prior_import in reversed(
                        findall(r'import(?:\s+(?:weak|public))?\s*"(.+?)"\s*;',
                                fd.read())):
                    to_import.append(prior_import)

    # Execute protoc and import the actual module from a tmp directory
    with TemporaryDirectory() as arg_python_out:
        args = [
            'protoc', '--proto_path=%s' % arg_proto_path,
            '--python_out=' + arg_python_out, *arg_proto_files
        ]
        if ret_source_info:
            args += [
                '-o%s' % (Path(arg_python_out) / 'desc_info'),
                '--include_source_info', '--include_imports'
            ]
        cmd = run(args, stderr=PIPE, encoding='utf8')
        if cmd.returncode:
            raise ValueError(cmd.stderr)

        if ret_source_info:
            with open(Path(arg_python_out) / 'desc_info', 'rb') as fd:
                yield FileDescriptorSet.FromString(fd.read()), arg_proto_path
            return

        # Do actual import
        module_name = str(proto_dir).replace(str(arg_proto_path),
                                             '').strip('/\\').replace('/', '.')
        if module_name:
            module_name += '.'
        module_name += Path(proto_path).stem.replace('-', '_') + '_pb2'

        PATH.append(arg_python_out)
        module = import_module(module_name)
        PATH.remove(arg_python_out)

    # Recursively iterate over class members to list Protobuf messages
    yield from iterate_proto_msg(module, '')
def _createDescriptorProto(self):
    proto = FileDescriptorSet()

    file_proto = proto.file.add(
        name='types.proto', package='tensorflow', syntax='proto3')
    enum_proto = file_proto.enum_type.add(name='DataType')
    enum_proto.value.add(name='DT_DOUBLE', number=0)
    enum_proto.value.add(name='DT_BOOL', number=1)

    file_proto = proto.file.add(
        name='test_example.proto',
        package='tensorflow.contrib.proto',
        dependency=['types.proto'])

    message_proto = file_proto.message_type.add(name='TestCase')
    message_proto.field.add(
        name='values',
        number=1,
        type=FieldDescriptorProto.TYPE_MESSAGE,
        type_name='.tensorflow.contrib.proto.TestValue',
        label=FieldDescriptorProto.LABEL_REPEATED)
    message_proto.field.add(
        name='shapes',
        number=2,
        type=FieldDescriptorProto.TYPE_INT32,
        label=FieldDescriptorProto.LABEL_REPEATED)
    message_proto.field.add(
        name='sizes',
        number=3,
        type=FieldDescriptorProto.TYPE_INT32,
        label=FieldDescriptorProto.LABEL_REPEATED)
    message_proto.field.add(
        name='fields',
        number=4,
        type=FieldDescriptorProto.TYPE_MESSAGE,
        type_name='.tensorflow.contrib.proto.FieldSpec',
        label=FieldDescriptorProto.LABEL_REPEATED)

    message_proto = file_proto.message_type.add(name='TestValue')
    message_proto.field.add(
        name='double_value',
        number=1,
        type=FieldDescriptorProto.TYPE_DOUBLE,
        label=FieldDescriptorProto.LABEL_REPEATED)
    message_proto.field.add(
        name='bool_value',
        number=2,
        type=FieldDescriptorProto.TYPE_BOOL,
        label=FieldDescriptorProto.LABEL_REPEATED)

    message_proto = file_proto.message_type.add(name='FieldSpec')
    message_proto.field.add(
        name='name',
        number=1,
        type=FieldDescriptorProto.TYPE_STRING,
        label=FieldDescriptorProto.LABEL_OPTIONAL)
    message_proto.field.add(
        name='dtype',
        number=2,
        type=FieldDescriptorProto.TYPE_ENUM,
        type_name='.tensorflow.DataType',
        label=FieldDescriptorProto.LABEL_OPTIONAL)
    message_proto.field.add(
        name='value',
        number=3,
        type=FieldDescriptorProto.TYPE_MESSAGE,
        type_name='.tensorflow.contrib.proto.TestValue',
        label=FieldDescriptorProto.LABEL_OPTIONAL)

    return proto
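# Hedged follow-on sketch: the FileDescriptorSet built by
# _createDescriptorProto() can be loaded into a DescriptorPool, which
# verifies that test_example.proto's reference to '.tensorflow.DataType'
# resolves against types.proto. Assumes `set_proto` holds the returned set.
from google.protobuf import descriptor_pool

pool = descriptor_pool.DescriptorPool()
for file_proto in set_proto.file:
    pool.Add(file_proto)
test_case = pool.FindMessageTypeByName('tensorflow.contrib.proto.TestCase')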
def _createDescriptorFile(self):
    set_proto = FileDescriptorSet()

    file_proto = set_proto.file.add(
        name='types.proto', package='tensorflow', syntax='proto3')
    enum_proto = file_proto.enum_type.add(name='DataType')
    enum_proto.value.add(name='DT_DOUBLE', number=0)
    enum_proto.value.add(name='DT_BOOL', number=1)

    file_proto = set_proto.file.add(
        name='test_example.proto',
        package='tensorflow.contrib.proto',
        dependency=['types.proto'])

    message_proto = file_proto.message_type.add(name='TestCase')
    message_proto.field.add(
        name='values', number=1,
        type=FieldDescriptorProto.TYPE_MESSAGE,
        type_name='.tensorflow.contrib.proto.TestValue',
        label=FieldDescriptorProto.LABEL_REPEATED)
    message_proto.field.add(
        name='shapes', number=2,
        type=FieldDescriptorProto.TYPE_INT32,
        label=FieldDescriptorProto.LABEL_REPEATED)
    message_proto.field.add(
        name='sizes', number=3,
        type=FieldDescriptorProto.TYPE_INT32,
        label=FieldDescriptorProto.LABEL_REPEATED)
    message_proto.field.add(
        name='fields', number=4,
        type=FieldDescriptorProto.TYPE_MESSAGE,
        type_name='.tensorflow.contrib.proto.FieldSpec',
        label=FieldDescriptorProto.LABEL_REPEATED)

    message_proto = file_proto.message_type.add(name='TestValue')
    message_proto.field.add(
        name='double_value', number=1,
        type=FieldDescriptorProto.TYPE_DOUBLE,
        label=FieldDescriptorProto.LABEL_REPEATED)
    message_proto.field.add(
        name='bool_value', number=2,
        type=FieldDescriptorProto.TYPE_BOOL,
        label=FieldDescriptorProto.LABEL_REPEATED)

    message_proto = file_proto.message_type.add(name='FieldSpec')
    message_proto.field.add(
        name='name', number=1,
        type=FieldDescriptorProto.TYPE_STRING,
        label=FieldDescriptorProto.LABEL_OPTIONAL)
    message_proto.field.add(
        name='dtype', number=2,
        type=FieldDescriptorProto.TYPE_ENUM,
        type_name='.tensorflow.DataType',
        label=FieldDescriptorProto.LABEL_OPTIONAL)
    message_proto.field.add(
        name='value', number=3,
        type=FieldDescriptorProto.TYPE_MESSAGE,
        type_name='.tensorflow.contrib.proto.TestValue',
        label=FieldDescriptorProto.LABEL_OPTIONAL)

    fn = os.path.join(self.get_temp_dir(), 'descriptor.pb')
    with open(fn, 'wb') as f:
        f.write(set_proto.SerializeToString())
    return fn
def read_desc(src):
    with open(src, "rb") as f:
        return FileDescriptorSet.FromString(f.read())
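# Hedged companion sketch for read_desc above: list every top-level message
# in a descriptor set. "messages.desc" is a hypothetical file produced by
# `protoc --descriptor_set_out=...`.
fds = read_desc("messages.desc")
for file_proto in fds.file:
    for message_proto in file_proto.message_type:
        print(f"{file_proto.package}.{message_proto.name}")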
#!/usr/bin/env python3
import sys

import generator.File
from google.protobuf.descriptor_pb2 import FileDescriptorSet

base_file = sys.argv[1]

with open("{}.pb".format(base_file), "rb") as f:
    # Load the descriptor protobuf file
    d = FileDescriptorSet()
    d.ParseFromString(f.read())

# Check that there is only one file
assert len(d.file) == 1

# Load the file
b = generator.File.File(d.file[0], base_file)

# Generate the c++ file
header, impl, python = b.generate_cpp()

with open("{}.h".format(base_file), "w") as f:
    f.write(header)

with open("{}.cpp".format(base_file), "w") as f:
    f.write(impl)

with open("{}.py.cpp".format(base_file), "w") as f:
    f.write(python)