Example #1
def _get_descriptor_set():
    """Returns a FileDescriptorSet proto to be used by tf.io.decode_proto."""
    proto = pb.FileDescriptorSet()

    # The FileDescriptor for tensorflow.ranking.internal.ExampleListWithContext.
    file_proto = proto.file.add(name=_FILE_NAME,
                                package=_PACKAGE,
                                syntax="proto3")
    message_proto = file_proto.message_type.add(name=_MESSAGE_NAME)
    message_proto.field.add(name=_EXAMPLES_FIELD_NAME,
                            number=1,
                            type=pb.FieldDescriptorProto.TYPE_BYTES,
                            label=pb.FieldDescriptorProto.LABEL_REPEATED)
    message_proto.field.add(name=_CONTEXT_FIELD_NAME,
                            number=2,
                            type=pb.FieldDescriptorProto.TYPE_BYTES)

    return proto
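The snippet above presumably aliases `pb` to `google.protobuf.descriptor_pb2` and defines the name constants at module level; a minimal sketch of those assumptions (the values are hypothetical, only the package and message name come from the comment above), plus the typical "bytes://" hand-off to tf.io.decode_proto:

from google.protobuf import descriptor_pb2 as pb

# Hypothetical module-level constants; the real values live elsewhere in the module.
_FILE_NAME = "example_list_with_context.proto"
_PACKAGE = "tensorflow.ranking.internal"
_MESSAGE_NAME = "ExampleListWithContext"
_EXAMPLES_FIELD_NAME = "examples"
_CONTEXT_FIELD_NAME = "context"

# The serialized set can then be passed as a descriptor_source string.
descriptor_source = b"bytes://" + _get_descriptor_set().SerializeToString()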
def convert_desc(source_desc, dest_desc):
    """Converts proto comments to restructuredtext format.

    Proto comments are expected to be in markdown format, and to possibly
    contain links to other protobuf types and relative URLs that will not
    resolve to correct documentation using standard tools.

    This task performs the following transformations on the documentation
    in the descriptor set:
    - Replace proto links with literals (e.g. [Foo][bar.baz.Foo] -> `Foo`)
    - Resolve relative URLs to https://cloud.google.com
    - Run pandoc to convert from markdown to restructuredtext"""

    desc_set = desc.FileDescriptorSet()
    with open(source_desc, 'rb') as f:
        desc_set.ParseFromString(f.read())

    cb = CommentsConverter()

    for file_descriptor_proto in desc_set.file:
        sc_info = file_descriptor_proto.source_code_info
        locations = sc_info.location if sc_info else []
        for location in locations:
            cb.put_comment(location.leading_comments)
            cb.put_comment(location.trailing_comments)
            for c in location.leading_detached_comments:
                cb.put_comment(c)

    cb.convert()

    for file_descriptor_proto in desc_set.file:
        sc_info = file_descriptor_proto.source_code_info
        locations = sc_info.location if sc_info else []
        for location in locations:
            location.leading_comments = cb.get_next_comment()
            location.trailing_comments = cb.get_next_comment()
            detached = []
            for _ in location.leading_detached_comments:
                detached.append(cb.get_next_comment())
            del location.leading_detached_comments[:]
            location.leading_detached_comments.extend(detached)

    with open(dest_desc, mode='wb') as f:
        f.write(desc_set.SerializeToString())
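CommentsConverter is defined elsewhere; as one illustration of the first transformation listed in the docstring, turning a proto cross-reference link such as [Foo][bar.baz.Foo] into the literal `Foo` can be done with a simple substitution. A hedged sketch, not the project's actual implementation:

import re

def _replace_proto_links(text):
    # "[Foo][bar.baz.Foo]" -> "`Foo`": keep only the display name as a literal.
    return re.sub(r'\[([^\]]+)\]\[[^\]]*\]', r'`\1`', text)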
def create_message_factory(descriptor_file_path, proto_type):
    with open(descriptor_file_path, 'rb') as descriptor_file:
        descriptor_content = descriptor_file.read()

    file_desc_set_pb2 = descriptor_pb2.FileDescriptorSet()
    file_desc_set_pb2.MergeFromString(descriptor_content)

    desc_by_path = {}
    for f_desc_pb2 in file_desc_set_pb2.file:
        f_desc_pb2_encode = f_desc_pb2.SerializeToString()
        f_desc = descriptor.FileDescriptor(name=f_desc_pb2.name,
                                           package=f_desc_pb2.package,
                                           serialized_pb=f_desc_pb2_encode)

        for desc in f_desc.message_types_by_name.values():
            desc_by_path[desc.full_name] = desc

    return message_factory.MessageFactory().GetPrototype(
        desc_by_path[proto_type])
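A possible call site for the factory above; the descriptor path is a placeholder, and the message name mirrors the perfetto example that follows:

TraceMetrics = create_message_factory("metrics.descriptor",
                                      "perfetto.protos.TraceMetrics")
metrics = TraceMetrics()
with open("metrics.pb", "rb") as f:
    metrics.ParseFromString(f.read())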
def create_metrics_message_factory(metrics_descriptor_path):
    with open(metrics_descriptor_path, "rb") as metrics_descriptor_file:
        metrics_descriptor_content = metrics_descriptor_file.read()

    file_desc_set_pb2 = descriptor_pb2.FileDescriptorSet()
    file_desc_set_pb2.MergeFromString(metrics_descriptor_content)

    desc_by_path = {}
    for f_desc_pb2 in file_desc_set_pb2.file:
        f_desc_pb2_encode = f_desc_pb2.SerializeToString()
        f_desc = descriptor.FileDescriptor(name=f_desc_pb2.name,
                                           package=f_desc_pb2.package,
                                           serialized_pb=f_desc_pb2_encode)

        for desc in f_desc.message_types_by_name.values():
            desc_by_path[desc.full_name] = desc

    return message_factory.MessageFactory().GetPrototype(
        desc_by_path["perfetto.protos.TraceMetrics"])
Example #5
    def load_descriptor(self,
                        descriptor_blob,
                        fold_comments=True,
                        type_tag_name='_type'):

        # decode file descriptor set or if that is not possible,
        # try plugin request
        try:
            message = descriptor_pb2.FileDescriptorSet()
            message.ParseFromString(descriptor_blob)
        except DecodeError:
            message = CodeGeneratorRequest()
            message.ParseFromString(descriptor_blob)

        d = self.parse(message, type_tag_name=type_tag_name)
        print(d.keys())
        for _file in d.get('file', None) or d['proto_file']:
            if fold_comments:
                self.fold_comments_in(_file)
            self.catalog[_file['package']] = _file
def parse_protobin(app):
    with open(app.config.proto_bin, 'rb') as f:
        data = f.read()

    proto = descriptor_pb2.FileDescriptorSet()
    proto.ParseFromString(data)

    for file in proto.file:
        for service in file.service:
            symbol = '.{}.{}'.format(file.package, service.name)
            descriptors_by_symbol[symbol] = service
            for method_type in service.method:
                descriptors_by_symbol[symbol + '.' +
                                      method_type.name] = method_type

        for message_type in file.message_type:
            symbol = '.{}.{}'.format(file.package, message_type.name)
            package_by_symbol[symbol] = file.package
            descriptors_by_symbol[symbol] = message_type
            for enum_type in message_type.enum_type:
                package_by_symbol[symbol] = file.package
                descriptors_by_symbol[symbol + '.' +
                                      enum_type.name] = enum_type
            for nested_type in message_type.nested_type:
                package_by_symbol[symbol] = file.package
                descriptors_by_symbol[symbol + '.' +
                                      nested_type.name] = nested_type

        for enum_type in file.enum_type:
            symbol = '.{}.{}'.format(file.package, enum_type.name)
            package_by_symbol[symbol] = file.package
            descriptors_by_symbol[symbol] = enum_type

        for location in file.source_code_info.location:
            if location.HasField('leading_comments'):
                symbol = path_to_symbol(file, location.path)
                comments_by_symbol[symbol] = location.leading_comments.rstrip()
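path_to_symbol is defined elsewhere in that project; it maps a SourceCodeInfo.Location.path (alternating field numbers and indexes into FileDescriptorProto) back to a dotted symbol. A minimal sketch covering only top-level messages, enums, and services, assuming the same leading-dot symbol convention as above (the real helper presumably also resolves nested types and service methods):

def path_to_symbol(file, path):
    # In descriptor.proto, FileDescriptorProto uses field number 4 for
    # message_type, 5 for enum_type and 6 for service, so a path such as
    # [4, 2] means "the third top-level message in this file".
    if len(path) < 2:
        return None
    kind, index = path[0], path[1]
    if kind == 4:
        name = file.message_type[index].name
    elif kind == 5:
        name = file.enum_type[index].name
    elif kind == 6:
        name = file.service[index].name
    else:
        return None
    return '.{}.{}'.format(file.package, name)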
    def run(self) -> desc.FileDescriptorSet:
        # Construct the protoc command with proper argument prefix.
        protoc_command = [self._PROTOC, f"--proto_path={self._PROTOS_DIR}"]
        if self.api_common_protos:
            protoc_command.append(f"--proto_path={self._COMMON_PROTOS_DIR}")
            protoc_command.append(f"--proto_path={self._PROTOBUF_PROTOS_DIR}")
        descriptor_set_output = os.path.join(self._PROTOS_DIR,
                                             self.descriptor_set_file)
        protoc_command.append(f"-o{descriptor_set_output}")
        protoc_command.append("--include_source_info")
        protoc_command.extend(
            os.path.join(self._PROTOS_DIR, pf) for pf in self.proto_files)

        # Run protoc command to get pb file that contains serialized data of
        # the proto files.
        process = subprocess.run(protoc_command)
        if process.returncode != 0:
            raise _ProtocInvokerException(
                f"Protoc commnand to invoke unit test fails: {protoc_command}")
        # Create FileDescriptorSet from the serialized data.
        desc_set = desc.FileDescriptorSet()
        with open(descriptor_set_output, "rb") as f:
            desc_set.ParseFromString(f.read())
        return desc_set
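For reference, the argument list assembled above expands to an invocation along these lines; "-o" is the short form of protoc's "--descriptor_set_out" flag (paths here are placeholders):

protoc_command = [
    "protoc",
    "--proto_path=protos",          # self._PROTOS_DIR
    "-oprotos/descriptor_set.pb",   # descriptor_set_output
    "--include_source_info",
    "protos/example.proto",
]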
Example #8
# Generated by the pRPC protocol buffer compiler plugin.  DO NOT EDIT!
# source: access.proto

import base64
import zlib

from google.protobuf import descriptor_pb2

# Includes description of the access.proto and all of its transitive
# dependencies. Includes source code info.
FILE_DESCRIPTOR_SET = descriptor_pb2.FileDescriptorSet()
FILE_DESCRIPTOR_SET.ParseFromString(
    zlib.decompress(
        base64.b64decode(
            'eJztO0tsG1ly06RkaZ7lcYvS2BoKa7+hx2PJoihZnp1Zy+NJKLEl94wscknKXk8mY7XIptRjqp'
            'vpbkqWN7sBFtmcckyQcwIECyRIkFxyTgIkyCFAbrnmuMgplwQJghxSVe+9/pCUZ705BbAB2/3q'
            'vVdVr6pe/brJ/uojNmW1WnYQlHq+F3q5C2KUv3boeYdde4WgB/3OSrvvW6HjuWJdfn5w3j7uhW'
            'disvCzcTZTsYOW7/RwS90Oep4b2LlH7G3fDry+DyTmNJ5duLi2UpLkR6wv1eXi5FyMIf/XY2xm'
            'xJJcjo09d9w2UNAW3q7Tc26OTbS842PbDecyBFbD3GM2YbVwXzCXJZY+fU2WSmWx3XBD/6yukO'
            'VqbNz3unDQMcK6/rpY67hZ4BSI8gV2QZBKnkZLnSZvsjHcmLvFLlvdrndqt5+p06HA366/I8GS'
            '6fMFk/fZVPJkOZ1ln9tnkiA+5nbY+InV7du08+Lax7+c4OoCyXrme1reZSw+9wiKn6cpfvTLCD'
            'VBr2CxqzXbP3bCMJJI3f6Nvh2EuRvskjK1ZwlzmlLAL9Cs3mfR+JnTDoAtlPFFBTPbQeGfM2xu'
            'mEZ8I3pqbvBGnLcpnhDWEWPIbbFpOJvTdsKzZ+rCSlG9VxI3tqRubKkiF9R1tUdB8ivxxSJiQa'
            'BsJW1Magi28k6aq9fQ3bnnHMFCQndrP9PwPiCK3BOmD2LJXT8fPyk4z7+NgcJbuQq7mPQsV4aE'
            'aKDby8+/wg4Lb33+D4tsUp/U39J/S9fYzzOTUzTIrf25xje93pnvHB6FfKG1yNdW73zCm0c239'
            'nbNHm5Hx55flDi5W6X06KAg2XZ/ondLjG+F9jc6/DwyAm4EBRveW2bw/DQO7F9127zvtu2fVhi'
            '83LPaiFip2UDU0X+2PZRonyttMpggRXyluXyA5t3PNjEHZd27Zibxm7D4B0HDs3W/lQD7gA/Dn'
            'nb7jiuHXCLo8cAVCSUltfloceBVus5P0W80kZg3aFzYruMO8dgsYHnWiFyCOch0m2PAw65iKsb'
            'BOfc8nxuv7COe11g24n4hI1tfnAGm2xu9XrcOrQcNwByrgec+yXGJiczOgNRvwOP2cm39Cl9Qr'
            '9Fz5p+CZ4/YN7kBYBPgzLmdS1vcWFOHEXsoDgTlF55pvgUcLKO5x+z0WdhU0gQyE8TW10aZYCF'
            'WT2j7+W/4oOmCHvDvk+Eug4cDvStSKN6fGHKvxhtXVED+rP6Bf1KApIByFW9lIBkAXJP/z77aU'
Example #9
def _create_file_descriptor_set_proto(file_descriptor_list):
    """Creates a FileDescriptorSet proto from a list of file descriptors."""
    result = descriptor_pb2.FileDescriptorSet()
    for file_descriptor in file_descriptor_list:
        file_descriptor.CopyToProto(result.file.add())
    return result
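A possible call, using a generated module's file descriptor (foo_pb2 stands in for any generated *_pb2 module):

fds = _create_file_descriptor_set_proto([foo_pb2.DESCRIPTOR])
print(fds.file[0].name)   # e.g. "foo.proto"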
Example #10
def _GetDescriptorSetForTextInput():
    """Returns a string for tf.io.decode_proto's descriptor_source."""
    file_descriptor_set = descriptor_pb2.FileDescriptorSet()
    text_input_pb2.DESCRIPTOR.CopyToProto(file_descriptor_set.file.add())
    return b'bytes://' + file_descriptor_set.SerializeToString()
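A hedged sketch of how the string returned above would typically be consumed; the message and field names are assumptions, not read from text_input_pb2:

import tensorflow as tf

sizes, values = tf.io.decode_proto(
    bytes=serialized_examples,              # a string tensor of serialized protos
    message_type="some.package.TextInput",  # assumed message name
    field_names=["text"],                   # assumed field
    output_types=[tf.string],
    descriptor_source=_GetDescriptorSetForTextInput())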
def Decode(path):
    with open(path, 'rb') as f:
        file_set = descriptor_pb2.FileDescriptorSet()
        file_set.ParseFromString(f.read())
        return str(file_set)
Example #12
 def test_build_descriptor(self):
     expected_file_descriptor_list = [
         """
       name: "tfx/utils/testdata/bar.proto"
       package: "tfx.utils.proto.testdata"
       message_type {
         name: "Bar"
         field {
           name: "int_field"
           number: 1
           label: LABEL_OPTIONAL
           type: TYPE_INT64
         }
       }
       message_type {
         name: "Bar2"
         field {
           name: "str_field"
           number: 1
           label: LABEL_OPTIONAL
           type: TYPE_STRING
         }
       }
       syntax: "proto3"
     """, """
       name: "tfx/utils/testdata/foo.proto"
       package: "tfx.utils.proto.testdata"
       dependency: "tfx/utils/testdata/bar.proto"
       message_type {
         name: "Foo"
         field {
           name: "bar"
           number: 1
           label: LABEL_OPTIONAL
           type: TYPE_MESSAGE
           type_name: ".tfx.utils.proto.testdata.Bar"
         }
         field {
           name: "bar2"
           number: 2
           label: LABEL_OPTIONAL
           type: TYPE_MESSAGE
           type_name: ".tfx.utils.proto.testdata.Bar2"
         }
       }
       message_type {
         name: "Foo2"
         field {
           name: "value"
           number: 1
           label: LABEL_OPTIONAL
           type: TYPE_INT64
         }
       }
       message_type {
         name: "TestProto"
         field {
           name: "string_value"
           number: 1
           label: LABEL_OPTIONAL
           type: TYPE_STRING
         }
         field {
           name: "int_value"
           number: 2
           label: LABEL_OPTIONAL
           type: TYPE_INT32
         }
         field {
           name: "double_value"
           number: 3
           label: LABEL_OPTIONAL
           type: TYPE_DOUBLE
         }
       }
       syntax: "proto3"
     """
     ]
     actual_file_descriptor = descriptor_pb2.FileDescriptorSet()
     proto_utils.build_file_descriptor_set(foo_pb2.Foo,
                                           actual_file_descriptor)
     self.assertEqual(len(actual_file_descriptor.file), 2)
     actual_file_descriptor_sorted = sorted(list(
         actual_file_descriptor.file),
                                            key=lambda fd: fd.name)
     for expected, actual in zip(expected_file_descriptor_list,
                                 actual_file_descriptor_sorted):
         self.assertProtoPartiallyEquals(expected, actual)
def make_file_set(
        files: Sequence[desc.FileDescriptorProto] = (), ) -> wrappers.FileSet:
    return wrappers.FileSet(file_set_pb=desc.FileDescriptorSet(file=files), )
    def get_descriptor_set(self) -> desc.FileDescriptorSet:
        local_dir = os.getcwd()
        desc_set = desc.FileDescriptorSet()
        # If users pass in descriptor set file directly, we
        # can skip running the protoc command.
        if self.descriptor_set:
            with open(self.descriptor_set, "rb") as f:
                desc_set.ParseFromString(f.read())
            return desc_set
        # Construct the protoc command with proper argument prefix.
        protoc_command = [self.protoc_binary]
        for directory in self.proto_definition_dirs:
            if self.local_protobuf:
                protoc_command.append(f"--proto_path={directory}")
            else:
                protoc_command.append(f"--proto_path={local_dir}/{directory}")
        if self.local_protobuf:
            protoc_command.append(f"--proto_path={self.PROTOBUF_PROTOS_DIR}")
        if self.include_source_code:
            protoc_command.append("--include_source_info")
        # Include the imported dependencies.
        protoc_command.append("--include_imports")
        if self.local_protobuf:
            protoc_command.extend(pf for pf in self.proto_files)
        else:
            protoc_command.extend(
                (local_dir + "/" + pf) for pf in self.proto_files)

        # Run protoc command to get pb file that contains serialized data of
        # the proto files.
        if self.protoc_binary == self.GRPC_TOOLS_PROTOC:
            fd, path = tempfile.mkstemp()
            protoc_command.append("--descriptor_set_out=" + path)
            # Use grpcio-tools.protoc to compile proto files
            if protoc.main(protoc_command) != 0:
                raise _ProtocInvokerException(
                    f"Protoc command to load the descriptor set fails. {protoc_command}"
                )
            else:
                # Create FileDescriptorSet from the serialized data.
                with open(fd, "rb") as f:
                    desc_set.ParseFromString(f.read())
                return desc_set
        try:
            protoc_command.append("-o/dev/stdout")
            union_command = " ".join(protoc_command)
            logging.info(f"Run protoc command: {union_command}")
            process = subprocess.run(union_command,
                                     shell=True,
                                     stdout=PIPE,
                                     stderr=PIPE)
            logging.info(f"Check the process output is not empty:")
            logging.info(bool(process.stdout))
            if process.returncode != 0:
                raise _ProtocInvokerException(
                    f"Protoc command to load the descriptor set fails. {union_command}, error: {process.stderr}"
                )
        except (CalledProcessError, FileNotFoundError) as e:
            logging.info(f"Call process error: {e}")

        # Create FileDescriptorSet from the serialized data.
        desc_set.ParseFromString(process.stdout)
        return desc_set
Example #15
File: autogen.py  Project: merose/yamcs
        "method": method,
        "websocket_options": method.options.Extensions[annotations_pb2.websocket],
    }
    text = YamcsReSTRenderer().render("websocket.rst_t", context)
    with FileAvoidWrite(filename) as f:
        f.write(text)


if __name__ == "__main__":
    destdir = Path(sys.argv[1])
    destdir.mkdir(exist_ok=True)

    with open("yamcs-api.protobin", "rb") as f:
        data = f.read()

    proto = descriptor_pb2.FileDescriptorSet()
    proto.ParseFromString(data)
    service_links = []
    for file in proto.file:
        for service in file.service:
            servicedir = Path(destdir, camel_to_slug(service.name).replace("-api", ""))
            servicedir.mkdir(exist_ok=True)

            servicefile = os.path.join(servicedir, "index.rst")
            symbol = "." + file.package + "." + service.name
            create_service_file(symbol, service, servicefile)
            service_links.append(servicedir.name + "/index")

            for method in service.method:
                filename = camel_to_slug(method.name) + ".rst"
                methodfile = os.path.join(servicedir, filename)