Example No. 1
 def testCopyToProto_TypeError(self):
     file_proto = descriptor_pb2.FileDescriptorProto()
     self.assertRaises(TypeError,
                       unittest_pb2.TestEmptyMessage.DESCRIPTOR.CopyToProto,
                       file_proto)
     self.assertRaises(TypeError,
                       unittest_pb2.ForeignEnum.DESCRIPTOR.CopyToProto,
                       file_proto)
     self.assertRaises(TypeError,
                       unittest_pb2.TestService.DESCRIPTOR.CopyToProto,
                       file_proto)
     proto = descriptor_pb2.DescriptorProto()
     self.assertRaises(TypeError,
                       unittest_import_pb2.DESCRIPTOR.CopyToProto, proto)
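For reference, a minimal counterpart sketch (assuming the same unittest_pb2 module used in the test above): each kind of descriptor copies cleanly into its own *DescriptorProto message, so no TypeError is raised.

from google.protobuf import descriptor_pb2

msg_proto = descriptor_pb2.DescriptorProto()
unittest_pb2.TestEmptyMessage.DESCRIPTOR.CopyToProto(msg_proto)

enum_proto = descriptor_pb2.EnumDescriptorProto()
unittest_pb2.ForeignEnum.DESCRIPTOR.CopyToProto(enum_proto)

service_proto = descriptor_pb2.ServiceDescriptorProto()
unittest_pb2.TestService.DESCRIPTOR.CopyToProto(service_proto)

file_proto = descriptor_pb2.FileDescriptorProto()
unittest_pb2.DESCRIPTOR.CopyToProto(file_proto)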
Example No. 2
    def test_each_proto_matches_tensorflow(self):
        for tf_path, tb_path in PROTO_IMPORTS:
            tf_pb2 = importlib.import_module(tf_path)
            tb_pb2 = importlib.import_module(tb_path)
            expected = descriptor_pb2.FileDescriptorProto()
            actual = descriptor_pb2.FileDescriptorProto()
            tf_pb2.DESCRIPTOR.CopyToProto(expected)
            tb_pb2.DESCRIPTOR.CopyToProto(actual)

            # Apply the same replacements to `expected` that proto/update.sh
            # performs, so it should then match `actual`.
            actual = str(actual)
            expected = str(expected)
            for orig, repl in PROTO_REPLACEMENTS:
                expected = expected.replace(orig, repl)

            diff = difflib.unified_diff(actual.splitlines(1),
                                        expected.splitlines(1))
            diff = ''.join(diff)

            self.assertEqual(
                diff, '',
                '{} and {} did not match:\n{}'.format(tf_path, tb_path, diff))
Example No. 3
def make_file_pb2(name: str = 'my_proto.proto', package: str = 'example.v1', *,
                  messages: Sequence[descriptor_pb2.DescriptorProto] = (),
                  enums: Sequence[descriptor_pb2.EnumDescriptorProto] = (),
                  services: Sequence[descriptor_pb2.ServiceDescriptorProto] = (),
                  locations: Sequence[descriptor_pb2.SourceCodeInfo.Location] = (),
                  ) -> descriptor_pb2.FileDescriptorProto:
    return descriptor_pb2.FileDescriptorProto(
        name=name,
        package=package,
        message_type=messages,
        enum_type=enums,
        service=services,
        source_code_info=descriptor_pb2.SourceCodeInfo(location=locations),
    )
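A hypothetical call to the helper above, building a file proto that carries one (made-up) message type:

file_pb = make_file_pb2(
    name='squid.proto',
    package='mollusca.v1',
    messages=[descriptor_pb2.DescriptorProto(name='Squid')],
)
assert file_pb.package == 'mollusca.v1'
assert file_pb.message_type[0].name == 'Squid'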
Example No. 4
    def maybe_add_descriptor(cls, filename, package):
        descriptor = cls.registry.get(filename)
        if not descriptor:
            descriptor = cls.registry[filename] = cls(
                descriptor=descriptor_pb2.FileDescriptorProto(
                    name=filename, package=package, syntax="proto3",
                ),
                enums=collections.OrderedDict(),
                messages=collections.OrderedDict(),
                name=filename,
                nested={},
                nested_enum={},
            )

        return descriptor
Example No. 5
def test_build_factory():
    proto = descriptor_pb2.FileDescriptorProto(
        package='google.mollusc.v1alpha1'
    )
    old = naming.Naming.build(
        proto,
        opts=Options(old_naming=True)
    )
    assert old.versioned_module_name == 'mollusc.v1alpha1'

    new = naming.Naming.build(
        proto,
        opts=Options()
    )
    assert new.versioned_module_name == 'mollusc_v1alpha1'
Example No. 6
def _MakeFileDescriptorProto(proto_file_name, full_name, field_items):
    """Populate FileDescriptorProto for MessageFactory's DescriptorPool."""
    package, name = full_name.rsplit('.', 1)
    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name)
    file_proto.package = package
    desc_proto = file_proto.message_type.add()
    desc_proto.name = name
    for f_number, (f_name, f_type) in enumerate(field_items, 1):
        field_proto = desc_proto.field.add()
        field_proto.name = f_name
        field_proto.number = f_number
        field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
        field_proto.type = f_type
    return file_proto
Example No. 7
def test_get_filename_with_proto():
    file_pb2 = descriptor_pb2.FileDescriptorProto(
        name="bacon.proto",
        package="foo.bar.v1",
    )
    api = make_api(
        make_proto(file_pb2),
        naming=make_naming(namespace=(), name="Spam", version="v2"),
    )

    g = make_generator()
    assert (g._get_filename(
        "%name/types/%proto.py.j2",
        api_schema=api,
        context={"proto": api.protos["bacon.proto"]},
    ) == "spam/types/bacon.py")
Example No. 8
def test_get_filename_with_proto():
    file_pb2 = descriptor_pb2.FileDescriptorProto(
        name='bacon.proto',
        package='foo.bar.v1',
    )
    api = make_api(
        make_proto(file_pb2),
        naming=make_naming(namespace=(), name='Spam', version='v2'),
    )

    g = make_generator()
    assert g._get_filename(
        '$name/types/$proto.py.j2',
        api_schema=api,
        context={'proto': api.protos['bacon.proto']},
    ) == 'spam/types/bacon.py'
Example No. 9
def test_build_with_annotations():
    proto = descriptor_pb2.FileDescriptorProto(
        name='spanner.proto',
        package='google.spanner.v1',
    )
    proto.options.Extensions[client_pb2.client_package].MergeFrom(
        client_pb2.Package(
            namespace=['Google', 'Cloud'],
            title='Spanner',
            version='v1',
        ), )
    n = naming.Naming.build(proto)
    assert n.name == 'Spanner'
    assert n.namespace == ('Google', 'Cloud')
    assert n.version == 'v1'
    assert n.product_name == 'Spanner'
Example No. 10
 def testFileDescriptorOptionsWithCustomDescriptorPool(self):
   # Create a descriptor pool, and add a new FileDescriptorProto to it.
   pool = descriptor_pool.DescriptorPool()
   file_name = 'file_descriptor_options_with_custom_descriptor_pool.proto'
   file_descriptor_proto = descriptor_pb2.FileDescriptorProto(name=file_name)
   extension_id = file_options_test_pb2.foo_options
   file_descriptor_proto.options.Extensions[extension_id].foo_name = 'foo'
   pool.Add(file_descriptor_proto)
   # The options set on the FileDescriptorProto should be available in the
   # descriptor even if they contain extensions that cannot be deserialized
   # using the pool.
   file_descriptor = pool.FindFileByName(file_name)
   options = file_descriptor.GetOptions()
   self.assertEqual('foo', options.Extensions[extension_id].foo_name)
   # The object returned by GetOptions() is cached.
   self.assertIs(options, file_descriptor.GetOptions())
Example No. 11
def test_get_response_ignores_unwanted_transports_and_clients():
    g = make_generator()
    with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt:
        lt.return_value = [
            "foo/%service/transports/river.py.j2",
            "foo/%service/transports/car.py.j2",
            "foo/%service/transports/grpc.py.j2",
            "foo/%service/transports/__init__.py.j2",
            "foo/%service/transports/base.py.j2",
            "foo/%service/async_client.py.j2",
            "foo/%service/client.py.j2",
            "mollusks/squid/sample.py.j2",
        ]

        with mock.patch.object(jinja2.Environment, "get_template") as gt:
            gt.return_value = jinja2.Template("Service: {{ service.name }}")
            api_schema = make_api(
                make_proto(
                    descriptor_pb2.FileDescriptorProto(service=[
                        descriptor_pb2.ServiceDescriptorProto(
                            name="SomeService"),
                    ]), ))

            cgr = g.get_response(api_schema=api_schema,
                                 opts=Options.build("transport=river+car"))
            assert len(cgr.file) == 5
            assert {i.name
                    for i in cgr.file} == {
                        "foo/some_service/transports/river.py",
                        "foo/some_service/transports/car.py",
                        "foo/some_service/transports/__init__.py",
                        "foo/some_service/transports/base.py",
                        # Only generate async client with grpc transport
                        "foo/some_service/client.py",
                    }

            cgr = g.get_response(api_schema=api_schema,
                                 opts=Options.build("transport=grpc"))
            assert len(cgr.file) == 5
            assert {i.name
                    for i in cgr.file} == {
                        "foo/some_service/transports/grpc.py",
                        "foo/some_service/transports/__init__.py",
                        "foo/some_service/transports/base.py",
                        "foo/some_service/client.py",
                        "foo/some_service/async_client.py",
                    }
Example No. 12
  def testMakeDescriptorWithNestedFields(self):
    file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
    file_descriptor_proto.name = 'Foo2'
    message_type = file_descriptor_proto.message_type.add()
    message_type.name = file_descriptor_proto.name
    nested_type = message_type.nested_type.add()
    nested_type.name = 'Sub'
    enum_type = nested_type.enum_type.add()
    enum_type.name = 'FOO'
    enum_type_val = enum_type.value.add()
    enum_type_val.name = 'BAR'
    enum_type_val.number = 3
    field = message_type.field.add()
    field.number = 1
    field.name = 'uint64_field'
    field.label = descriptor.FieldDescriptor.LABEL_REQUIRED
    field.type = descriptor.FieldDescriptor.TYPE_UINT64
    field = message_type.field.add()
    field.number = 2
    field.name = 'nested_message_field'
    field.label = descriptor.FieldDescriptor.LABEL_REQUIRED
    field.type = descriptor.FieldDescriptor.TYPE_MESSAGE
    field.type_name = 'Sub'
    enum_field = nested_type.field.add()
    enum_field.number = 2
    enum_field.name = 'bar_field'
    enum_field.label = descriptor.FieldDescriptor.LABEL_REQUIRED
    enum_field.type = descriptor.FieldDescriptor.TYPE_ENUM
    enum_field.type_name = 'Foo2.Sub.FOO'

    result = descriptor.MakeDescriptor(message_type)
    self.assertEqual(result.fields[0].cpp_type,
                     descriptor.FieldDescriptor.CPPTYPE_UINT64)
    self.assertEqual(result.fields[1].cpp_type,
                     descriptor.FieldDescriptor.CPPTYPE_MESSAGE)
    self.assertEqual(result.fields[1].message_type.containing_type,
                     result)
    self.assertEqual(result.nested_types[0].fields[0].full_name,
                     'Foo2.Sub.bar_field')
    self.assertEqual(result.nested_types[0].fields[0].enum_type,
                     result.nested_types[0].enum_types[0])
    self.assertFalse(result.has_options)
    self.assertFalse(result.fields[0].has_options)
    if api_implementation.Type() == 'cpp':
      with self.assertRaises(AttributeError):
        result.fields[0].has_options = False
Example No. 13
def test_get_response():
    # Create a generator with mock data.
    #
    # We want to ensure that templates are rendered for each service,
    # which we prove by sending two services.
    file_pb2 = descriptor_pb2.FileDescriptorProto(
        name='bacon.proto',
        package='foo.bar.v1',
        service=[
            descriptor_pb2.ServiceDescriptorProto(name='SpamService'),
            descriptor_pb2.ServiceDescriptorProto(name='EggsService')
        ],
    )
    api_schema = make_api(make_proto(file_pb2))
    g = generator.Generator(api_schema=api_schema)

    # Mock all the rendering methods.
    with mock.patch.object(g, '_render_templates') as _render_templates:
        _render_templates.return_value = [
            plugin_pb2.CodeGeneratorResponse.File(
                name='template_file',
                content='This was a template.',
            ),
        ]

        # Okay, now run the `get_response` method.
        response = g.get_response()

        # First and foremost, we care that we got a valid response
        # object back (albeit not so much what is in it).
        assert isinstance(response, plugin_pb2.CodeGeneratorResponse)

        # Next, determine that the general API templates and service
        # templates were both called; the method should be called
        # once per service plus one for the API as a whole.
        assert _render_templates.call_count == len(file_pb2.service) + 1

        # The service templates should have been called with the
        # filename transformation and the additional `service` variable.
        for call in _render_templates.mock_calls:
            _, args, kwargs = call
            if args[0] != g._env.loader.service_templates:
                continue
            service = kwargs['additional_context']['service']
            assert isinstance(service, wrappers.Service)
Example No. 14
def test_get_response_enumerates_services():
    g = make_generator()
    with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt:
        lt.return_value = ['foo/$service/baz.py.j2']
        with mock.patch.object(jinja2.Environment, 'get_template') as gt:
            gt.return_value = jinja2.Template('Service: {{ service.name }}')
            cgr = g.get_response(api_schema=make_api(
                make_proto(
                    descriptor_pb2.FileDescriptorProto(service=[
                        descriptor_pb2.ServiceDescriptorProto(name='Spam'),
                        descriptor_pb2.ServiceDescriptorProto(
                            name='EggsService'),
                    ]), )))
    assert len(cgr.file) == 2
    assert {i.name
            for i in cgr.file} == {
                'foo/spam/baz.py',
                'foo/eggs_service/baz.py',
            }
Example No. 15
def push_get4(comp):
    buf = io.BytesIO(b'')
    with proio.Writer(fileobj=buf) as writer:
        writer.set_compression(comp)

        eventsOut = []

        event = proio.Event()
        event.add_entry('test', descriptor_pb2.FileDescriptorProto())
        writer.push(event)
        eventsOut.append(event)

    buf.seek(0, 0)

    with proio.Reader(fileobj=buf) as reader:
        for i in range(len(eventsOut)):
            event = next(reader)
            assert event is not None
            assert str(event) == str(eventsOut[i])
Example No. 16
def MakeSimpleProtoClass(fields, full_name, pool=None):
    """Create a Protobuf class whose fields are basic types.

  Note: this doesn't validate field names!

  Args:
    fields: dict of {name: field_type} mappings for each field in the proto.
    full_name: str, the fully-qualified name of the proto type.
    pool: optional DescriptorPool instance.
  Returns:
    a class, the new protobuf class with a FileDescriptor.
  """
    factory = message_factory.MessageFactory(pool=pool)
    try:
        proto_cls = _GetMessageFromFactory(factory, full_name)
        return proto_cls
    except KeyError:
        # The factory's DescriptorPool doesn't know about this class yet.
        pass

    # Use a consistent file name that is unlikely to conflict with any imported
    # proto files.
    fields_hash = hashlib.sha1()
    for f_name, f_type in sorted(fields.items()):
        fields_hash.update(f_name.encode('utf8'))
        fields_hash.update(str(f_type).encode('utf8'))
    proto_file_name = fields_hash.hexdigest() + '.proto'

    package, name = full_name.rsplit('.', 1)
    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name)
    file_proto.package = package
    desc_proto = file_proto.message_type.add()
    desc_proto.name = name
    for f_number, (f_name, f_type) in enumerate(sorted(fields.items()), 1):
        field_proto = desc_proto.field.add()
        field_proto.name = f_name
        field_proto.number = f_number
        field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
        field_proto.type = f_type

    factory.pool.Add(file_proto)
    return _GetMessageFromFactory(factory, full_name)
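The registration half of the helper above can also be sketched directly against the public descriptor_pool API, with no factory involved: add a hand-built FileDescriptorProto to a private pool, then look the message type back up by its full name (the 'sketch.Pair' names are illustrative only).

from google.protobuf import descriptor_pb2, descriptor_pool

pool = descriptor_pool.DescriptorPool()
file_proto = descriptor_pb2.FileDescriptorProto(name='sketch/pair.proto',
                                                package='sketch')
msg_proto = file_proto.message_type.add(name='Pair')
msg_proto.field.add(
    name='key', number=1,
    type=descriptor_pb2.FieldDescriptorProto.TYPE_STRING,
    label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL)
pool.Add(file_proto)
pair_descriptor = pool.FindMessageTypeByName('sketch.Pair')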
Example No. 17
def test_get_filename_with_proto_and_sub():
    file_pb2 = descriptor_pb2.FileDescriptorProto(
        name="bacon.proto", package="foo.bar.v2.baz",
    )
    naming = make_naming(
        namespace=("Foo",), name="Bar", proto_package="foo.bar.v2", version="v2",
    )
    api = make_api(
        make_proto(file_pb2, naming=naming), naming=naming, subpackage_view=("baz",),
    )

    g = make_generator()
    assert (
        g._get_filename(
            "%name/types/%sub/%proto.py.j2",
            api_schema=api,
            context={"proto": api.protos["bacon.proto"]},
        )
        == "bar/types/baz/bacon.py"
    )
Example No. 18
def make_file_pb2(
    name: str = "my_proto.proto",
    package: str = "example.v1",
    messages: Sequence[wrappers.Message] = (),
    enums: Sequence[wrappers.Enum] = (),
    services: Sequence[wrappers.Service] = (),
    locations: Sequence[desc.SourceCodeInfo.Location] = (),
    options: desc.FileOptions = None,
    dependency: Sequence[str] = (),
    **kwargs,
) -> desc.FileDescriptorProto:
    return desc.FileDescriptorProto(
        name=name,
        package=package,
        message_type=[m.message_pb for m in messages],
        enum_type=[e.enum_pb for e in enums],
        service=[s.service_pb for s in services],
        source_code_info=desc.SourceCodeInfo(location=locations),
        options=options,
        dependency=dependency,
    )
Example No. 19
def _MakeFileDescriptorProto(proto_file_name, full_name, field_items):
    """Populate FileDescriptorProto for MessageFactory's DescriptorPool."""
    package, name = full_name.rsplit('.', 1)
    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name)
    file_proto.package = package
    desc_proto = file_proto.message_type.add()
    desc_proto.name = name
    for f_number, (f_name, f_type) in enumerate(field_items, 1):
        field_proto = desc_proto.field.add()
        field_proto.name = f_name
        # If the number falls in the reserved range, reassign it to the
        # correct number after the range.
        if f_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER:
            f_number += (
                descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER -
                descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1)
        field_proto.number = f_number
        field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
        field_proto.type = f_type
    return file_proto
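A minimal sketch of the arithmetic in the branch above, assuming protobuf's reserved field-number block (19000-19999, exposed as FIRST_RESERVED_FIELD_NUMBER and LAST_RESERVED_FIELD_NUMBER): any number that would fall inside the block is shifted by the block's size, so it lands immediately after it.

from google.protobuf import descriptor

first = descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER
last = descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER
span = last - first + 1
# A field that would have been numbered `first` is remapped to the first
# number past the reserved block (e.g. 19000 -> 20000).
assert first + span == last + 1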
Example No. 20
def sample_file_info(name):
    filename = name + ".proto"

    # Get the essential information about the proto package, and where
    # this component belongs within the file.
    package, marshal = _package_info.compile(name, {})

    # Get or create the information about the file, including the
    # descriptor to which the new message descriptor shall be added.
    return _file_info._FileInfo.registry.setdefault(
        filename,
        _file_info._FileInfo(
            descriptor=descriptor_pb2.FileDescriptorProto(
                name=filename, package=package, syntax="proto3",
            ),
            enums=collections.OrderedDict(),
            messages=collections.OrderedDict(),
            name=filename,
            nested={},
            nested_enum={},
        ),
    )
Example No. 21
    def testProtoSerializationJSON(self):
        placeholder_expression = """
      operator {
        proto_op {
          expression {
            placeholder {
              type: EXEC_PROPERTY
              key: "proto_property"
            }
          }
          proto_schema {
            message_type: "tfx.components.infra_validator.ServingSpec"
          }
          serialization_format: JSON
        }
      }
    """
        pb = text_format.Parse(placeholder_expression,
                               placeholder_pb2.PlaceholderExpression())

        # Prepare FileDescriptorSet
        fd = descriptor_pb2.FileDescriptorProto()
        infra_validator_pb2.ServingSpec().DESCRIPTOR.file.CopyToProto(fd)
        pb.operator.proto_op.proto_schema.file_descriptors.file.append(fd)

        expected_json_serialization = """\
{
  "tensorflow_serving": {
    "tags": [
      "latest",
      "1.15.0-gpu"
    ]
  }
}"""

        self.assertEqual(
            placeholder_utils.resolve_placeholder_expression(
                pb, self._resolution_context), expected_json_serialization)
Example No. 22
  def setUp(self):
    file_proto = descriptor_pb2.FileDescriptorProto(
        name='some/filename/some.proto',
        package='protobuf_unittest')
    message_proto = file_proto.message_type.add(
        name='NestedMessage')
    message_proto.field.add(
        name='bb',
        number=1,
        type=descriptor_pb2.FieldDescriptorProto.TYPE_INT32,
        label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL)
    enum_proto = message_proto.enum_type.add(
        name='ForeignEnum')
    enum_proto.value.add(name='FOREIGN_FOO', number=4)
    enum_proto.value.add(name='FOREIGN_BAR', number=5)
    enum_proto.value.add(name='FOREIGN_BAZ', number=6)

    self.pool = self.GetDescriptorPool()
    self.pool.Add(file_proto)
    self.my_file = self.pool.FindFileByName(file_proto.name)
    self.my_message = self.my_file.message_types_by_name[message_proto.name]
    self.my_enum = self.my_message.enum_types_by_name[enum_proto.name]

    self.my_method = descriptor.MethodDescriptor(
        name='Bar',
        full_name='protobuf_unittest.TestService.Bar',
        index=0,
        containing_service=None,
        input_type=None,
        output_type=None)
    self.my_service = descriptor.ServiceDescriptor(
        name='TestServiceWithOptions',
        full_name='protobuf_unittest.TestServiceWithOptions',
        file=self.my_file,
        index=0,
        methods=[
            self.my_method
        ])
Example No. 23
def test_get_filename_with_proto_and_sub():
    file_pb2 = descriptor_pb2.FileDescriptorProto(
        name='bacon.proto',
        package='foo.bar.v2.baz',
    )
    naming = make_naming(
        namespace=('Foo', ),
        name='Bar',
        proto_package='foo.bar.v2',
        version='v2',
    )
    api = make_api(
        make_proto(file_pb2, naming=naming),
        naming=naming,
        subpackage_view=('baz', ),
    )

    g = make_generator()
    assert g._get_filename(
        '$name/types/$sub/$proto.py.j2',
        api_schema=api,
        context={'proto': api.protos['bacon.proto']},
    ) == 'bar/types/baz/bacon.py'
Example No. 24
	def __init__(self, descriptor_bin, fdp=None, linker=None):

		# can only pass exactly one of them
		assert (descriptor_bin is None) != (fdp is None)

		if descriptor_bin is None:
			self.descriptor_bin = fdp.SerializeToString()
			self.fdp = fdp
		else:
			self.descriptor_bin = descriptor_bin
			self.fdp = descriptor_pb2.FileDescriptorProto()
			self.fdp.ParseFromString(descriptor_bin)

		if linker is None:
			self.linker = FDPLinker()
		else:
			self.linker = linker

		self.linker.add(self.fdp)

		self.module = type(__import__('sys'))(ModuleName(str(self.fdp.name)))
		setattr(self.module, 'descriptor', descriptor)
		setattr(self.module, 'message', message)
		setattr(self.module, 'reflection', reflection)
		setattr(self.module, 'descriptor_pb2', descriptor_pb2)

		# These method names are copied from python_generator.cc;
		# the behavior of each is preserved as closely as possible.
		self.PrintFileDescriptor()
		self.PrintTopLevelEnums()
		self.PrintTopLevelExtensions()
		self.PrintAllNestedEnumsInFile()
		self.PrintMessageDescriptors()
		self.FixForeignFieldsInDescriptors()
		self.PrintMessages()
		self.FixForeignFieldsInExtensions()
Example No. 25
def test_mock_value_recursive(mock_method, expected):
    # The elaborate setup is an unfortunate requirement.
    file_pb = descriptor_pb2.FileDescriptorProto(
        name="turtle.proto",
        package="animalia.chordata.v2",
        message_type=(
            descriptor_pb2.DescriptorProto(
                # It's turtles all the way down ;)
                name="Turtle",
                field=(descriptor_pb2.FieldDescriptorProto(
                    name="turtle",
                    type="TYPE_MESSAGE",
                    type_name=".animalia.chordata.v2.Turtle",
                    number=1,
                ), ),
            ), ),
    )
    my_api = api.API.build([file_pb], package="animalia.chordata.v2")
    turtle_field = my_api.messages["animalia.chordata.v2.Turtle"].fields[
        "turtle"]

    # If not handled properly, this will run forever and eventually OOM.
    actual = getattr(turtle_field, mock_method)
    assert actual == expected
Example No. 26
            type_context.ExtendOneof(oneof_index, field.name))
        fields += '%soneof %s {\n%s%s' % (oneof_leading_comment, oneof_proto.name,
                                          oneof_trailing_comment, FormatOptions(
                                              oneof_proto.options))
      fields += FormatBlock(FormatField(type_context.ExtendField(index, field.name), field))
    if oneof_index is not None:
      fields += '}\n\n'
    return '%smessage %s {\n%s%s%s%s%s%s\n}\n' % (leading_comment, msg_proto.name, trailing_comment,
                                                  formatted_options, formatted_enums,
                                                  formatted_msgs, reserved_fields, fields)

  def VisitFile(self, file_proto, type_context, services, msgs, enums):
    empty_file = len(services) == 0 and len(enums) == 0 and len(msgs) == 0
    header = FormatHeaderFromFile(type_context.source_code_info, file_proto, empty_file)
    formatted_services = FormatBlock('\n'.join(services))
    formatted_enums = FormatBlock('\n'.join(enums))
    formatted_msgs = FormatBlock('\n'.join(msgs))
    return ClangFormat(header + formatted_services + formatted_enums + formatted_msgs)


if __name__ == '__main__':
  proto_desc_path = sys.argv[1]
  file_proto = descriptor_pb2.FileDescriptorProto()
  input_text = pathlib.Path(proto_desc_path).read_text()
  if not input_text:
    sys.exit(0)
  text_format.Merge(input_text, file_proto)
  dst_path = pathlib.Path(sys.argv[2])
  utils.LoadTypeDb(sys.argv[3])
  dst_path.write_bytes(traverse.TraverseFile(file_proto, ProtoFormatVisitor()))
Example No. 27
 def create_ves_event():
     file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
     file_descriptor_proto.name = 'VesEvent'
     VESProtobuf.create_commoneventheader(file_descriptor_proto)
     VESProtobuf.create_vesevent(file_descriptor_proto)
     return file_descriptor_proto
Example No. 28
    del target_file_proto.message_type[:]
    shadow_msgs = {msg.name: msg for msg in shadow_file_proto.message_type}
    for index, msg in enumerate(active_file_proto.message_type):
        MergeActiveShadowMessage(
            package_type_context.ExtendMessage(index, msg.name,
                                               msg.options.deprecated), msg,
            shadow_msgs.get(msg.name), target_file_proto.message_type.add())
    # Visit enum types
    del target_file_proto.enum_type[:]
    shadow_enums = {msg.name: msg for msg in shadow_file_proto.enum_type}
    for enum in active_file_proto.enum_type:
        MergeActiveShadowEnum(enum, shadow_enums.get(enum.name),
                              target_file_proto.enum_type.add())
    # Ensure target has any deprecated message types in case they are needed.
    active_msg_names = set(
        [msg.name for msg in active_file_proto.message_type])
    for msg in shadow_file_proto.message_type:
        if msg.name not in active_msg_names:
            target_file_proto.message_type.add().MergeFrom(msg)
    return target_file_proto


if __name__ == '__main__':
    active_src, shadow_src, dst = sys.argv[1:]
    active_proto = descriptor_pb2.FileDescriptorProto()
    text_format.Merge(pathlib.Path(active_src).read_text(), active_proto)
    shadow_proto = descriptor_pb2.FileDescriptorProto()
    text_format.Merge(pathlib.Path(shadow_src).read_text(), shadow_proto)
    pathlib.Path(dst).write_text(
        str(MergeActiveShadowFile(active_proto, shadow_proto)))
Example No. 29
def _file_descriptor_to_proto(descriptor):
    proto = descriptor_pb2.FileDescriptorProto()
    descriptor.CopyToProto(proto)
    return proto.SerializeToString()
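A hypothetical round trip through the helper above, using descriptor_pb2's own file descriptor as input: the serialized bytes parse back into an equivalent FileDescriptorProto.

data = _file_descriptor_to_proto(descriptor_pb2.DESCRIPTOR)
restored = descriptor_pb2.FileDescriptorProto()
restored.ParseFromString(data)
assert restored.name == descriptor_pb2.DESCRIPTOR.name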
Example No. 30
def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
                   syntax=None):
  """Make a protobuf Descriptor given a DescriptorProto protobuf.

  Handles nested descriptors. Note that this is limited to the scope of defining
  a message inside of another message. Composite fields can currently only be
  resolved if the message is defined in the same scope as the field.

  Args:
    desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
    package: Optional package name for the new message Descriptor (string).
    build_file_if_cpp: Update the C++ descriptor pool if api matches.
                       Set to False on recursion, so no duplicates are created.
    syntax: The syntax/semantics that should be used.  Set to "proto3" to get
            proto3 field presence semantics.
  Returns:
    A Descriptor for protobuf messages.
  """
  if api_implementation.Type() == 'cpp' and build_file_if_cpp:
    # The C++ implementation requires all descriptors to be backed by the same
    # definition in the C++ descriptor pool. To do this, we build a
    # FileDescriptorProto with the same definition as this descriptor and build
    # it into the pool.
    from google.protobuf import descriptor_pb2
    file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
    file_descriptor_proto.message_type.add().MergeFrom(desc_proto)

    # Generate a random name for this proto file to prevent conflicts with any
    # imported ones. We need to specify a file name so the descriptor pool
    # accepts our FileDescriptorProto, but it is not important what that file
    # name is actually set to.
    proto_name = binascii.hexlify(os.urandom(16)).decode('ascii')

    if package:
      file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
                                                proto_name + '.proto')
      file_descriptor_proto.package = package
    else:
      file_descriptor_proto.name = proto_name + '.proto'

    _message.default_pool.Add(file_descriptor_proto)
    result = _message.default_pool.FindFileByName(file_descriptor_proto.name)

    if _USE_C_DESCRIPTORS:
      return result.message_types_by_name[desc_proto.name]

  full_message_name = [desc_proto.name]
  if package: full_message_name.insert(0, package)

  # Create Descriptors for enum types
  enum_types = {}
  for enum_proto in desc_proto.enum_type:
    full_name = '.'.join(full_message_name + [enum_proto.name])
    enum_desc = EnumDescriptor(
        enum_proto.name, full_name, None, [
            EnumValueDescriptor(enum_val.name, ii, enum_val.number,
                                create_key=_internal_create_key)
            for ii, enum_val in enumerate(enum_proto.value)],
        create_key=_internal_create_key)
    enum_types[full_name] = enum_desc

  # Create Descriptors for nested types
  nested_types = {}
  for nested_proto in desc_proto.nested_type:
    full_name = '.'.join(full_message_name + [nested_proto.name])
    # Nested types are just those defined inside of the message, not all types
    # used by fields in the message, so no loops are possible here.
    nested_desc = MakeDescriptor(nested_proto,
                                 package='.'.join(full_message_name),
                                 build_file_if_cpp=False,
                                 syntax=syntax)
    nested_types[full_name] = nested_desc

  fields = []
  for field_proto in desc_proto.field:
    full_name = '.'.join(full_message_name + [field_proto.name])
    enum_desc = None
    nested_desc = None
    if field_proto.json_name:
      json_name = field_proto.json_name
    else:
      json_name = None
    if field_proto.HasField('type_name'):
      type_name = field_proto.type_name
      full_type_name = '.'.join(full_message_name +
                                [type_name[type_name.rfind('.')+1:]])
      if full_type_name in nested_types:
        nested_desc = nested_types[full_type_name]
      elif full_type_name in enum_types:
        enum_desc = enum_types[full_type_name]
      # Else type_name references a non-local type, which isn't implemented
    field = FieldDescriptor(
        field_proto.name, full_name, field_proto.number - 1,
        field_proto.number, field_proto.type,
        FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
        field_proto.label, None, nested_desc, enum_desc, None, False, None,
        options=_OptionsOrNone(field_proto), has_default_value=False,
        json_name=json_name, create_key=_internal_create_key)
    fields.append(field)

  desc_name = '.'.join(full_message_name)
  return Descriptor(desc_proto.name, desc_name, None, None, fields,
                    list(nested_types.values()), list(enum_types.values()), [],
                    options=_OptionsOrNone(desc_proto),
                    create_key=_internal_create_key)
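A minimal sketch of calling the function above (it is the MakeDescriptor helper from google.protobuf.descriptor): hand-build a DescriptorProto with one scalar field and turn it into a live Descriptor.

from google.protobuf import descriptor_pb2

desc_proto = descriptor_pb2.DescriptorProto(name='Point')
desc_proto.field.add(
    name='x', number=1,
    type=descriptor_pb2.FieldDescriptorProto.TYPE_INT32,
    label=descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL)
point_descriptor = MakeDescriptor(desc_proto, package='sketch')
assert point_descriptor.full_name == 'sketch.Point'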