Exemplo n.º 1
0
def VersionUpgradeXform(n, envoy_internal_shadow, file_proto, params):
  r"""Transform a FileDescriptorProto from vN[alpha\d] to v(N+1).

  Args:
    n: version N to upgrade from.
    envoy_internal_shadow: generate a shadow for Envoy internal use containing
      deprecated fields.
    file_proto: vN[alpha\d] FileDescriptorProto message.
    params: plugin parameters.

  Returns:
    v(N+1) FileDescriptorProto message, or None when this proto is not part of
    an upgraded package.
  """
  typedb = utils.GetTypeDb()
  # Protos that the type database does not map to a next version need no work.
  if file_proto.name not in typedb.next_version_protos:
    return None
  if not typedb.next_version_protos[file_proto.name]:
    return None
  freeze = params.get('extra_args') == 'freeze'
  current_status = file_proto.options.Extensions[
      status_pb2.file_status].package_version_status
  # Normally we generate the NEXT_MAJOR_VERSION_CANDIDATE. However, when
  # freezing a previously ACTIVE major version, the migrated version becomes
  # the ACTIVE one.
  if freeze and current_status == status_pb2.ACTIVE:
    new_status = status_pb2.ACTIVE
  else:
    new_status = status_pb2.NEXT_MAJOR_VERSION_CANDIDATE
  visitor = UpgradeVisitor(n, typedb, envoy_internal_shadow, new_status)
  return traverse.TraverseFile(file_proto, visitor)
Exemplo n.º 2
0
def Plugin(output_descriptors):
    """Protoc plugin entry point.

    This defines protoc plugin and manages the stdin -> stdout flow. An
    api_proto_plugin is defined by the provided visitor.

    See
    http://www.expobrain.net/2015/09/13/create-a-plugin-for-google-protocol-buffer/
    for further details on protoc plugin basics.

    Args:
      output_descriptors: a list of OutputDescriptors.
    """
    # protoc hands us a serialized CodeGeneratorRequest on stdin and expects a
    # serialized CodeGeneratorResponse on stdout.
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.buffer.read())
    response = plugin_pb2.CodeGeneratorResponse()
    # Optional per-file cProfile profiling, toggled via environment variable.
    cprofile_enabled = os.getenv('CPROFILE_ENABLED')

    # We use request.file_to_generate rather than request.file_proto here since we
    # are invoked inside a Bazel aspect, each node in the DAG will be visited once
    # by the aspect and we only want to generate docs for the current node.
    for file_to_generate in request.file_to_generate:
        # Find the FileDescriptorProto for the file we actually are generating.
        file_proto = [
            pf for pf in request.proto_file if pf.name == file_to_generate
        ][0]
        if cprofile_enabled:
            pr = cProfile.Profile()
            pr.enable()
        for od in output_descriptors:
            # Always register the output file so the response shape is stable;
            # skipped files below simply keep empty content.
            f = response.file.add()
            f.name = file_proto.name + od.output_suffix
            # Don't run API proto plugins on things like WKT types etc.
            if not file_proto.package.startswith('envoy.'):
                continue
            # Plugin parameters arrive as one comma-separated key=value string;
            # only parse them for output descriptors that want them.
            if request.HasField("parameter") and od.want_params:
                params = dict(
                    param.split('=') for param in request.parameter.split(','))
                xformed_proto = od.xform(file_proto, params)
                visitor_factory = od.visitor_factory(params)
            else:
                xformed_proto = od.xform(file_proto)
                visitor_factory = od.visitor_factory()
            # A falsy transform result means this output is intentionally empty.
            f.content = traverse.TraverseFile(
                xformed_proto, visitor_factory) if xformed_proto else ''
        if cprofile_enabled:
            pr.disable()
            # Emit profiler stats as an extra sibling output file.
            stats_stream = io.StringIO()
            ps = pstats.Stats(pr, stream=stats_stream).sort_stats(
                os.getenv('CPROFILE_SORTBY', 'cumulative'))
            stats_file = response.file.add()
            stats_file.name = file_proto.name + '.profile'
            ps.print_stats()
            stats_file.content = stats_stream.getvalue()
        # Also include the original FileDescriptorProto as text proto, this is
        # useful when debugging.
        descriptor_file = response.file.add()
        descriptor_file.name = file_proto.name + ".descriptor.proto"
        descriptor_file.content = str(file_proto)
    sys.stdout.buffer.write(response.SerializeToString())
Exemplo n.º 3
0
def Plugin(output_suffix, visitor):
  """Protoc plugin entry point.

  This defines protoc plugin and manages the stdin -> stdout flow. An
  api_proto_plugin is defined by the provided visitor.

  See
  http://www.expobrain.net/2015/09/13/create-a-plugin-for-google-protocol-buffer/
  for further details on protoc plugin basics.

  Args:
    output_suffix: output files are generated alongside their corresponding
      input .proto, with this filename suffix.
    visitor: visitor.Visitor defining the business logic of the plugin.
  """
  # protoc hands us a serialized CodeGeneratorRequest on stdin and expects a
  # serialized CodeGeneratorResponse on stdout.
  request = plugin_pb2.CodeGeneratorRequest()
  request.ParseFromString(sys.stdin.buffer.read())
  response = plugin_pb2.CodeGeneratorResponse()
  # Optional per-file cProfile profiling, toggled via environment variable.
  cprofile_enabled = os.getenv('CPROFILE_ENABLED')

  # We use request.file_to_generate rather than request.file_proto here since we
  # are invoked inside a Bazel aspect, each node in the DAG will be visited once
  # by the aspect and we only want to generate docs for the current node.
  for file_to_generate in request.file_to_generate:
    # Find the FileDescriptorProto for the file we actually are generating.
    file_proto = [pf for pf in request.proto_file if pf.name == file_to_generate][0]
    f = response.file.add()
    f.name = file_proto.name + output_suffix
    if cprofile_enabled:
      pr = cProfile.Profile()
      pr.enable()
    # We don't actually generate any RST right now, we just string dump the
    # input proto file descriptor into the output file.
    f.content = traverse.TraverseFile(file_proto, visitor)
    if cprofile_enabled:
      pr.disable()
      # Emit profiler stats as an extra sibling output file.
      stats_stream = io.StringIO()
      ps = pstats.Stats(pr,
                        stream=stats_stream).sort_stats(os.getenv('CPROFILE_SORTBY', 'cumulative'))
      stats_file = response.file.add()
      stats_file.name = file_proto.name + output_suffix + '.profile'
      ps.print_stats()
      stats_file.content = stats_stream.getvalue()
    # Also include the original FileDescriptorProto as text proto, this is
    # useful when debugging.
    descriptor_file = response.file.add()
    descriptor_file.name = file_proto.name + ".descriptor.proto"
    descriptor_file.content = str(file_proto)
  sys.stdout.buffer.write(response.SerializeToString())
Exemplo n.º 4
0
def V3MigrationXform(file_proto):
  r"""Transform a FileDescriptorProto from v2[alpha\d] to v3alpha.

  Args:
    file_proto: v2[alpha\d] FileDescriptorProto message.

  Returns:
    v3 FileDescriptorProto message, or None when the package needs no upgrade.
  """
  typedb = utils.LoadTypeDb()
  # Packages absent from (or empty in) the type database's upgrade map are
  # left untouched.
  if file_proto.package not in typedb.next_version_packages:
    return None
  if not typedb.next_version_packages[file_proto.package]:
    return None
  return traverse.TraverseFile(file_proto, UpgradeVisitor(typedb))
Exemplo n.º 5
0
def V3MigrationXform(envoy_internal_shadow, file_proto):
    r"""Transform a FileDescriptorProto from v2[alpha\d] to v3.

    Args:
      envoy_internal_shadow: generate a shadow for Envoy internal use
        containing deprecated fields.
      file_proto: v2[alpha\d] FileDescriptorProto message.

    Returns:
      v3 FileDescriptorProto message, or None when the proto needs no upgrade.
    """
    typedb = utils.GetTypeDb()
    # Protos that the type database does not map to a next version are left
    # untouched.
    if file_proto.name not in typedb.next_version_protos:
        return None
    if not typedb.next_version_protos[file_proto.name]:
        return None
    visitor = UpgradeVisitor(typedb, envoy_internal_shadow)
    return traverse.TraverseFile(file_proto, visitor)
Exemplo n.º 6
0
def FormatHeaderFromFile(source_code_info, file_proto, empty_file):
  """Format proto header.

  Args:
    source_code_info: SourceCodeInfo object.
    file_proto: FileDescriptorProto for file.
    empty_file: are there no message/enum/service defs in file?

  Returns:
    Formatted proto header as a string.
  """
  # Load the type database.
  typedb = utils.GetTypeDb()
  # Figure out type dependencies in this .proto.
  types = Types()
  text_format.Merge(traverse.TraverseFile(file_proto, type_whisperer.TypeWhispererVisitor()), types)
  type_dependencies = sum([list(t.type_dependencies) for t in types.types.values()], [])
  # Service request/response types are dependencies too; strip the leading '.'
  # from the fully qualified type names.
  for service in file_proto.service:
    for m in service.method:
      type_dependencies.extend([m.input_type[1:], m.output_type[1:]])
  # Determine the envoy/ import paths from type deps.
  envoy_proto_paths = set(
      typedb.types[t].proto_path
      for t in type_dependencies
      if t.startswith('envoy.') and typedb.types[t].proto_path != file_proto.name)

  def CamelCase(s):
    # Raw string fixes the invalid '\.' escape warning; '.' and '_' need no
    # escaping inside a regex character class.
    return ''.join(t.capitalize() for t in re.split(r'[._]', s))

  package_line = 'package %s;\n' % file_proto.package
  file_block = '\n'.join(['syntax = "proto3";\n', package_line])

  options = descriptor_pb2.FileOptions()
  options.java_outer_classname = CamelCase(os.path.basename(file_proto.name))
  options.java_multiple_files = True
  options.java_package = 'io.envoyproxy.' + file_proto.package

  # This is a workaround for C#/Ruby namespace conflicts between packages and
  # objects, see https://github.com/envoyproxy/envoy/pull/3854.
  # TODO(htuch): remove once v3 fixes this naming issue in
  # https://github.com/envoyproxy/envoy/issues/8120.
  if file_proto.package in ['envoy.api.v2.listener', 'envoy.api.v2.cluster']:
    qualified_package = '.'.join(s.capitalize() for s in file_proto.package.split('.')) + 'NS'
    options.csharp_namespace = qualified_package
    options.ruby_package = qualified_package

  if file_proto.service:
    options.java_generic_services = True

  if file_proto.options.HasExtension(migrate_pb2.file_migrate):
    options.Extensions[migrate_pb2.file_migrate].CopyFrom(
        file_proto.options.Extensions[migrate_pb2.file_migrate])

  if file_proto.options.HasExtension(
      status_pb2.file_status) and file_proto.package.endswith('alpha'):
    options.Extensions[status_pb2.file_status].CopyFrom(
        file_proto.options.Extensions[status_pb2.file_status])

  # Files with definitions always carry the package version status forward.
  if not empty_file:
    options.Extensions[
        status_pb2.file_status].package_version_status = file_proto.options.Extensions[
            status_pb2.file_status].package_version_status

  options_block = FormatOptions(options)

  requires_versioning_import = any(
      protoxform_options.GetVersioningAnnotation(m.options) for m in file_proto.message_type)

  envoy_imports = list(envoy_proto_paths)
  google_imports = []
  infra_imports = []
  misc_imports = []
  public_imports = []

  # Bucket the existing dependencies so each group can be emitted as its own
  # sorted import block below.
  for idx, d in enumerate(file_proto.dependency):
    if idx in file_proto.public_dependency:
      public_imports.append(d)
      continue
    elif d.startswith('envoy/annotations') or d.startswith('udpa/annotations'):
      infra_imports.append(d)
    elif d.startswith('envoy/'):
      # We ignore existing envoy/ imports, since these are computed explicitly
      # from type_dependencies.
      pass
    elif d.startswith('google/'):
      google_imports.append(d)
    elif d.startswith('validate/'):
      infra_imports.append(d)
    elif d in ['udpa/annotations/versioning.proto', 'udpa/annotations/status.proto']:
      # Skip, we decide to add this based on requires_versioning_import and options.
      pass
    else:
      misc_imports.append(d)

  if options.HasExtension(status_pb2.file_status):
    infra_imports.append('udpa/annotations/status.proto')

  if requires_versioning_import:
    infra_imports.append('udpa/annotations/versioning.proto')

  def FormatImportBlock(xs):
    # Deduplicate, drop empties and emit deterministically sorted imports.
    if not xs:
      return ''
    return FormatBlock('\n'.join(sorted('import "%s";' % x for x in set(xs) if x)))

  def FormatPublicImportBlock(xs):
    if not xs:
      return ''
    return FormatBlock('\n'.join(sorted('import public "%s";' % x for x in xs)))

  import_block = '\n'.join(
      map(FormatImportBlock, [envoy_imports, google_imports, misc_imports, infra_imports]))
  import_block += '\n' + FormatPublicImportBlock(public_imports)
  comment_block = FormatComments(source_code_info.file_level_comments)

  return ''.join(map(FormatBlock, [file_block, import_block, options_block, comment_block]))
Exemplo n.º 7
0
            type_context.ExtendOneof(oneof_index, field.name))
        fields += '%soneof %s {\n%s%s' % (oneof_leading_comment, oneof_proto.name,
                                          oneof_trailing_comment, FormatOptions(
                                              oneof_proto.options))
      fields += FormatBlock(FormatField(type_context.ExtendField(index, field.name), field))
    if oneof_index is not None:
      fields += '}\n\n'
    return '%smessage %s {\n%s%s%s%s%s%s\n}\n' % (leading_comment, msg_proto.name, trailing_comment,
                                                  formatted_options, formatted_enums,
                                                  formatted_msgs, reserved_fields, fields)

  def VisitFile(self, file_proto, type_context, services, msgs, enums):
    """Assemble and clang-format the whole .proto file: header, then services, enums and messages."""
    empty_file = not (services or enums or msgs)
    header = FormatHeaderFromFile(type_context.source_code_info, file_proto, empty_file)
    body = ''.join(FormatBlock('\n'.join(section)) for section in (services, enums, msgs))
    return ClangFormat(header + body)


if __name__ == '__main__':
  # Usage: <prog> <descriptor_text_path> <output_path> <type_db_path>
  descriptor_path = sys.argv[1]
  file_proto = descriptor_pb2.FileDescriptorProto()
  input_text = pathlib.Path(descriptor_path).read_text()
  # An empty descriptor dump means there is nothing to format.
  if not input_text:
    sys.exit(0)
  text_format.Merge(input_text, file_proto)
  output_path = pathlib.Path(sys.argv[2])
  utils.LoadTypeDb(sys.argv[3])
  output_path.write_bytes(traverse.TraverseFile(file_proto, ProtoFormatVisitor()))
Exemplo n.º 8
0
                            field))
        if oneof_index is not None:
            fields += '}\n\n'
        return '%smessage %s {\n%s%s%s%s%s%s\n}\n' % (
            leading_comment, msg_proto.name, trailing_comment,
            formatted_options, formatted_enums, formatted_msgs,
            reserved_fields, fields)

    def VisitFile(self, file_proto, type_context, services, msgs, enums):
        """Assemble and clang-format the whole .proto file: header, then services, enums and messages."""
        empty_file = not (services or enums or msgs)
        header = FormatHeaderFromFile(type_context.source_code_info,
                                      file_proto, empty_file)
        sections = [
            FormatBlock('\n'.join(part)) for part in (services, enums, msgs)
        ]
        return ClangFormat(header + ''.join(sections))


if __name__ == '__main__':
    # Usage: <prog> <descriptor_text_path> <output_path> <type_db_path>
    descriptor_path = sys.argv[1]
    file_proto = descriptor_pb2.FileDescriptorProto()
    input_text = pathlib.Path(descriptor_path).read_text()
    # An empty descriptor dump means there is nothing to format.
    if not input_text:
        sys.exit(0)
    text_format.Merge(input_text, file_proto)
    output_path = pathlib.Path(sys.argv[2])
    utils.LoadTypeDb(sys.argv[3])
    output_path.write_bytes(
        traverse.TraverseFile(file_proto, ProtoFormatVisitor()))
Exemplo n.º 9
0
def FormatHeaderFromFile(source_code_info, file_proto):
    """Format proto header.

    Args:
      source_code_info: SourceCodeInfo object.
      file_proto: FileDescriptorProto for file.

    Returns:
      Formatted proto header as a string.
    """
    # Load the type database.
    typedb = utils.LoadTypeDb()
    # Figure out type dependencies in this .proto.
    types = Types()
    text_format.Merge(
        traverse.TraverseFile(file_proto,
                              type_whisperer.TypeWhispererVisitor()), types)
    type_dependencies = sum(
        [list(t.type_dependencies) for t in types.types.values()], [])
    # Service request/response types are dependencies too; strip the leading
    # '.' from the fully qualified type names.
    for service in file_proto.service:
        for m in service.method:
            type_dependencies.extend([m.input_type[1:], m.output_type[1:]])
    # Determine the envoy/ import paths from type deps.
    envoy_proto_paths = set(typedb.types[t].proto_path
                            for t in type_dependencies
                            if t.startswith('envoy.')
                            and typedb.types[t].proto_path != file_proto.name)

    def CamelCase(s):
        # Raw string fixes the invalid '\.' escape warning; '.' and '_' need
        # no escaping inside a regex character class.
        return ''.join(t.capitalize() for t in re.split(r'[._]', s))

    package_line = 'package %s;\n' % file_proto.package
    file_block = '\n'.join(['syntax = "proto3";\n', package_line])

    options = [
        'option java_outer_classname = "%s";' %
        CamelCase(os.path.basename(file_proto.name)),
        'option java_multiple_files = true;',
        'option java_package = "io.envoyproxy.%s";' % file_proto.package,
    ]
    # This is a workaround for C#/Ruby namespace conflicts between packages and
    # objects, see https://github.com/envoyproxy/envoy/pull/3854.
    # TODO(htuch): remove once v3 fixes this naming issue in
    # https://github.com/envoyproxy/envoy/issues/8120.
    if file_proto.package in ['envoy.api.v2.listener', 'envoy.api.v2.cluster']:
        qualified_package = '.'.join(
            s.capitalize() for s in file_proto.package.split('.')) + 'NS'
        options += [
            'option csharp_namespace = "%s";' % qualified_package,
            'option ruby_package = "%s";' % qualified_package,
        ]
    if file_proto.service:
        options += ['option java_generic_services = true;']
    options_block = FormatBlock('\n'.join(options))

    envoy_imports = list(envoy_proto_paths)
    google_imports = []
    infra_imports = []
    misc_imports = []

    # Bucket the existing dependencies so each group can be emitted as its own
    # sorted import block below.
    for d in file_proto.dependency:
        if d.startswith('envoy/'):
            # We ignore existing envoy/ imports, since these are computed explicitly
            # from type_dependencies.
            pass
        elif d.startswith('google/'):
            google_imports.append(d)
        elif d.startswith('validate/'):
            infra_imports.append(d)
        else:
            misc_imports.append(d)

    def FormatImportBlock(xs):
        if not xs:
            return ''
        return FormatBlock('\n'.join(sorted('import "%s";' % x for x in xs)))

    import_block = '\n'.join(
        map(FormatImportBlock,
            [envoy_imports, google_imports, misc_imports, infra_imports]))
    comment_block = FormatComments(source_code_info.file_level_comments)

    return ''.join(
        map(FormatBlock,
            [file_block, options_block, import_block, comment_block]))