Example #1
def test_build_factory():
    proto = descriptor_pb2.FileDescriptorProto(
        package='google.mollusc.v1alpha1')
    old = naming.Naming.build(proto, opts=options.Options(old_naming=True))
    assert old.versioned_module_name == 'mollusc.v1alpha1'

    new = naming.Naming.build(proto, opts=options.Options())
    assert new.versioned_module_name == 'mollusc_v1alpha1'
Example #2
    def build(cls,
              file_descriptor: descriptor_pb2.FileDescriptorProto,
              file_to_generate: bool,
              naming: api_naming.Naming,
              opts: options.Options = options.Options(),
              prior_protos: Optional[Mapping[str, 'Proto']] = None,
              load_services: bool = True) -> 'Proto':
        """Build and return a Proto instance.

        Args:
            file_descriptor (~.FileDescriptorProto): The protocol buffer
                object describing the proto file.
            file_to_generate (bool): Whether this is a file which is
                to be directly generated, or a dependency.
            naming (~.Naming): The :class:`~.Naming` instance associated
                with the API.
            opts (~.options.Options): CLI options passed to the generator.
            prior_protos (~.Proto): Previous, already processed protos.
                These are needed to look up messages in imported protos.
            load_services (bool): Toggle whether the proto file should
                load its services. Not doing so enables a two-pass fix for
                LRO response and metadata types in certain situations.
        """
        return _ProtoBuilder(file_descriptor,
                             file_to_generate=file_to_generate,
                             naming=naming,
                             opts=opts,
                             prior_protos=prior_protos or {},
                             load_services=load_services).proto
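A hypothetical usage sketch for the factory above, assuming the module
imports seen elsewhere in this listing (descriptor_pb2, naming, options)
and a made-up proto filename; per the body above, it simply delegates to
_ProtoBuilder:

from google.protobuf import descriptor_pb2

fd = descriptor_pb2.FileDescriptorProto(
    name='google/mollusc/v1/mollusc.proto',
    package='google.mollusc.v1',
)
proto = Proto.build(
    fd,
    file_to_generate=True,
    naming=naming.Naming.build(fd, opts=options.Options()),
    opts=options.Options(),
)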
Example #3
def test_cli_override_name_and_namespace_versionless():
    FileDesc = descriptor_pb2.FileDescriptorProto
    proto1 = FileDesc(package='google.translation')
    n = naming.Naming.build(
        proto1,
        opts=options.Options(namespace=('google', 'cloud'), name='translate'),
    )
    assert n.namespace == ('Google', 'Cloud')
    assert n.name == 'Translate'
    assert not n.version
Example #4
def test_cli_override_namespace_dotted():
    FileDesc = descriptor_pb2.FileDescriptorProto
    proto1 = FileDesc(package='google.spanner.v1')
    n = naming.Naming.build(
        proto1,
        opts=options.Options(namespace=('google.cloud', )),
    )
    assert n.namespace == ('Google', 'Cloud')
    assert n.name == 'Spanner'
    assert n.version == 'v1'
Example #5
def test_cli_override_name_underscores():
    FileDesc = descriptor_pb2.FileDescriptorProto
    proto1 = FileDesc(package='google.cloud.videointelligence.v1')
    n = naming.Naming.build(
        proto1,
        opts=options.Options(name='video_intelligence'),
    )
    assert n.namespace == ('Google', 'Cloud')
    assert n.name == 'Video Intelligence'
    assert n.version == 'v1'
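The capitalization asserted in these three tests can be reproduced with
plain Python string operations; this mirrors the normalization performed
by Naming.build (shown in full later in this listing):

name = 'video_intelligence'
assert ' '.join(
    i.capitalize() for i in name.replace('_', ' ').split(' ')
) == 'Video Intelligence'

namespace = ('google.cloud',)
assert tuple(
    i.capitalize() for i in '.'.join(namespace).split('.')
) == ('Google', 'Cloud')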
Example #6
    def build(
        cls,
        file_descriptors: Sequence[descriptor_pb2.FileDescriptorProto],
        package: str = '',
        opts: options.Options = options.Options()) -> 'API':
        """Build the internal API schema based on the request.

        Args:
            file_descriptors (Sequence[~.FileDescriptorProto]): A list of
                :class:`~.FileDescriptorProto` objects describing the
                API.
            package (str): A protocol buffer package, as a string, for which
                code should be explicitly generated (including subpackages).
                Protos with packages outside this list are considered imports
                rather than explicit targets.
            opts (~.options.Options): CLI options passed to the generator.
        """
        # Save information about the overall naming for this API.
        naming = api_naming.Naming.build(
            *filter(lambda fd: fd.package.startswith(package), file_descriptors),
            opts=opts,
        )

        # Iterate over each FileDescriptorProto and fill out a Proto
        # object describing it, and save these to the instance.
        protos: Dict[str, Proto] = {}
        for fd in file_descriptors:
            protos[fd.name] = _ProtoBuilder(
                file_descriptor=fd,
                file_to_generate=fd.package.startswith(package),
                naming=naming,
                opts=opts,
                prior_protos=protos,
            ).proto

        # Done; return the API.
        return cls(naming=naming, all_protos=protos)
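A small self-contained sketch (standard protobuf API only) of the
package-prefix test used above to separate generation targets from
imported dependencies:

from google.protobuf import descriptor_pb2

fds = [
    descriptor_pb2.FileDescriptorProto(
        name='foo.proto', package='google.example.v1'),
    descriptor_pb2.FileDescriptorProto(
        name='operations.proto', package='google.longrunning'),
]
targets = [fd.name for fd in fds
           if fd.package.startswith('google.example.v1')]
assert targets == ['foo.proto']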
Example #7
    def __init__(self,
                 file_descriptor: descriptor_pb2.FileDescriptorProto,
                 file_to_generate: bool,
                 naming: api_naming.Naming,
                 opts: options.Options = options.Options(),
                 prior_protos: Optional[Mapping[str, Proto]] = None,
                 load_services: bool = True):
        self.proto_messages: Dict[str, wrappers.MessageType] = {}
        self.proto_enums: Dict[str, wrappers.EnumType] = {}
        self.proto_services: Dict[str, wrappers.Service] = {}
        self.file_descriptor = file_descriptor
        self.file_to_generate = file_to_generate
        self.prior_protos = prior_protos or {}
        self.opts = opts

        # Iterate over the documentation and place it into a dictionary.
        #
        # The comments in protocol buffers are sorted by a concept called
        # the "path", which is a sequence of integers described in more
        # detail below; this code simply shifts from a list to a dict,
        # with tuples of paths as the dictionary keys.
        self.docs: Dict[Tuple[int, ...],
                        descriptor_pb2.SourceCodeInfo.Location] = {}
        for location in file_descriptor.source_code_info.location:
            self.docs[tuple(location.path)] = location

        # Everything has an "address", which is the proto where the thing
        # was declared.
        #
        # We put this together by a baton pass of sorts: everything in
        # this file *starts with* this address, which is appended to
        # for each item as it is loaded.
        self.address = metadata.Address(
            api_naming=naming,
            module=file_descriptor.name.split('/')[-1][:-len('.proto')],
            package=tuple(file_descriptor.package.split('.')),
        )

        # Now iterate over the FileDescriptorProto and pull out each of
        # the messages, enums, and services.
        #
        # The hard-coded path keys sent here are based on how descriptor.proto
        # works; each key is the field number of the corresponding piece of
        # the file descriptor (e.g. the hard-coded `4` for `message_type`
        # below is because `repeated DescriptorProto message_type = 4;` in
        # descriptor.proto itself).
        self._load_children(file_descriptor.enum_type,
                            self._load_enum,
                            address=self.address,
                            path=(5, ))
        self._load_children(file_descriptor.message_type,
                            self._load_message,
                            address=self.address,
                            path=(4, ))

        # Edge case: Protocol buffers is not particularly picky about
        # ordering, and it is possible that a message will have had a field
        # referencing another message which appears later in the file
        # (or itself, recursively).
        #
        # In this situation, we would not have come across the message yet,
        # and the field would have its original textual reference to the
        # message (`type_name`) but not its resolved message wrapper.
        orphan_field_gen = (
            (field.type_name.lstrip('.'), field)
            for message in self.proto_messages.values()
            for field in message.fields.values()
            if field.type_name and not (field.message or field.enum))
        for key, field in orphan_field_gen:
            maybe_msg_type = self.proto_messages.get(key)
            maybe_enum_type = self.proto_enums.get(key)
            if maybe_msg_type:
                object.__setattr__(field, 'message', maybe_msg_type)
            elif maybe_enum_type:
                object.__setattr__(field, 'enum', maybe_enum_type)
            else:
                raise TypeError(f"Unknown type referenced in "
                                f"{self.file_descriptor.name}: '{key}'")

        # Only generate the service if this is a target file to be generated.
        # This prevents us from generating common services (e.g. LRO) when
        # they are being used as an import just to get types declared in the
        # same files.
        if file_to_generate and load_services:
            self._load_children(file_descriptor.service,
                                self._load_service,
                                address=self.address,
                                path=(6, ))
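The path-keyed docs dictionary built in the constructor above can be
illustrated with the protobuf API alone; (4, 0) addresses the first
message_type because message_type is field 4 of FileDescriptorProto:

from google.protobuf import descriptor_pb2

fd = descriptor_pb2.FileDescriptorProto(name='demo.proto', package='demo.v1')
fd.message_type.add(name='Widget')
loc = fd.source_code_info.location.add()
loc.path[:] = (4, 0)
loc.leading_comments = 'This is the Widget message.'

docs = {tuple(l.path): l for l in fd.source_code_info.location}
assert docs[(4, 0)].leading_comments == 'This is the Widget message.'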
Example #8
    def build(
        cls,
        file_descriptors: Sequence[descriptor_pb2.FileDescriptorProto],
        package: str = '',
        opts: options.Options = options.Options(),
        prior_protos: Optional[Mapping[str, 'Proto']] = None,
    ) -> 'API':
        """Build the internal API schema based on the request.

        Args:
            file_descriptors (Sequence[~.FileDescriptorProto]): A list of
                :class:`~.FileDescriptorProto` objects describing the
                API.
            package (str): A protocol buffer package, as a string, for which
                code should be explicitly generated (including subpackages).
                Protos with packages outside this list are considered imports
                rather than explicit targets.
            opts (~.options.Options): CLI options passed to the generator.
            prior_protos (~.Proto): Previous, already processed protos.
                These are needed to look up messages in imported protos.
                Primarily used for testing.
        """
        # Save information about the overall naming for this API.
        naming = api_naming.Naming.build(
            *filter(lambda fd: fd.package.startswith(package), file_descriptors),
            opts=opts,
        )

        def disambiguate_keyword_fname(full_path: str,
                                       visited_names: Container[str]) -> str:
            path, fname = os.path.split(full_path)
            name, ext = os.path.splitext(fname)
            if name in keyword.kwlist or full_path in visited_names:
                name += "_"
                full_path = os.path.join(path, name + ext)
                if full_path in visited_names:
                    return disambiguate_keyword_fname(full_path, visited_names)

            return full_path

        # Iterate over each FileDescriptorProto and fill out a Proto
        # object describing it, and save these to the instance.
        #
        # The first pass gathers messages and enums but NOT services or methods.
        # This is a workaround for a limitation in protobuf annotations for
        # long running operations: the annotations are strings that reference
        # message types but do not require a proto import.
        # This hack attempts to address a common case where API authors,
        # not wishing to generate an 'unused import' warning,
        # don't import the proto file defining the real response or metadata
        # type into the proto file that defines an LRO.
        # We just load all the API's types first and then
        # load the services and methods with the full scope of types.
        pre_protos: Dict[str, Proto] = dict(prior_protos or {})
        for fd in file_descriptors:
            fd.name = disambiguate_keyword_fname(fd.name, pre_protos)
            pre_protos[fd.name] = Proto.build(
                file_descriptor=fd,
                file_to_generate=fd.package.startswith(package),
                naming=naming,
                opts=opts,
                prior_protos=pre_protos,
                # Ugly, ugly hack.
                load_services=False,
            )

        # Second pass uses all the messages and enums defined in the entire API.
        # This allows LRO returning methods to see all the types in the API,
        # bypassing the above missing import problem.
        protos: Dict[str, Proto] = {
            name: Proto.build(
                file_descriptor=proto.file_pb2,
                file_to_generate=proto.file_to_generate,
                naming=naming,
                opts=opts,
                prior_protos=pre_protos,
            )
            for name, proto in pre_protos.items()
        }

        # Done; return the API.
        return cls(naming=naming, all_protos=protos)
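The keyword-renaming helper defined inside build can be exercised in
isolation; a standalone re-implementation (stdlib only, names hypothetical)
behaves like this:

import keyword
import os

def _disambiguate(full_path, visited):
    # Mirrors disambiguate_keyword_fname above: append underscores until
    # the name is neither a Python keyword nor already taken.
    path, fname = os.path.split(full_path)
    name, ext = os.path.splitext(fname)
    if name in keyword.kwlist or full_path in visited:
        name += '_'
        full_path = os.path.join(path, name + ext)
        if full_path in visited:
            return _disambiguate(full_path, visited)
    return full_path

assert _disambiguate('pkg/import.proto', set()) == 'pkg/import_.proto'
assert _disambiguate('pkg/import.proto',
                     {'pkg/import_.proto'}) == 'pkg/import__.proto'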
Example #9
    def build(
            cls,
            *file_descriptors: descriptor_pb2.FileDescriptorProto,
            opts: options.Options = options.Options(),
    ) -> 'Naming':
        """Return a full Naming instance based on these file descriptors.

        This is pieced together from the proto package names as well as the
        ``google.api.metadata`` file annotation. This information may be
        present in one or many files; this method is tolerant as long as
        the data does not conflict.

        Args:
            file_descriptors (Iterable[~.FileDescriptorProto]): A list of
                file descriptor protos. This list should only include the
                files actually targeted for output (not their imports).

        Returns:
            ~.Naming: A :class:`~.Naming` instance which is provided to
                templates as part of the :class:`~.API`.

        Raises:
            ValueError: If the provided file descriptors contain contradictory
                information.
        """
        # Determine the set of proto packages.
        proto_packages = {fd.package for fd in file_descriptors}
        root_package = os.path.commonprefix(tuple(proto_packages)).rstrip('.')

        # Sanity check: If there is no common ground in the package,
        # we are obviously in trouble.
        if not root_package:
            raise ValueError(
                'The protos provided do not share a common root package. '
                'Ensure that all explicitly-specified protos are for a '
                'single API. '
                f'The packages we got are: {", ".join(proto_packages)}')

        # Define the valid regex to split the package.
        #
        # It is not necessary for the regex to be as particular about package
        # name validity (e.g. avoiding .. or segments starting with numbers)
        # because protoc is guaranteed to give us valid package names.
        pattern = r'^((?P<namespace>[a-z0-9_.]+)\.)?(?P<name>[a-z0-9_]+)'

        # Only require the version portion of the regex if the version is
        # present.
        #
        # This code may look counter-intuitive (why not use ? to make the
        # version optional), but the regex engine's greedy matching would
        # then treat the version segment as the name, which is not what
        # we want.
        version = r'\.(?P<version>v[0-9]+(p[0-9]+)?((alpha|beta)[0-9]+)?)'
        if re.search(version, root_package):
            pattern += version

        # Okay, do the match
        match = cast(Match, re.search(pattern=pattern,
                                      string=root_package)).groupdict()
        match['namespace'] = match['namespace'] or ''
        package_info = cls(
            name=match['name'].capitalize(),
            namespace=tuple(
                [i.capitalize() for i in match['namespace'].split('.') if i]),
            product_name=match['name'].capitalize(),
            proto_package=root_package,
            version=match.get('version', ''),
        )

        # Sanity check: Ensure that the package directives all inferred
        # the same information.
        if not package_info.version and len(proto_packages) > 1:
            raise ValueError('All protos must have the same proto package '
                             'up to and including the version.')

        # If naming information was provided on the CLI, override the
        # inferred values.
        #
        # We are liberal about what formats we take on the CLI; it will
        # likely make sense to many users to use dot-separated namespaces and
        # snake case, so handle that and do the right thing.
        if opts.name:
            package_info = dataclasses.replace(
                package_info,
                name=' '.join([
                    i.capitalize()
                    for i in opts.name.replace('_', ' ').split(' ')
                ]))
        if opts.namespace:
            package_info = dataclasses.replace(
                package_info,
                namespace=tuple([
                    # The join-and-split on "." here causes us to expand out
                    # dot notation that we may have been sent; e.g. a one-tuple
                    # with ('x.y',) will become a two-tuple: ('x', 'y')
                    i.capitalize() for i in '.'.join(opts.namespace).split('.')
                ]))

        # Done; return the naming information.
        return package_info
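The regex logic above can be checked directly; for a typical versioned
package, the greedy namespace group backtracks so that the final segment
before the version becomes the name:

import re

pattern = r'^((?P<namespace>[a-z0-9_.]+)\.)?(?P<name>[a-z0-9_]+)'
version = r'\.(?P<version>v[0-9]+(p[0-9]+)?((alpha|beta)[0-9]+)?)'
root_package = 'google.cloud.spanner.v1'
if re.search(version, root_package):
    pattern += version
match = re.search(pattern, root_package).groupdict()
assert match['namespace'] == 'google.cloud'
assert match['name'] == 'spanner'
assert match['version'] == 'v1'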
Example #10
def test_services():
    L = descriptor_pb2.SourceCodeInfo.Location

    # Make a silly helper method to not repeat some of the structure.
    def _n(method_name: str):
        return {
            'service': 'google.example.v2.FooService',
            'method': method_name,
        }

    # Set up retry information.
    opts = options.Options(
        retry={
            'methodConfig': [
                {
                    'name': [_n('TimeoutableGetFoo')],
                    'timeout': '30s'
                },
                {
                    'name': [_n('RetryableGetFoo')],
                    'retryPolicy': {
                        'maxAttempts': 3,
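                        # Presumably nanoseconds: 1e6 ns == 0.001 s, which
                        # matches the pytest.approx(0.001) assertion below.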
                        'initialBackoff': '%dn' % 1e6,
                        'maxBackoff': '60s',
                        'backoffMultiplier': 1.5,
                        'retryableStatusCodes': ['UNAVAILABLE', 'ABORTED'],
                    }
                },
            ]
        })

    # Set up messages for our RPC.
    request_message_pb = make_message_pb2(
        name='GetFooRequest',
        fields=(make_field_pb2(name='name', type=9, number=1),),
    )
    response_message_pb = make_message_pb2(name='GetFooResponse', fields=())

    # Set up the service with an RPC.
    service_pb = descriptor_pb2.ServiceDescriptorProto(
        name='FooService',
        method=(
            descriptor_pb2.MethodDescriptorProto(
                name='GetFoo',
                input_type='google.example.v2.GetFooRequest',
                output_type='google.example.v2.GetFooResponse',
            ),
            descriptor_pb2.MethodDescriptorProto(
                name='TimeoutableGetFoo',
                input_type='google.example.v2.GetFooRequest',
                output_type='google.example.v2.GetFooResponse',
            ),
            descriptor_pb2.MethodDescriptorProto(
                name='RetryableGetFoo',
                input_type='google.example.v2.GetFooRequest',
                output_type='google.example.v2.GetFooResponse',
            ),
        ),
    )

    # Fake-document our fake stuff.
    locations = (
        L(path=(6, 0), leading_comments='This is the FooService service.'),
        L(path=(6, 0, 2, 0), leading_comments='This is the GetFoo method.'),
        L(path=(4, 0), leading_comments='This is the GetFooRequest message.'),
        L(path=(4, 1), leading_comments='This is the GetFooResponse message.'),
    )

    # Finally, set up the file that encompasses these.
    fdp = make_file_pb2(
        name='test.proto',
        package='google.example.v2',
        messages=(request_message_pb, response_message_pb),
        services=(service_pb, ),
        locations=locations,
    )

    # Make the proto object.
    proto = api.API.build(
        [fdp],
        'google.example.v2',
        opts=opts,
    ).protos['test.proto']

    # Establish that our data looks correct.
    assert len(proto.services) == 1
    assert len(proto.messages) == 2
    service = proto.services['google.example.v2.FooService']
    assert service.meta.doc == 'This is the FooService service.'
    assert len(service.methods) == 3
    method = service.methods['GetFoo']
    assert method.meta.doc == 'This is the GetFoo method.'
    assert isinstance(method.input, wrappers.MessageType)
    assert isinstance(method.output, wrappers.MessageType)
    assert method.input.name == 'GetFooRequest'
    assert method.input.meta.doc == 'This is the GetFooRequest message.'
    assert method.output.name == 'GetFooResponse'
    assert method.output.meta.doc == 'This is the GetFooResponse message.'
    assert not method.timeout
    assert not method.retry

    # Establish that the retry information on a timeout-able method also
    # looks correct.
    timeout_method = service.methods['TimeoutableGetFoo']
    assert timeout_method.timeout == pytest.approx(30.0)
    assert not timeout_method.retry

    # Establish that the retry information on the retryable method also
    # looks correct.
    retry_method = service.methods['RetryableGetFoo']
    assert retry_method.timeout is None
    assert retry_method.retry.max_attempts == 3
    assert retry_method.retry.initial_backoff == pytest.approx(0.001)
    assert retry_method.retry.backoff_multiplier == pytest.approx(1.5)
    assert retry_method.retry.max_backoff == pytest.approx(60.0)
    assert retry_method.retry.retryable_exceptions == {
        exceptions.ServiceUnavailable,
        exceptions.Aborted,
    }
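For reference, the status-code strings in the retry config above map onto
google.api_core exception classes; a quick sanity check, assuming
google-api-core (with grpc) is installed:

from google.api_core import exceptions

assert exceptions.ServiceUnavailable.grpc_status_code.name == 'UNAVAILABLE'
assert exceptions.Aborted.grpc_status_code.name == 'ABORTED'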