def names(self) -> FrozenSet[str]:
    """Return a set of names used in this service.

    This is used for detecting naming collisions in the module names
    used for imports.
    """
    # Put together a set of the service and method names.
    answer = {self.name, self.client_name, self.async_client_name}
    answer.update(
        utils.to_snake_case(i.name) for i in self.methods.values()
    )

    # Identify any import module names where the same module name is used
    # from distinct packages.
    modules: Dict[str, Set[str]] = collections.defaultdict(set)
    for m in self.methods.values():
        for t in m.ref_types:
            modules[t.ident.module].add(t.ident.package)

    answer.update(
        module_name
        for module_name, packages in modules.items()
        if len(packages) > 1
    )

    # Done; return the answer.
    return frozenset(answer)
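# A minimal, self-contained sketch of the collision check above: a module name
# becomes a potential collision only when two different packages both contribute
# a module with that name. The module and package values here are invented for
# illustration.
import collections
from typing import Dict, Set, Tuple

refs = [
    ("timestamp", ("google", "protobuf")),
    ("timestamp", ("acme", "library", "v1")),  # same module name, different package
    ("duration", ("google", "protobuf")),
]

modules: Dict[str, Set[Tuple[str, ...]]] = collections.defaultdict(set)
for module, package in refs:
    modules[module].add(package)

collisions = {m for m, pkgs in modules.items() if len(pkgs) > 1}
assert collisions == {"timestamp"}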
def module_name(self) -> str:
    """Return the appropriate module name for this service.

    Returns:
        str: The service name, in snake case.
    """
    return utils.to_snake_case(self.name)
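# A minimal, regex-based sketch of the snake-casing that `utils.to_snake_case`
# performs; this stand-in is an assumption used only for illustration and may
# differ from the project helper in edge cases (acronyms, digits, etc.).
import re

def _to_snake_case_sketch(name: str) -> str:
    # Insert underscores at word boundaries, then lowercase everything.
    name = re.sub(r"(.)([A-Z][a-z]+)", r"\1_\2", name)
    name = re.sub(r"([a-z0-9])([A-Z])", r"\1_\2", name)
    return name.lower()

assert _to_snake_case_sketch("LibraryService") == "library_service"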
def gapic_metadata(self, options: Options) -> gapic_metadata_pb2.GapicMetadata:
    """Return a GapicMetadata message mapping proto services and RPCs to the
    corresponding generated library clients and methods."""
    gm = gapic_metadata_pb2.GapicMetadata(
        schema="1.0",
        comment="This file maps proto services/RPCs to the corresponding library clients/methods",
        language="python",
        proto_package=self.naming.proto_package,
        library_package=".".join(
            self.naming.module_namespace + (self.naming.versioned_module_name,)
        ),
    )
    for service in sorted(self.services.values(), key=lambda s: s.name):
        service_desc = gm.services.get_or_create(service.name)

        # At least one of "grpc" or "rest" is guaranteed to be present because
        # of the way that Options instances are created.
        # This assumes the options are generated by the class method factory.
        transports = []
        if "grpc" in options.transport:
            transports.append(("grpc", service.client_name))
            transports.append(("grpc-async", service.async_client_name))

        if "rest" in options.transport:
            transports.append(("rest", service.client_name))

        methods = sorted(service.methods.values(), key=lambda m: m.name)
        for tprt, client_name in transports:
            transport = service_desc.clients.get_or_create(tprt)
            transport.library_client = client_name
            for method in methods:
                method_desc = transport.rpcs.get_or_create(method.name)
                method_desc.methods.append(to_snake_case(method.name))

    return gm
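# A plain-data sketch of the transport fan-out above: "grpc" contributes both the
# sync and async clients, while "rest" contributes only the sync client. The
# client names here are hypothetical.
def _select_transports(transport_options, client_name, async_client_name):
    transports = []
    if "grpc" in transport_options:
        transports.append(("grpc", client_name))
        transports.append(("grpc-async", async_client_name))
    if "rest" in transport_options:
        transports.append(("rest", client_name))
    return transports

assert _select_transports(["grpc", "rest"], "EchoClient", "EchoAsyncClient") == [
    ("grpc", "EchoClient"),
    ("grpc-async", "EchoAsyncClient"),
    ("rest", "EchoClient"),
]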
def module_name(self) -> str:
    """Return the appropriate module name for this service.

    Returns:
        str: The module name for this service (which is the service
        name in snake case).
    """
    return to_snake_case(self.name.split('/')[-1][:-len('.proto')])
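# Illustration of the slicing above with a hypothetical proto file name: the
# basename is taken after the last "/", and "[:-len('.proto')]" strips the
# extension before snake-casing.
fname = "google/example/v1/library_service.proto"
basename = fname.split("/")[-1]        # "library_service.proto"
module = basename[:-len(".proto")]     # "library_service"
assert module == "library_service"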
def _client_output(self, enable_asyncio: bool):
    """Return the output from the client layer.

    This takes into account transformations made by the outer GAPIC
    client to transform the output from the transport.

    Returns:
        Union[~.MessageType, ~.PythonType]:
            A description of the return type.
    """
    # Void messages ultimately return None.
    if self.void:
        return PrimitiveType.build(None)

    # If this method is an LRO, return a PythonType instance representing
    # that.
    if self.lro:
        return PythonType(meta=metadata.Metadata(
            address=metadata.Address(
                name='AsyncOperation' if enable_asyncio else 'Operation',
                module='operation_async' if enable_asyncio else 'operation',
                package=('google', 'api_core'),
                collisions=self.lro.response_type.ident.collisions,
            ),
            documentation=utils.doc(
                'An object representing a long-running operation. \n\n'
                'The result type for the operation will be '
                ':class:`{ident}` {doc}'.format(
                    doc=self.lro.response_type.meta.doc,
                    ident=self.lro.response_type.ident.sphinx,
                ),
            ),
        ))

    # If this method is paginated, return that method's pager class.
    if self.paged_result_field:
        return PythonType(meta=metadata.Metadata(
            address=metadata.Address(
                name=f'{self.name}AsyncPager' if enable_asyncio
                     else f'{self.name}Pager',
                package=self.ident.api_naming.module_namespace + (
                    self.ident.api_naming.versioned_module_name,
                ) + self.ident.subpackage + (
                    'services',
                    utils.to_snake_case(self.ident.parent[-1]),
                ),
                module='pagers',
                collisions=self.input.ident.collisions,
            ),
            documentation=utils.doc(
                f'{self.output.meta.doc}\n\n'
                'Iterating over this object will yield results and '
                'resolve additional pages automatically.',
            ),
        ))

    # Return the usual output.
    return self.output
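# A sketch of how the pager's package tuple above composes, using hypothetical
# naming parts; the real values come from the API naming configuration and the
# method's parent service. The pager classes themselves live in the generated
# `pagers` module of the service subpackage.
module_namespace = ("google", "cloud")
versioned_module_name = "library_v1"
subpackage = ()
parent_service_snake = "library_service"   # utils.to_snake_case(ident.parent[-1])

package = module_namespace + (versioned_module_name,) + subpackage + (
    "services", parent_service_snake,
)
assert ".".join(package) == "google.cloud.library_v1.services.library_service"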
def names(self) -> Set[str]:
    """Return a set of names used in this service.

    This is used for detecting naming collisions in the module names
    used for imports.
    """
    # Put together a set of the service and method names.
    answer = {self.name}.union(
        {utils.to_snake_case(i.name) for i in self.methods.values()}
    )

    # Identify any import module names where the same module name is used
    # from distinct packages.
    modules = {}
    for t in chain(*[m.ref_types for m in self.methods.values()]):
        modules.setdefault(t.ident.module, set())
        modules[t.ident.module].add(t.ident.package)
    for module_name, packages in modules.items():
        if len(packages) > 1:
            answer.add(module_name)

    # Done; return the answer.
    return frozenset(answer)
def _generate_samples_and_manifest(
        self, api_schema: api.API, index: snippet_index.SnippetIndex,
        sample_template: jinja2.Template, *,
        opts: Options) -> Tuple[Dict, snippet_index.SnippetIndex]:
    """Generate samples and samplegen manifest for the API.

    Arguments:
        api_schema (api.API): The schema for the API to which the samples belong.
        index (snippet_index.SnippetIndex): The index to which generated snippets
            and their metadata are added.
        sample_template (jinja2.Template): The template to use to generate samples.
        opts (Options): Additional generator options.

    Returns:
        Tuple[Dict[str, CodeGeneratorResponse.File], snippet_index.SnippetIndex]:
            A dict mapping filepath to rendered file, and the updated snippet index.
    """
    # The two-layer data structure lets us do two things:
    #   * detect duplicate samples, which is an error
    #   * detect distinct samples with the same ID, which are disambiguated
    id_to_hash_to_spec: DefaultDict[str, Dict[str, Any]] = defaultdict(dict)

    # Autogenerated sample specs
    autogen_specs: typing.List[typing.Dict[str, Any]] = []
    if opts.autogen_snippets:
        autogen_specs = list(
            samplegen.generate_sample_specs(api_schema, opts=opts))

    # Also process any handwritten sample specs
    handwritten_specs = samplegen.parse_handwritten_specs(
        self._sample_configs)

    sample_specs = autogen_specs + list(handwritten_specs)

    for spec in sample_specs:
        # Every sample requires an ID. This may be provided
        # by a samplegen config author.
        # If no ID is provided, fall back to the region tag.
        #
        # Ideally the sample author should pick a descriptive, unique ID,
        # but this may be impractical and can be error-prone.
        spec_hash = sha256(str(spec).encode("utf8")).hexdigest()[:8]
        sample_id = spec.get("id") or spec.get("region_tag") or spec_hash
        spec["id"] = sample_id

        hash_to_spec = id_to_hash_to_spec[sample_id]

        if spec_hash in hash_to_spec:
            raise DuplicateSample(
                f"Duplicate samplegen spec found: {spec}")

        hash_to_spec[spec_hash] = spec

    out_dir = "samples/generated_samples"
    fpath_to_spec_and_rendered = {}
    for hash_to_spec in id_to_hash_to_spec.values():
        for spec_hash, spec in hash_to_spec.items():
            id_is_unique = len(hash_to_spec) == 1
            # The ID is used to generate the file name. It must be globally unique.
            if not id_is_unique:
                spec["id"] += f"_{spec_hash}"

            sample, snippet_metadata = samplegen.generate_sample(
                spec, api_schema, sample_template)

            fpath = utils.to_snake_case(spec["id"]) + ".py"
            fpath_to_spec_and_rendered[os.path.join(out_dir, fpath)] = (
                spec,
                sample,
            )

            snippet_metadata.file = fpath
            snippet_metadata.title = fpath
            index.add_snippet(
                snippet_index.Snippet(sample, snippet_metadata))

    output_files = {
        fname: CodeGeneratorResponse.File(
            content=formatter.fix_whitespace(sample),
            name=fname,
        )
        for fname, (_, sample) in fpath_to_spec_and_rendered.items()
    }

    if index.metadata_index.snippets:
        # NOTE(busunkim): Not all fields are yet populated in the snippet metadata.
        # Expected filename: snippet_metadata_{apishortname}_{apiversion}.json
        snippet_metadata_path = str(
            pathlib.Path(out_dir) /
            f"snippet_metadata_{api_schema.naming.name}_{api_schema.naming.version}.json"
        ).lower()
        output_files[snippet_metadata_path] = CodeGeneratorResponse.File(
            content=formatter.fix_whitespace(index.get_metadata_json()),
            name=snippet_metadata_path)

    return output_files, index
def _generate_samples_and_manifest(
        self, api_schema: api.API, sample_template: jinja2.Template, *,
        opts: Options) -> Dict:
    """Generate samples and samplegen manifest for the API.

    Arguments:
        api_schema (api.API): The schema for the API to which the samples belong.
        sample_template (jinja2.Template): The template to use to generate samples.
        opts (Options): Additional generator options.

    Returns:
        Dict[str, CodeGeneratorResponse.File]: A dict mapping filepath to rendered file.
    """
    # The two-layer data structure lets us do two things:
    #   * detect duplicate samples, which is an error
    #   * detect distinct samples with the same ID, which are disambiguated
    id_to_hash_to_spec: DefaultDict[str, Dict[str, Any]] = defaultdict(dict)

    # Autogenerated sample specs
    autogen_specs: typing.List[typing.Dict[str, Any]] = []
    if opts.autogen_snippets:
        autogen_specs = list(
            samplegen.generate_sample_specs(api_schema, opts=opts))

    # Also process any handwritten sample specs
    handwritten_specs = samplegen.parse_handwritten_specs(
        self._sample_configs)

    sample_specs = autogen_specs + list(handwritten_specs)

    for spec in sample_specs:
        # Every sample requires an ID. This may be provided
        # by a samplegen config author.
        # If no ID is provided, fall back to the region tag.
        #
        # Ideally the sample author should pick a descriptive, unique ID,
        # but this may be impractical and can be error-prone.
        spec_hash = sha256(str(spec).encode("utf8")).hexdigest()[:8]
        sample_id = spec.get("id") or spec.get("region_tag") or spec_hash
        spec["id"] = sample_id

        hash_to_spec = id_to_hash_to_spec[sample_id]

        if spec_hash in hash_to_spec:
            raise DuplicateSample(
                f"Duplicate samplegen spec found: {spec}")

        hash_to_spec[spec_hash] = spec

    out_dir = "samples/generated_samples"
    fpath_to_spec_and_rendered = {}
    for hash_to_spec in id_to_hash_to_spec.values():
        for spec_hash, spec in hash_to_spec.items():
            id_is_unique = len(hash_to_spec) == 1
            # The ID is used to generate the file name. It must be globally unique.
            if not id_is_unique:
                spec["id"] += f"_{spec_hash}"

            sample = samplegen.generate_sample(
                spec,
                api_schema,
                sample_template,
            )

            fpath = utils.to_snake_case(spec["id"]) + ".py"
            fpath_to_spec_and_rendered[os.path.join(out_dir, fpath)] = (
                spec,
                sample,
            )

    output_files = {
        fname: CodeGeneratorResponse.File(
            content=formatter.fix_whitespace(sample),
            name=fname)
        for fname, (_, sample) in fpath_to_spec_and_rendered.items()
    }

    # TODO(busunkim): Re-enable manifest generation once metadata
    # format has been formalized.
    # https://docs.google.com/document/d/1ghBam8vMj3xdoe4xfXhzVcOAIwrkbTpkMLgKc9RPD9k/edit#heading=h.sakzausv6hue
    #
    # if output_files:
    #     manifest_fname, manifest_doc = manifest.generate(
    #         (
    #             (fname, spec)
    #             for fname, (spec, _) in fpath_to_spec_and_rendered.items()
    #         ),
    #         api_schema,
    #     )
    #     manifest_fname = os.path.join(out_dir, manifest_fname)
    #     output_files[manifest_fname] = CodeGeneratorResponse.File(
    #         content=manifest_doc.render(), name=manifest_fname
    #     )

    return output_files
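# A self-contained sketch of the ID handling used in both versions above: specs
# are content-hashed, exact duplicates are rejected, and distinct specs that
# share an ID get the hash suffix appended so the generated file names stay
# unique. The specs here are invented for illustration.
import collections
from hashlib import sha256

specs = [
    {"id": "list_books", "service": "Library"},
    {"id": "list_books", "service": "Archive"},  # same ID, different spec
]

id_to_hash_to_spec = collections.defaultdict(dict)
for spec in specs:
    spec_hash = sha256(str(spec).encode("utf8")).hexdigest()[:8]
    if spec_hash in id_to_hash_to_spec[spec["id"]]:
        raise ValueError(f"Duplicate samplegen spec found: {spec}")
    id_to_hash_to_spec[spec["id"]][spec_hash] = spec

for hash_to_spec in id_to_hash_to_spec.values():
    for spec_hash, spec in hash_to_spec.items():
        if len(hash_to_spec) > 1:
            spec["id"] += f"_{spec_hash}"

ids = {spec["id"] for h in id_to_hash_to_spec.values() for spec in h.values()}
assert len(ids) == 2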
def __str__(self):
    """Return the last segment of the qualified name, in snake case."""
    return to_snake_case(super().__str__().split(".")[-1])
def _fill_sample_metadata(sample: dict, api_schema: api.API):
    """Returns snippet metadata for the sample."""

    # Snippet metadata can't be fully filled out in any one function.
    # In this function we add information from the API schema and the
    # sample dictionary.
    # See `snippet_metadata.proto` for documentation on the fields.

    service = api_schema.services[sample["service"]]
    method = service.methods[sample["rpc"]]
    async_ = sample["transport"] == api.TRANSPORT_GRPC_ASYNC

    snippet_metadata = snippet_metadata_pb2.Snippet()  # type: ignore
    snippet_metadata.region_tag = sample["region_tag"]
    snippet_metadata.description = f"Sample for {sample['rpc']}"
    snippet_metadata.language = snippet_metadata_pb2.Language.PYTHON  # type: ignore
    snippet_metadata.canonical = True
    snippet_metadata.origin = snippet_metadata_pb2.Snippet.Origin.API_DEFINITION  # type: ignore

    # Service Client
    snippet_metadata.client_method.client.short_name = (
        service.async_client_name if async_ else service.client_name)
    snippet_metadata.client_method.client.full_name = f"{'.'.join(sample['module_namespace'])}.{sample['module_name']}.{snippet_metadata.client_method.client.short_name}"

    # Service
    snippet_metadata.client_method.method.service.short_name = service.name
    snippet_metadata.client_method.method.service.full_name = f"{api_schema.naming.proto_package}.{service.name}"

    # RPC
    snippet_metadata.client_method.method.short_name = method.name
    snippet_metadata.client_method.method.full_name = f"{api_schema.naming.proto_package}.{service.name}.{method.name}"

    # Client Method
    setattr(snippet_metadata.client_method, "async", async_)
    snippet_metadata.client_method.short_name = utils.to_snake_case(method.name)
    snippet_metadata.client_method.full_name = f"{snippet_metadata.client_method.client.full_name}.{snippet_metadata.client_method.short_name}"

    if not method.void:
        snippet_metadata.client_method.result_type = (
            method.client_output_async.ident.sphinx if async_
            else method.client_output.ident.sphinx)
        if method.server_streaming:
            snippet_metadata.client_method.result_type = f"Iterable[{snippet_metadata.client_method.result_type}]"

    # Client Method Parameters
    parameters = snippet_metadata.client_method.parameters
    if not method.client_streaming:
        parameters.append(
            snippet_metadata_pb2.ClientMethod.Parameter(  # type: ignore
                type=method.input.ident.sphinx, name="request"))
        for field in method.flattened_fields.values():
            parameters.append(
                snippet_metadata_pb2.ClientMethod.Parameter(  # type: ignore
                    type=field.ident.sphinx, name=field.name))
    else:
        parameters.append(
            snippet_metadata_pb2.ClientMethod.Parameter(  # type: ignore
                type=f"Iterator[{method.input.ident.sphinx}]",
                name="requests"))

    parameters.append(
        snippet_metadata_pb2.ClientMethod.Parameter(  # type: ignore
            name="retry", type="google.api_core.retry.Retry"))
    parameters.append(
        snippet_metadata_pb2.ClientMethod.Parameter(  # type: ignore
            name="timeout", type="float"))
    parameters.append(
        snippet_metadata_pb2.ClientMethod.Parameter(  # type: ignore
            name="metadata", type="Sequence[Tuple[str, str]]"))

    return snippet_metadata
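# Why setattr(..., "async", ...) is used above: "async" is a reserved keyword in
# Python 3, so that field cannot be written with dot syntax. A minimal
# illustration with a plain object standing in for the protobuf message:
class _ClientMethodStub:
    pass

m = _ClientMethodStub()
# m.async = True   # SyntaxError: "async" is a keyword
setattr(m, "async", True)
assert getattr(m, "async") is True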