def main():
    """Protoc plugin entry point.

    Reads a serialized CodeGeneratorRequest from stdin, generates one
    ``*_grpc.py`` file per requested proto, and writes the serialized
    CodeGeneratorResponse to stdout.
    """
    request = CodeGeneratorRequest.FromString(sys.stdin.buffer.read())
    requested = set(request.file_to_generate)

    # Map every top-level named entity (message, enum, service, extension)
    # to the name of the .proto file that declares it, keyed by the
    # fully-qualified (leading-dot) name.
    proto_for_entity = {}
    for descriptor in request.proto_file:
        entities = itertools.chain(
            descriptor.message_type,
            descriptor.enum_type,
            descriptor.service,
            descriptor.extension,
        )
        for entity in entities:
            if descriptor.package:
                qualified = ".".join(["", descriptor.package, entity.name])
            else:
                qualified = "." + entity.name
            proto_for_entity[qualified] = descriptor.name

    response = CodeGeneratorResponse()
    for descriptor in request.proto_file:
        # Only emit output for files protoc explicitly asked us to generate.
        if descriptor.name not in requested:
            continue
        generated = response.file.add()
        generated.name = descriptor.name.replace('.proto', "_grpc.py")
        generated.content = generate_single_proto(descriptor, proto_for_entity)

    sys.stdout.buffer.write(response.SerializeToString())
def main(input_file=sys.stdin, output_file=sys.stdout):
    """Parse a CodeGeneratorRequest and return a CodeGeneratorResponse.

    Reads the serialized request from *input_file*, generates documentation
    files via CodeGeneratorParser, and writes the serialized response to
    *output_file*.
    """
    logging.basicConfig(filename='logging.log', level=logging.DEBUG)
    # Ensure we are getting a bytestream, and writing to a bytestream.
    if hasattr(input_file, 'buffer'):
        input_file = input_file.buffer
    if hasattr(output_file, 'buffer'):
        output_file = output_file.buffer
    try:
        # Instantiate a parser.
        parser = CodeGeneratorParser.from_input_file(input_file)
        docs = parser.generate_docs()
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
        # not trapped; still log the traceback and re-raise for the caller.
        logging.exception("Error when generating docs: ")
        raise
    # The dict keys were unused in the original loop; iterate values only.
    answer = [
        CodeGeneratorResponse.File(
            name=item.filename,
            content=item.content,
        )
        for item in docs.values()
    ]
    cgr = CodeGeneratorResponse(file=answer)
    output_file.write(cgr.SerializeToString())
def main(input_file=sys.stdin, output_file=sys.stdout): """Parse a CodeGeneratorRequest and return a CodeGeneratorResponse.""" # Ensure we are getting a bytestream, and writing to a bytestream. if hasattr(input_file, 'buffer'): input_file = input_file.buffer if hasattr(output_file, 'buffer'): output_file = output_file.buffer # Instantiate a parser. parser = CodeGeneratorParser.from_input_file(input_file) # Find all the docs and amalgamate them together. comment_data = {} for filename, message_structure in parser.find_docs(): comment_data.setdefault(filename, set()) comment_data[filename].add(message_structure) # Iterate over the data that came back and parse it into a single, # coherent CodeGeneratorResponse. answer = [] _BATCH_TOKEN = "CD985272F78311" meta_docstrings = [] meta_structs = [] for fn, structs in comment_data.items(): for struct in structs: if meta_docstrings: meta_docstrings.append("\n%s" % _BATCH_TOKEN) meta_docstrings.append(struct.get_meta_docstring()) meta_structs.append((fn, struct)) meta_docstring = convert_text("".join(meta_docstrings), 'rst', format='md') meta_docstrings = meta_docstring.split("%s" % _BATCH_TOKEN) index = 0 while index < len(meta_structs) and index < len(meta_docstrings): fn = meta_structs[index][0] struct = meta_structs[index][1] answer.append( CodeGeneratorResponse.File( name=fn.replace('.proto', '_pb2.py'), insertion_point='class_scope:%s' % struct.name, content=',\n__doc__ = """{docstring}""",'.format( docstring=struct.get_python_docstring( meta_docstrings[index]), ), )) index += 1 for fn in _init_files(comment_data.keys()): answer.append(CodeGeneratorResponse.File( name=fn, content='', )) cgr = CodeGeneratorResponse(file=answer) output_file.write(cgr.SerializeToString())
def main() -> None:
    """Protoc plugin entry: read a CodeGeneratorRequest from stdin, generate
    grpclib stub modules, and write the CodeGeneratorResponse to stdout."""
    with os.fdopen(sys.stdin.fileno(), 'rb') as inp:
        request = CodeGeneratorRequest.FromString(inp.read())

    # Fully-qualified proto message name -> generated Python type name.
    types_map: Dict[str, str] = {}
    for pf in request.proto_file:
        for mt in pf.message_type:
            types_map.update(_type_names(pf, mt))

    response = CodeGeneratorResponse()
    # Advertise proto3 `optional` support when this protobuf version has it.
    # See https://github.com/protocolbuffers/protobuf/blob/v3.12.0/docs/implementing_proto3_presence.md # noqa
    if hasattr(CodeGeneratorResponse, 'Feature'):
        response.supported_features = (  # type: ignore
            CodeGeneratorResponse.FEATURE_PROTO3_OPTIONAL  # type: ignore
        )

    for file_to_generate in request.file_to_generate:
        proto_file = _get_proto(request, file_to_generate)
        deps = list(proto_file.dependency) + [file_to_generate]
        imports = [_proto2pb2_module_name(dep) for dep in deps]

        services = []
        for svc in proto_file.service:
            methods = [
                Method(
                    name=rpc.name,
                    cardinality=_CARDINALITY[(rpc.client_streaming,
                                              rpc.server_streaming)],
                    request_type=types_map[rpc.input_type],
                    reply_type=types_map[rpc.output_type],
                )
                for rpc in svc.method
            ]
            services.append(Service(name=svc.name, methods=methods))

        out_file = response.file.add()
        module_name = _proto2grpc_module_name(file_to_generate)
        out_file.name = module_name.replace(".", "/") + ".py"
        out_file.content = render(
            proto_file=proto_file.name,
            package=proto_file.package,
            imports=imports,
            services=services,
        )

    with os.fdopen(sys.stdout.fileno(), 'wb') as out:
        out.write(response.SerializeToString())
def main(input_file=sys.stdin, output_file=sys.stdout):
    """Dummy protoc plugin: insert a greeting comment at the module scope of
    each generated ``*_pb2.py`` file."""
    request = CodeGeneratorRequest.FromString(input_file.buffer.read())
    generated = [
        CodeGeneratorResponse.File(
            name=fname.replace('.proto', '_pb2.py'),
            insertion_point='module_scope',
            content="# Hello {}, I'm a dummy plugin!".format(fname),
        )
        for fname in request.file_to_generate
    ]
    cgr = CodeGeneratorResponse(file=generated)
    output_file.buffer.write(cgr.SerializeToString())
def main() -> None:
    """Read a CodeGeneratorRequest from stdin, render grpclib stub modules
    for each requested proto file, and write the CodeGeneratorResponse to
    stdout."""
    with os.fdopen(sys.stdin.fileno(), 'rb') as inp:
        request = CodeGeneratorRequest.FromString(inp.read())

    # Map fully-qualified proto message names to their Python counterparts.
    types_map: Dict[str, str] = {}
    for pf in request.proto_file:
        for mt in pf.message_type:
            types_map.update(_type_names(pf, mt))

    response = CodeGeneratorResponse()
    for file_to_generate in request.file_to_generate:
        proto_file = _get_proto(request, file_to_generate)
        # Generated module imports: every dependency plus the file itself.
        imports = [
            _proto2pb2_module_name(dep)
            for dep in list(proto_file.dependency) + [file_to_generate]
        ]

        services = []
        for svc in proto_file.service:
            methods = []
            for rpc in svc.method:
                streaming = (rpc.client_streaming, rpc.server_streaming)
                methods.append(Method(
                    name=rpc.name,
                    cardinality=_CARDINALITY[streaming],
                    request_type=types_map[rpc.input_type],
                    reply_type=types_map[rpc.output_type],
                ))
            services.append(Service(name=svc.name, methods=methods))

        out_file = response.file.add()
        out_file.name = (
            _proto2grpc_module_name(file_to_generate).replace(".", "/") + ".py"
        )
        out_file.content = render(
            proto_file=proto_file.name,
            package=proto_file.package,
            imports=imports,
            services=services,
        )

    with os.fdopen(sys.stdout.fileno(), 'wb') as out:
        out.write(response.SerializeToString())
def main() -> None:
    """Protoc plugin entry: consume the CodeGeneratorRequest on stdin and
    emit the rendered CodeGeneratorResponse on stdout."""
    with os.fdopen(sys.stdin.fileno(), 'rb') as inp:
        request = CodeGeneratorRequest.FromString(inp.read())

    # Fully-qualified proto type name -> "<py_module>.<Message>" reference.
    types_map = {}
    for pf in request.proto_file:
        py_module = _proto2py(pf.name)
        for mt in pf.message_type:
            types_map[_type_name(pf, mt)] = '.'.join((py_module, mt.name))

    response = CodeGeneratorResponse()
    for file_to_generate in request.file_to_generate:
        proto_file = _get_proto(request, file_to_generate)
        imports = [
            _proto2py(dep)
            for dep in list(proto_file.dependency) + [file_to_generate]
        ]

        services = []
        for svc in proto_file.service:
            methods = [
                Method(
                    name=rpc.name,
                    cardinality=_CARDINALITY[(rpc.client_streaming,
                                              rpc.server_streaming)],
                    request_type=types_map[rpc.input_type],
                    reply_type=types_map[rpc.output_type],
                )
                for rpc in svc.method
            ]
            services.append(Service(name=svc.name, methods=methods))

        out_file = response.file.add()
        out_file.name = file_to_generate.replace('.proto', SUFFIX)
        out_file.content = render(
            proto_file=proto_file.name,
            package=proto_file.package,
            imports=imports,
            services=services,
        )

    with os.fdopen(sys.stdout.fileno(), 'wb') as out:
        out.write(response.SerializeToString())
type_name = message.name if type_name == 'Column': continue out += """\n@Entity\n""" out += """@Table(name = "{tbl_name}")\n""".format(tbl_name=message.name) out += """public class {type_name} """.format(type_name=type_name) out += """ {\n\n""" # https://developers.google.com/protocol-buffers/docs/reference/java/com/google/protobuf/DescriptorProtos.FieldDescriptorProto for field in message.field: out += """ @Column(name = "{fname}")\n""".format(fname=field.name) out += """ private {lang_type} {fname};\n""".format(fname=field.json_name, lang_type={ "TYPE_STRING": "String", "TYPE_INT32": "Integer", }.get(str(field.type), "String")) out += str(field.options.__str__()) out += str(field.__str__()) out += str(field.__dict__()) # for x in field.options: # out += """!""" out += """\n""" out += """}\n\n""" f = response.file.add() f.name = type_name + '.java' f.content = out sys.stdout.write(response.SerializeToString())