def Main():
    """Protoc plugin entry point: emit one .rst file per input proto.

    Reads a serialized CodeGeneratorRequest from stdin and writes a
    serialized CodeGeneratorResponse to stdout. See
    http://www.expobrain.net/2015/09/13/create-a-plugin-for-google-protocol-buffer/
    for protoc plugin basics. If CPROFILE_ENABLED is set, a cProfile report
    is emitted alongside each generated file.
    """
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.read())
    response = plugin_pb2.CodeGeneratorResponse()
    profiling = os.getenv('CPROFILE_ENABLED')
    for proto_file in request.proto_file:
        out = response.file.add()
        out.name = proto_file.name + '.rst'
        if profiling:
            profiler = cProfile.Profile()
            profiler.enable()
        out.content = GenerateRst(proto_file)
        if profiling:
            profiler.disable()
            report = StringIO.StringIO()
            stats = pstats.Stats(profiler, stream=report).sort_stats(
                os.getenv('CPROFILE_SORTBY', 'cumulative'))
            profile_out = response.file.add()
            profile_out.name = proto_file.name + '.rst.profile'
            stats.print_stats()
            profile_out.content = report.getvalue()
    sys.stdout.write(response.SerializeToString())
def wrapper():
    # type: () -> None
    """Run `func` (captured from the enclosing scope) as a protoc plugin.

    Parses a CodeGeneratorRequest from stdin, lets `func` populate a fresh
    CodeGeneratorResponse, and writes the serialized response to stdout.
    """
    # stdin/stdout carry raw protobuf bytes; on PY3 the text wrappers must
    # be bypassed via their .buffer attribute.
    raw = sys.stdin.buffer.read() if six.PY3 else sys.stdin.read()
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(raw)
    response = plugin_pb2.CodeGeneratorResponse()
    func(request, response)
    payload = response.SerializeToString()
    if six.PY3:
        sys.stdout.buffer.write(payload)
    else:
        sys.stdout.write(payload)
def plugin(output_descriptors):
    """Protoc plugin entry point.

    This defines protoc plugin and manages the stdin -> stdout flow. An
    api_proto_plugin is defined by the provided visitor.

    See
    http://www.expobrain.net/2015/09/13/create-a-plugin-for-google-protocol-buffer/
    for further details on protoc plugin basics.

    Args:
      output_descriptors: a list of OutputDescriptors.
    """
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.buffer.read())
    response = plugin_pb2.CodeGeneratorResponse()
    cprofile_enabled = os.getenv('CPROFILE_ENABLED')
    # We use request.file_to_generate rather than request.file_proto here since we
    # are invoked inside a Bazel aspect, each node in the DAG will be visited once
    # by the aspect and we only want to generate docs for the current node.
    for file_to_generate in request.file_to_generate:
        # Find the FileDescriptorProto for the file we actually are generating.
        file_proto = [
            pf for pf in request.proto_file if pf.name == file_to_generate
        ][0]
        if cprofile_enabled:
            pr = cProfile.Profile()
            pr.enable()
        for od in output_descriptors:
            f = response.file.add()
            f.name = file_proto.name + od.output_suffix
            # Don't run API proto plugins on things like WKT types etc.
            # NOTE: the output file entry was already added above, so skipped
            # (non-envoy) files still appear in the response with empty content.
            if not file_proto.package.startswith('envoy.'):
                continue
            # Plugin options ("k1=v1,k2=v2" via request.parameter) are only
            # forwarded to descriptors that ask for them.
            if request.HasField("parameter") and od.want_params:
                params = dict(
                    param.split('=') for param in request.parameter.split(','))
                xformed_proto = od.xform(file_proto, params)
                visitor_factory = od.visitor_factory(params)
            else:
                xformed_proto = od.xform(file_proto)
                visitor_factory = od.visitor_factory()
            # An xform may return a falsy value to signal "nothing to emit".
            f.content = traverse.traverse_file(
                xformed_proto, visitor_factory) if xformed_proto else ''
        if cprofile_enabled:
            pr.disable()
            stats_stream = io.StringIO()
            ps = pstats.Stats(pr, stream=stats_stream).sort_stats(
                os.getenv('CPROFILE_SORTBY', 'cumulative'))
            stats_file = response.file.add()
            stats_file.name = file_proto.name + '.profile'
            ps.print_stats()
            stats_file.content = stats_stream.getvalue()
        # Also include the original FileDescriptorProto as text proto, this is
        # useful when debugging.
        descriptor_file = response.file.add()
        descriptor_file.name = file_proto.name + ".descriptor.proto"
        descriptor_file.content = str(file_proto)
    sys.stdout.buffer.write(response.SerializeToString())
def main():
    """Protoc plugin entry point: stdin request -> generate_code -> stdout.

    Parses a CodeGeneratorRequest from stdin, delegates to generate_code()
    to populate the response, and writes the serialized response to stdout.
    """
    # Read the serialized request.  (Renamed from `input`, which shadowed
    # the builtin; the dead commented-out optparse scaffolding was removed.)
    data = sys.stdin.read()  # NOTE(review): returns str on PY3 — this looks PY2-only; confirm
    request = plugin.CodeGeneratorRequest()
    request.ParseFromString(data)
    response = plugin.CodeGeneratorResponse()
    generate_code(request, response)
    # Serialize and hand the response back to protoc on stdout.
    output = response.SerializeToString()
    sys.stdout.write(output)
def code_generation() -> Iterator[
        Tuple[plugin_pb2.CodeGeneratorRequest, plugin_pb2.CodeGeneratorResponse]]:
    """Yield a (request, response) pair, then emit the response.

    Wraps the protoc plugin stdio protocol as a single-shot generator: parse
    the CodeGeneratorRequest from stdin, yield it together with a fresh
    CodeGeneratorResponse for the caller to populate, then serialize the
    response to stdout.  Handles -V/--version before touching stdin.

    (Fix: the return annotation previously read ``Iterator[Tuple[...], ]``
    with a stray trailing comma inside the subscript.)
    """
    if len(sys.argv) > 1 and sys.argv[1] in ("-V", "--version"):
        print("mypy-protobuf " + __version__)
        sys.exit(0)
    # Raw protobuf bytes arrive on the binary stdin buffer.
    data = sys.stdin.buffer.read()
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(data)
    response = plugin_pb2.CodeGeneratorResponse()
    # Declare support for optional proto3 fields.
    response.supported_features |= (
        plugin_pb2.CodeGeneratorResponse.FEATURE_PROTO3_OPTIONAL)
    yield request, response
    # The caller has populated `response`; serialize it back to protoc.
    output = response.SerializeToString()
    sys.stdout.buffer.write(output)
def main():
    """Protoc plugin entry point with PY2/PY3-compatible stdio handling.

    Parses a CodeGeneratorRequest from stdin, runs generate_code() with the
    parsed command-line options, and writes the serialized response to stdout.
    """
    # (Fix: locals were UPPER_SNAKE_CASE — PEP 8 reserves that for module
    # constants; dead commented-out debug-dump lines were removed.)
    opts = Cmdoptions()
    # Raw protobuf bytes: PY3 must use the binary buffer, PY2 reads str.
    if six.PY2:
        data = sys.stdin.read()
    else:
        data = sys.stdin.buffer.read()
    request = plugin.CodeGeneratorRequest()
    request.ParseFromString(data)
    response = plugin.CodeGeneratorResponse()
    generate_code(opts, request, response)
    output = response.SerializeToString()
    if six.PY2:
        sys.stdout.write(output)
    else:
        sys.stdout.buffer.write(output)
def main():
    # type: () -> None
    """Read a CodeGeneratorRequest from stdin and emit mypy stubs on stdout."""
    # Raw protobuf bytes; on PY3 go through the binary buffer.
    raw = sys.stdin.buffer.read() if six.PY3 else sys.stdin.read()
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(raw)
    response = plugin_pb2.CodeGeneratorResponse()
    # "quiet" in the plugin parameter string suppresses generator chatter.
    generate_mypy_stubs(Descriptors(request), response,
                        "quiet" in request.parameter)
    serialized = response.SerializeToString()
    if six.PY3:
        sys.stdout.buffer.write(serialized)
    else:
        sys.stdout.write(serialized)
def main():
    """Protoc plugin entry point: generate Cypher statements for the request.

    Reads a CodeGeneratorRequest from stdin, parses comma-separated key=value
    plugin options, and writes a single-file response ('proto.cql') to stdout.
    """
    data = sys.stdin.buffer.read()
    request = plugin.CodeGeneratorRequest()
    request.ParseFromString(data)
    # Parse options of the form "k1=v1,k2=v2".  Assumes no spaces around the
    # options and that keys and values contain no '"' or ','.
    if request.parameter:
        options = {
            k: v
            for k, v in (opt.split('=')
                         for opt in request.parameter.split(','))
        }
    else:
        # BUG FIX: the original used {*()}, which is an empty *set*, while
        # the branch above builds a dict — use an empty dict for a
        # consistent type.
        options = {}
    response = plugin.CodeGeneratorResponse()
    # Generate cypher statements.  We're only going to produce one file.
    content = generate_code(request, options)
    f = response.file.add()
    f.name = 'proto.cql'
    f.content = '\n'.join(content)
    # Serialize and return the response to protoc via stdout.
    output = response.SerializeToString()
    sys.stdout.buffer.write(output)
def parse():
    """Drive the protoc plugin: stdin request -> gen() -> stdout response."""
    request = plugin.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.read())
    response = plugin.CodeGeneratorResponse()
    gen(request, response)
    sys.stdout.write(response.SerializeToString())
def test_library_gapic_v2():
    """End-to-end test: GAPIC v2 yaml config -> resource-name artifacts.

    Compiles the test protos with protoc into a FileDescriptorSet, builds a
    v2 gapic config from the yaml, and checks the collected resource-name
    artifacts for expected entries.  Requires `protoc` on PATH and a
    ./googleapis checkout — TODO confirm both in CI.
    """
    request = plugin_pb2.CodeGeneratorRequest()
    proto_files = [
        "test/testdata/library_simple.proto", "test/testdata/archive.proto",
        "test/testdata/common_resources.proto"
    ]
    request.file_to_generate.extend(proto_files)
    request.parameter = "test/testdata/library_gapic_v2.yaml"
    with open(request.parameter) as f:
        gapic_yaml = yaml.load(f, Loader=yaml.SafeLoader)
    # Rebuild the descriptor set into a clean test_output directory.
    file_descriptor_set_file = "test/testdata/test_output/library.desc"
    shutil.rmtree("test/testdata/test_output", True)
    os.mkdir("test/testdata/test_output/")
    subprocess.check_call([
        'protoc', '-o', file_descriptor_set_file, '--include_imports',
        '--proto_path=.', '--proto_path=./googleapis'
    ] + proto_files)
    with open(file_descriptor_set_file, 'rb') as f:
        file_descriptor_set = descriptor_pb2.FileDescriptorSet.FromString(
            f.read())
    request.proto_file.extend(file_descriptor_set.file)
    gapic_config = gapic_utils.create_gapic_config_v2(gapic_yaml, request)
    resource_name_artifacts \
        = gapic_utils.collect_resource_name_types(
            gapic_config, "com.google.example.library.v1")
    # A plain single-pattern resource name for shelves.
    assert [
        r for r in resource_name_artifacts
        if type(r) is resource_name.ResourceName
        and r.format_string == 'projects/{project}/shelves/{shelf}'
        and r.format_name_lower == 'shelfName'
        and r.parent_interface == 'ResourceName'
    ]
    # BookName: multi-pattern parent with both fixed and formattable patterns.
    assert [
        r for r in resource_name_artifacts
        if type(r) is resource_name.ParentResourceName
        and r.class_name == "BookName" and r.has_fixed_patterns is True
        and r.has_formattable_patterns is True
    ]
    # ArchiveName: formattable patterns only.
    assert [
        r for r in resource_name_artifacts
        if type(r) is resource_name.ParentResourceName
        and r.class_name == "ArchiveName" and r.has_fixed_patterns is False
        and r.has_formattable_patterns is True
    ]
    assert [
        r for r in resource_name_artifacts
        if type(r) is resource_name.ParentResourceName
        and r.class_name == 'BookName'
    ]
def main():
    """Calls the autogenerator.

    Parses the CodeGeneratorRequest from stdin and streams the serialized
    response produced by AutoGen.generate_reactive straight to stdout.
    """
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.buffer.read())
    response = AutoGen.generate_reactive(request)
    sys.stdout.buffer.write(response.SerializeToString())
def main():
    """Protoc plugin entry point: emit service metadata for the request."""
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.buffer.read())
    response = plugin_pb2.CodeGeneratorResponse()
    generate_service_metadata(request, response)
    sys.stdout.buffer.write(response.SerializeToString())
def main():
    """Replay a previously dumped CodeGeneratorRequest (debugging aid).

    Reads the raw request bytes from 'debug_data_dump' instead of stdin,
    then writes the generated response to stdout as usual.
    """
    with open("debug_data_dump", "rb") as dump:
        raw = dump.read()
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(raw)
    response = generate_response(request)
    sys.stdout.buffer.write(response.SerializeToString())
def run_plugin(input_raw_file, output_raw_file):
    """Dump the parsed CodeGeneratorRequest as JSON.

    Reads a serialized request from input_raw_file, converts it to a dict
    (including source locations), and writes a one-file response named
    'request.json' to output_raw_file.
    """
    request = plugin.CodeGeneratorRequest()
    request.ParseFromString(input_raw_file.read())
    as_dict = to_dict_with_locations(request)
    response = plugin.CodeGeneratorResponse()
    out = response.file.add()
    out.name = 'request.json'
    out.content = json.dumps(as_dict)
    output_raw_file.write(response.SerializeToString())
def test_library_gapic_v1():
    """End-to-end test: GAPIC v1 yaml config -> resource-name artifacts.

    Compiles the test protos with protoc into a FileDescriptorSet, builds a
    v1 gapic config from the yaml, and checks the collected resource-name
    artifacts.  Requires `protoc` on PATH and a ./googleapis checkout —
    TODO confirm both in CI.
    """
    request = plugin_pb2.CodeGeneratorRequest()
    proto_files = ["library_simple.proto", "archive.proto"]
    request.file_to_generate.extend(proto_files)
    request.parameter = "test/testdata/library_gapic_v1.yaml"
    with open(request.parameter) as f:
        gapic_yaml = yaml.load(f, Loader=yaml.SafeLoader)
    # Rebuild the descriptor set into a clean test_output directory.
    file_descriptor_set_file = "test/testdata/test_output/library.desc"
    shutil.rmtree("test/testdata/test_output", True)
    os.mkdir("test/testdata/test_output/")
    subprocess.check_call([
        'protoc', '-o', file_descriptor_set_file, '--include_imports',
        '--proto_path=test/testdata', '--proto_path=.',
        '--proto_path=./googleapis'
    ] + proto_files)
    with open(file_descriptor_set_file, 'rb') as f:
        file_descriptor_set = descriptor_pb2.FileDescriptorSet.FromString(
            f.read())
    request.proto_file.extend(file_descriptor_set.file)
    gapic_config = gapic_utils.create_gapic_config(gapic_yaml)
    resource_name_artifacts = gapic_utils.collect_resource_name_types(
        gapic_config, "com.google.example.library.v1")
    # Formattable single-pattern resource names.
    assert [
        r for r in resource_name_artifacts
        if type(r) is resource_name.ResourceName
        and r.format_string == 'archives/{archive_id}/books/{book_id=**}'
        and r.format_name_lower == 'archivedBookName'
    ]
    assert [
        r for r in resource_name_artifacts
        if type(r) is resource_name.ResourceName
        and r.format_string == 'book/{book_id}'
        and r.format_name_lower == 'shelfBookName'
    ]
    # Fixed-value resource name.
    assert [
        r for r in resource_name_artifacts
        if type(r) is resource_name.ResourceNameFixed
        and r.fixed_value == '_deleted-book_'
        and r.class_name == 'DeletedBook'
    ]
    # Parent (oneof) resource name and its factory class.
    assert [
        r for r in resource_name_artifacts
        if type(r) is resource_name.ParentResourceName
        and r.class_name == 'BookName'
    ]
    assert [
        r for r in resource_name_artifacts
        if type(r) is resource_name.ResourceNameFactory
        and r.class_name == 'BookNames'
    ]
def run_plugin(func):
    """Run `func` as a protoc plugin, mirroring the raw request to disk.

    The incoming request bytes are saved to 'debug_data_dump' so a failing
    invocation can be replayed offline.
    """
    raw = sys.stdin.buffer.read()
    with open("debug_data_dump", "wb+") as dump:
        dump.write(raw)
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(raw)
    response = func(request)
    sys.stdout.buffer.write(response.SerializeToString())
def test_config_parsing(self):
    """GAPIC yaml parsing picks up the fixed 'deleted_book' collection."""
    request = plugin_pb2.CodeGeneratorRequest(parameter=GAPIC_CONFIG_PATH)
    gapic_config = gapic_utils.read_from_gapic_yaml(request)
    fixed = gapic_config.fixed_collections
    self.assertEqual(1, len(fixed))
    self.assertEqual("deleted_book", fixed['deleted_book'].entity_name)
    self.assertEqual("_deleted-book_", fixed['deleted_book'].fixed_value)
    self.assertEqual("deleted_book", fixed['deleted_book'].java_entity_name)
def main() -> None:
    """Protoc plugin entry point for the Cython module generator.

    Plugin options (e.g. --prefix) arrive via request.parameter and are
    parsed with argparse.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--prefix", type=str, default="")
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.buffer.read())
    args = parser.parse_args(request.parameter.split())
    response = plugin_pb2.CodeGeneratorResponse()
    cython_files = ProtoFile.from_file_descriptor_protos(
        request.proto_file, set(request.file_to_generate), args.prefix)
    write_module(cython_files, response)
    sys.stdout.buffer.write(response.SerializeToString())
def Plugin(output_suffix, visitor):
    """Protoc plugin entry point.

    This defines protoc plugin and manages the stdin -> stdout flow. An
    api_proto_plugin is defined by the provided visitor.

    See
    http://www.expobrain.net/2015/09/13/create-a-plugin-for-google-protocol-buffer/
    for further details on protoc plugin basics.

    Args:
      output_suffix: output files are generated alongside their corresponding
        input .proto, with this filename suffix.
      visitor: visitor.Visitor defining the business logic of the plugin.
    """
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.buffer.read())
    response = plugin_pb2.CodeGeneratorResponse()
    cprofile_enabled = os.getenv('CPROFILE_ENABLED')
    # We use request.file_to_generate rather than request.file_proto here since we
    # are invoked inside a Bazel aspect, each node in the DAG will be visited once
    # by the aspect and we only want to generate docs for the current node.
    for file_to_generate in request.file_to_generate:
        # Find the FileDescriptorProto for the file we actually are generating.
        file_proto = [pf for pf in request.proto_file if pf.name == file_to_generate][0]
        f = response.file.add()
        f.name = file_proto.name + output_suffix
        if cprofile_enabled:
            pr = cProfile.Profile()
            pr.enable()
        # Run the visitor over the file descriptor to produce the output
        # content.  (Previous comment claiming "we don't actually generate
        # any RST" was stale.)
        f.content = traverse.TraverseFile(file_proto, visitor)
        if cprofile_enabled:
            pr.disable()
            stats_stream = io.StringIO()
            ps = pstats.Stats(pr, stream=stats_stream).sort_stats(os.getenv('CPROFILE_SORTBY', 'cumulative'))
            stats_file = response.file.add()
            stats_file.name = file_proto.name + output_suffix + '.profile'
            ps.print_stats()
            stats_file.content = stats_stream.getvalue()
        # Also include the original FileDescriptorProto as text proto, this is
        # useful when debugging.
        descriptor_file = response.file.add()
        descriptor_file.name = file_proto.name + ".descriptor.proto"
        descriptor_file.content = str(file_proto)
    sys.stdout.buffer.write(response.SerializeToString())
def main():
    """Main generation method.

    Reads a CodeGeneratorRequest from stdin, generates C++ wrappers plus
    packaging files grouped by proto package, and writes the serialized
    response to stdout.  Exits with status 1 if any requested proto lacks
    a package declaration.
    """
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.read())
    response = plugin_pb2.CodeGeneratorResponse()
    # Group the requested files by proto package.
    packages = {}
    to_generate = set(request.file_to_generate)
    for proto_file in request.proto_file:
        # The request also carries transitive dependencies; only generate
        # for the files protoc was directly invoked with.
        if proto_file.name not in to_generate:
            continue
        if not proto_file.package:
            sys.stderr.write(
                '%s: package definition required, but not found\n' %
                proto_file.name)
            sys.exit(1)
        write_proto_cc(response, proto_file)
        package = proto_file.package.lstrip('.')
        # setdefault replaces the original manual if/else grouping.
        packages.setdefault(package, []).append(proto_file)
    # FIX: .items() instead of PY2-only .iteritems() (works on both 2 and 3).
    package_list = [{
        'name': k,
        'children': v
    } for (k, v) in packages.items()]
    for package in package_list:
        write_module_cc(response, package)
    write_setup_py(response, package_list)
    write_tests(response, package_list)
    write_manifest(response, package_list)
    create_namespaces(response, package_list)
    sys.stdout.write(response.SerializeToString())
def main(data):
    """Generate Java resource-name types for a serialized request.

    Args:
      data: serialized CodeGeneratorRequest bytes.

    Returns:
      Serialized CodeGeneratorResponse bytes.
    """
    request = plugin.CodeGeneratorRequest()
    request.ParseFromString(data)
    java_package = resolve_java_package_name(request)
    gapic_config = gapic_utils.read_from_gapic_yaml(request.parameter)
    response = plugin.CodeGeneratorResponse()
    generate_resource_name_types(response, gapic_config, java_package)
    return response.SerializeToString()
def main():
    """Protoc plugin entry point: generate RST docs for the invoked proto."""
    # Uncomment to enable debug logging.
    # logger.setLevel(logging.DEBUG)
    logger.debug("Python version info: %r" % sys.version_info)
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.read())
    response = plugin_pb2.CodeGeneratorResponse()
    # The request includes dependent proto files too; only the file the
    # protoc compiler was directly invoked with gets documented.
    direct_proto = _get_direct_proto_file_from_request(request)
    Protodoc(direct_proto, response).generate_rsts()
    sys.stdout.write(response.SerializeToString())
def test_explicit_fixed_name_config_parsing(self):
    """V1 config parsing yields the fixed collection plus two formattable ones."""
    request = plugin_pb2.CodeGeneratorRequest(
        parameter=GAPIC_CONFIG_V1_PATH,
    )
    gapic_config = gapic_utils.read_from_gapic_yaml(request)
    fixed = gapic_config.fixed_collections
    self.assertEqual(1, len(fixed))
    self.assertEqual("deleted_book", fixed['deleted_book'].entity_name)
    self.assertEqual("_deleted-book_", fixed['deleted_book'].fixed_value)
    self.assertEqual("deleted_book", fixed['deleted_book'].java_entity_name)
    self.assertEqual(2, len(gapic_config.collection_configs))
    # Project name won't be included because it is a common resource name.
    self.assertIn("book", gapic_config.collection_configs)
    self.assertIn("archived_book", gapic_config.collection_configs)
def main() -> None:
    """Protoc plugin entry point: write mypy stubs for the request."""
    request = plugin.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.buffer.read())
    response = plugin.CodeGeneratorResponse()
    generate_mypy_stubs(Descriptors(request), response)
    sys.stdout.buffer.write(response.SerializeToString())
def main(data):
    """Generate resource-name types for every Java package in the request.

    Args:
      data: serialized CodeGeneratorRequest bytes.

    Returns:
      Serialized CodeGeneratorResponse bytes.
    """
    request = plugin.CodeGeneratorRequest()
    request.ParseFromString(data)
    gapic_config = gapic_utils.read_from_gapic_yaml(request)
    response = plugin.CodeGeneratorResponse()
    for java_package in resolve_java_package_names(request):
        generate_resource_name_types(response, gapic_config, java_package)
    # Declare support for proto3 optional fields.
    response.supported_features = plugin.CodeGeneratorResponse.FEATURE_PROTO3_OPTIONAL
    return response.SerializeToString()
def main():
    """The plugin's main entry point.

    Reads the request from stdin, delegates to generate_code(), and writes
    the serialized response to stdout.
    """
    request = plugin.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.buffer.read())
    response = plugin.CodeGeneratorResponse()
    generate_code(request, response)
    sys.stdout.buffer.write(response.SerializeToString())
def plugin(output_descriptors, traverser=None):
    """Protoc plugin entry point.

    This defines protoc plugin and manages the stdin -> stdout flow. An
    api_proto_plugin is defined by the provided visitor.

    See
    http://www.expobrain.net/2015/09/13/create-a-plugin-for-google-protocol-buffer/
    for further details on protoc plugin basics.

    Args:
      output_descriptors: a list of OutputDescriptors.
      traverser: optional traversal callable; defaults to
        traverse.traverse_file.
    """
    if traverser is None:
        traverser = traverse.traverse_file
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.buffer.read())
    response = plugin_pb2.CodeGeneratorResponse()
    # We use request.file_to_generate rather than request.file_proto here
    # since we are invoked inside a Bazel aspect: each node in the DAG is
    # visited once and we only want to generate docs for the current node.
    for file_to_generate in request.file_to_generate:
        # Find the FileDescriptorProto for the file we actually generate.
        matches = [
            pf for pf in request.proto_file if pf.name == file_to_generate
        ]
        file_proto = matches[0]
        for od in output_descriptors:
            out = response.file.add()
            out.name = f"{file_proto.name}{od.output_suffix}"
            # Forward "k=v,..." plugin options only to descriptors that
            # want them.
            if request.HasField("parameter") and od.want_params:
                params = dict(
                    item.split('=') for item in request.parameter.split(','))
                xformed = od.xform(file_proto, params)
                factory = od.visitor_factory(params)
            else:
                xformed = od.xform(file_proto)
                factory = od.visitor_factory()
            out.content = traverser(xformed, factory)
    sys.stdout.buffer.write(response.SerializeToString())
def main():
    """Protoc plugin entry point (stdin request -> stdout response).

    See
    https://www.expobrain.net/2015/09/13/create-a-plugin-for-google-protocol-buffer/
    """
    request = plugin.CodeGeneratorRequest()
    request.ParseFromString(sys.stdin.buffer.read())
    response = plugin.CodeGeneratorResponse()
    generate_code(request, response)
    sys.stdout.buffer.write(response.SerializeToString())
def main():
    """Plugin entrypoint."""
    # Log to stderr; stdout is reserved for the serialized protobuf payload.
    with logbook.StderrHandler().applicationbound():
        request = plugin.CodeGeneratorRequest()
        request.ParseFromString(sys.stdin.buffer.read())
        response = plugin.CodeGeneratorResponse()
        generate(request, response)
        sys.stdout.buffer.write(response.SerializeToString())
def main():
    """Protoc plugin entry point, compatible with Python 2 and 3."""
    # Uncomment to enable debug logging.
    # logger.setLevel(logging.DEBUG)
    logger.debug("Python version info: %r" % str(sys.version_info))
    # Raw protobuf bytes must use the binary .buffer on PY3; PY2 file
    # objects have no .buffer (https://stackoverflow.com/a/23932488/1254292).
    stdout = getattr(sys.stdout, 'buffer', sys.stdout)
    stdin = getattr(sys.stdin, 'buffer', sys.stdin)
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(stdin.read())
    response = plugin_pb2.CodeGeneratorResponse()
    # Dependent proto files are included in the request; only process the
    # proto file the protoc compiler was directly invoked with.
    proto_file = _get_direct_proto_file_from_request(request)
    Protodoc(proto_file, response).generate_rsts()
    stdout.write(response.SerializeToString())