def test_prior_protos():
    """Verify that types owned by prior protos are resolved into methods."""
    L = descriptor_pb2.SourceCodeInfo.Location

    # Build a prior proto that mimics google/protobuf/empty.proto.
    empty_proto = api.Proto.build(
        make_file_pb2(
            name='empty.proto',
            package='google.protobuf',
            messages=(make_message_pb2(name='Empty'),),
        ),
        file_to_generate=False,
        naming=make_naming(),
    )

    # Declare a service with one RPC that uses the prior proto's message.
    service_pb = descriptor_pb2.ServiceDescriptorProto(
        name='PingService',
        method=(
            descriptor_pb2.MethodDescriptorProto(
                name='Ping',
                input_type='google.protobuf.Empty',
                output_type='google.protobuf.Empty',
            ),
        ),
    )

    # Attach documentation to the service and its method.
    locations = (
        L(path=(6, 0), leading_comments='This is the PingService service.'),
        L(path=(6, 0, 2, 0), leading_comments='This is the Ping method.'),
    )

    # Assemble the file descriptor that ties everything together.
    fdp = make_file_pb2(
        package='google.example.v1',
        services=(service_pb,),
        locations=locations,
    )

    # Build the proto wrapper, supplying the prior proto for resolution.
    proto = api.Proto.build(
        fdp,
        file_to_generate=True,
        prior_protos={'google/protobuf/empty.proto': empty_proto},
        naming=make_naming(),
    )

    # The service lives here; the message stays on the prior proto.
    assert len(proto.services) == 1
    assert len(empty_proto.messages) == 1
    assert len(proto.messages) == 0

    service = proto.services['google.example.v1.PingService']
    assert service.meta.doc == 'This is the PingService service.'
    assert len(service.methods) == 1

    method = service.methods['Ping']
    assert isinstance(method.input, wrappers.MessageType)
    assert isinstance(method.output, wrappers.MessageType)
    assert method.input.name == 'Empty'
    assert method.output.name == 'Empty'
    assert method.meta.doc == 'This is the Ping method.'
def test_lro():
    """Verify that an LRO-annotated method builds its messages correctly."""
    # Build a prior proto that mimics google/longrunning/operations.proto.
    lro_proto = api.Proto.build(
        make_file_pb2(
            name='operations.proto',
            package='google.longrunning',
            messages=(make_message_pb2(name='Operation'),),
        ),
        file_to_generate=False,
        naming=make_naming(),
    )

    # Declare a method carrying LRO annotations.
    method_pb2 = descriptor_pb2.MethodDescriptorProto(
        name='AsyncDoThing',
        input_type='google.example.v3.AsyncDoThingRequest',
        output_type='google.longrunning.Operation',
    )
    method_pb2.options.Extensions[operations_pb2.operation_info].MergeFrom(
        operations_pb2.OperationInfo(
            response_type='google.example.v3.AsyncDoThingResponse',
            metadata_type='google.example.v3.AsyncDoThingMetadata',
        ),
    )

    # Declare the service owning the RPC.
    service_pb = descriptor_pb2.ServiceDescriptorProto(
        name='LongRunningService',
        method=(method_pb2,),
    )

    # Declare the messages, including the ones named in the annotation.
    messages = (
        make_message_pb2(name='AsyncDoThingRequest', fields=()),
        make_message_pb2(name='AsyncDoThingResponse', fields=()),
        make_message_pb2(name='AsyncDoThingMetadata', fields=()),
    )

    # Assemble the file descriptor that ties everything together.
    fdp = make_file_pb2(
        package='google.example.v3',
        messages=messages,
        services=(service_pb,),
    )

    # Build the proto wrapper with the LRO prior proto available.
    proto = api.Proto.build(
        fdp,
        file_to_generate=True,
        prior_protos={'google/longrunning/operations.proto': lro_proto},
        naming=make_naming(),
    )

    # Sanity-check the resulting structures.
    assert len(proto.services) == 1
    assert len(proto.messages) == 3
    assert len(lro_proto.messages) == 1
def test_messages():
    """Messages and their fields are wrapped with documentation attached."""
    L = descriptor_pb2.SourceCodeInfo.Location

    message_pb = make_message_pb2(
        name='Foo',
        fields=(make_field_pb2(name='bar', type=3, number=1),),
    )
    locations = (
        L(path=(4, 0), leading_comments='This is the Foo message.'),
        L(path=(4, 0, 2, 0), leading_comments='This is the bar field.'),
    )
    fdp = make_file_pb2(
        messages=(message_pb,),
        locations=locations,
        package='google.example.v2',
    )

    # Build the proto wrapper.
    proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming())

    # The message and its field each carry their leading comments.
    assert len(proto.messages) == 1
    message = proto.messages['google.example.v2.Foo']
    assert isinstance(message, wrappers.MessageType)
    assert message.meta.doc == 'This is the Foo message.'
    assert len(message.fields) == 1
    assert message.fields['bar'].meta.doc == 'This is the bar field.'
def test_versioned_module_name():
    """The versioned module name lowercases the name and appends the version."""
    naming = make_naming(
        name='Genie',
        namespace=['Agrabah', 'Lamp'],
        version='v2',
    )
    assert naming.versioned_module_name == 'genie_v2'
def test_proto_build():
    """Proto.build returns a Proto instance for a bare file descriptor."""
    fdp = descriptor_pb2.FileDescriptorProto(
        name='my_proto_file.proto',
        package='google.example.v1',
    )
    proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming())
    assert isinstance(proto, api.Proto)
def test_enums():
    """Enums and their values are wrapped, with per-value documentation."""
    L = descriptor_pb2.SourceCodeInfo.Location

    enum_pb = descriptor_pb2.EnumDescriptorProto(
        name='Silly',
        value=(
            descriptor_pb2.EnumValueDescriptorProto(name='ZERO', number=0),
            descriptor_pb2.EnumValueDescriptorProto(name='ONE', number=1),
            descriptor_pb2.EnumValueDescriptorProto(name='THREE', number=3),
        ),
    )
    fdp = make_file_pb2(
        package='google.enum.v1',
        enums=(enum_pb,),
        locations=(
            L(path=(5, 0), leading_comments='This is the Silly enum.'),
            L(path=(5, 0, 2, 0), leading_comments='This is the zero value.'),
            L(path=(5, 0, 2, 1), leading_comments='This is the one value.'),
        ),
    )
    proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming())

    assert len(proto.enums) == 1
    enum = proto.enums['google.enum.v1.Silly']
    assert enum.meta.doc == 'This is the Silly enum.'
    assert isinstance(enum, wrappers.EnumType)
    assert len(enum.values) == 3
    assert all(isinstance(v, wrappers.EnumValueType) for v in enum.values)

    # The first two values carry comments; THREE has none, so its doc
    # is the empty string.
    assert enum.values[0].name == 'ZERO'
    assert enum.values[0].meta.doc == 'This is the zero value.'
    assert enum.values[1].name == 'ONE'
    assert enum.values[1].meta.doc == 'This is the one value.'
    assert enum.values[2].name == 'THREE'
    assert enum.values[2].meta.doc == ''
def test_messages_nested():
    """Nested messages appear in ``all_messages``; ``messages`` is top-level only."""
    # Test that a nested message works properly.
    message_pbs = (
        make_message_pb2(
            name='Foo',
            nested_type=(make_message_pb2(name='Bar'),),
        ),
    )
    fdp = make_file_pb2(
        messages=message_pbs,
        package='google.example.v3',
    )

    # Make the proto object.
    proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming())

    # Set short variables for the names.
    foo = 'google.example.v3.Foo'
    bar = 'google.example.v3.Foo.Bar'

    # Both the outer and the nested message are in `all_messages`.
    assert len(proto.all_messages) == 2
    assert proto.all_messages[foo].name == 'Foo'
    assert proto.all_messages[bar].name == 'Bar'

    # Assert that the `messages` property only shows top-level messages,
    # and that it shares wrapper objects with `all_messages`.
    # Bug fix: the original asserted `proto.messages[foo] is
    # proto.messages[foo]`, which is a tautology; the intended check is
    # identity against the `all_messages` entry.
    assert len(proto.messages) == 1
    assert proto.messages[foo] is proto.all_messages[foo]
    assert bar not in proto.messages
def test_lro_missing_annotation():
    """A method returning an Operation without operation_info must raise."""
    # Build a prior proto that mimics google/longrunning/operations.proto.
    lro_proto = api.Proto.build(
        make_file_pb2(
            name='operations.proto',
            package='google.longrunning',
            messages=(make_message_pb2(name='Operation'),),
        ),
        file_to_generate=False,
        naming=make_naming(),
    )

    # Declare a method with an LRO return type but no annotation.
    method_pb2 = descriptor_pb2.MethodDescriptorProto(
        name='AsyncDoThing',
        input_type='google.example.v3.AsyncDoThingRequest',
        output_type='google.longrunning.Operation',
    )

    # Declare the service owning the RPC.
    service_pb = descriptor_pb2.ServiceDescriptorProto(
        name='LongRunningService',
        method=(method_pb2,),
    )

    # Only the request message exists; no response/metadata annotation.
    messages = (make_message_pb2(name='AsyncDoThingRequest', fields=()),)

    # Assemble the file descriptor that ties everything together.
    fdp = make_file_pb2(
        package='google.example.v3',
        messages=messages,
        services=(service_pb,),
    )

    # Building the proto object must fail loudly.
    with pytest.raises(TypeError):
        api.Proto.build(
            fdp,
            file_to_generate=True,
            prior_protos={'google/longrunning/operations.proto': lro_proto},
            naming=make_naming(),
        )
def test_not_target_file():
    """Establish that services are ignored for untargeted protos.

    Docstring fix: the original said services are *not* ignored, which
    contradicts the assertion below that an untargeted proto ends up
    with zero services.
    """
    message_pb = make_message_pb2(
        name='Foo',
        fields=(make_field_pb2(name='bar', type=3, number=1),),
    )
    service_pb = descriptor_pb2.ServiceDescriptorProto()
    fdp = make_file_pb2(messages=(message_pb,), services=(service_pb,))

    # Actually make the proto object, marked as not targeted.
    proto = api.Proto.build(fdp, file_to_generate=False, naming=make_naming())

    # The proto object should have the message, but no service.
    assert len(proto.messages) == 1
    assert len(proto.services) == 0
def test_messages_recursive():
    """A message used as a field type inside itself still resolves."""
    message_pbs = (
        make_message_pb2(
            name='Foo',
            fields=(
                make_field_pb2(
                    name='foo',
                    number=1,
                    type_name='.google.example.v3.Foo',
                ),
            ),
        ),
    )
    fdp = make_file_pb2(
        messages=message_pbs,
        package='google.example.v3',
    )

    # Build the proto wrapper.
    proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming())

    # The self-referential field resolves to the same message wrapper.
    assert len(proto.messages) == 1
    foo = proto.messages['google.example.v3.Foo']
    assert foo.fields['foo'].message == proto.messages['google.example.v3.Foo']
def test_proto_builder_constructor():
    """Children are dispatched, unmodified, to the right loaders in order."""
    sentinel_message = descriptor_pb2.DescriptorProto()
    sentinel_enum = descriptor_pb2.EnumDescriptorProto()
    sentinel_service = descriptor_pb2.ServiceDescriptorProto()

    # The sentinels carry no data on purpose: this test only verifies that
    # each kind of child is forwarded to the matching load method.
    fdp = make_file_pb2(
        messages=(sentinel_message,),
        enums=(sentinel_enum,),
        services=(sentinel_service,),
    )

    # Patch out the child loader and run the constructor.
    with mock.patch.object(api._ProtoBuilder, '_load_children') as lc:
        pb = api._ProtoBuilder(
            fdp,
            file_to_generate=True,
            naming=make_naming(),
        )

        # One call per child type: enums, messages, services.
        assert lc.call_count == 3

        # Enums are loaded first...
        _, args, _ = lc.mock_calls[0]
        assert args[0][0] == sentinel_enum
        assert args[1] == pb._load_enum

        # ...then messages...
        _, args, _ = lc.mock_calls[1]
        assert args[0][0] == sentinel_message
        assert args[1] == pb._load_message

        # ...and services last.
        _, args, _ = lc.mock_calls[2]
        assert args[0][0] == sentinel_service
        assert args[1] == pb._load_service
def test_messages_reverse_declaration_order():
    """A field may reference a message declared later in the same file."""
    message_pbs = (
        make_message_pb2(
            name='Foo',
            fields=(
                make_field_pb2(
                    name='bar',
                    number=1,
                    type_name='.google.example.v3.Bar',
                ),
            ),
        ),
        make_message_pb2(name='Bar'),
    )
    fdp = make_file_pb2(
        messages=message_pbs,
        package='google.example.v3',
    )

    # Build the proto wrapper.
    proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming())

    # The forward reference resolves to the later-declared message.
    assert len(proto.messages) == 2
    foo = proto.messages['google.example.v3.Foo']
    assert foo.fields['bar'].message == proto.messages['google.example.v3.Bar']
def test_warehouse_package_name_multiple_words():
    """Multi-word API names become lowercased, hyphenated package names."""
    naming = make_naming(name='Big Query', namespace=[])
    assert naming.warehouse_package_name == 'big-query'
def test_warehouse_package_name_with_namespace():
    """Namespace parts are prepended, hyphen-joined, to the package name."""
    naming = make_naming(
        name='BigQuery',
        namespace=('Google', 'Cloud'),
    )
    assert naming.warehouse_package_name == 'google-cloud-bigquery'
def test_warehouse_package_name_no_namespace():
    """With no namespace, the package name is just the lowercased name."""
    naming = make_naming(name='BigQuery', namespace=[])
    assert naming.warehouse_package_name == 'bigquery'
def test_namespace_packages():
    """Namespace packages are the cumulative, lowercased namespace parts."""
    naming = make_naming(name='BigQuery', namespace=('Google', 'Cloud'))
    assert naming.namespace_packages == ('google', 'google.cloud')
def test_proto_names_import_collision_flattening():
    """A flattened field whose name collides with a module forces an alias."""
    # Build a prior proto that mimics google/longrunning/operations.proto.
    lro_proto = api.Proto.build(
        make_file_pb2(
            name='operations.proto',
            package='google.longrunning',
            messages=(make_message_pb2(name='Operation'),),
        ),
        file_to_generate=False,
        naming=make_naming(),
    )

    fd = (
        make_file_pb2(
            name='mollusc.proto',
            package='google.animalia.mollusca',
            messages=(
                make_message_pb2(name='Mollusc',),
                make_message_pb2(name='MolluscResponse',),
                make_message_pb2(name='MolluscMetadata',),
            ),
        ),
        make_file_pb2(
            name='squid.proto',
            package='google.animalia.mollusca',
            messages=(
                make_message_pb2(
                    name='IdentifySquidRequest',
                    fields=(
                        make_field_pb2(
                            name='mollusc',
                            number=1,
                            type_name='.google.animalia.mollusca.Mollusc',
                        ),
                    ),
                ),
                make_message_pb2(
                    name='IdentifySquidResponse',
                    fields=(),
                ),
            ),
            services=(
                descriptor_pb2.ServiceDescriptorProto(
                    name='SquidIdentificationService',
                    method=(
                        descriptor_pb2.MethodDescriptorProto(
                            name='IdentifyMollusc',
                            input_type='google.animalia.mollusca.IdentifySquidRequest',
                            output_type='google.longrunning.Operation',
                        ),
                    ),
                ),
            ),
        ),
    )

    method_options = fd[1].service[0].method[0].options
    # Notice that a signature field collides with the name of an imported
    # module ('mollusc').
    method_options.Extensions[client_pb2.method_signature].append('mollusc')
    method_options.Extensions[operations_pb2.operation_info].MergeFrom(
        operations_pb2.OperationInfo(
            response_type='google.animalia.mollusca.MolluscResponse',
            metadata_type='google.animalia.mollusca.MolluscMetadata',
        ))

    api_schema = api.API.build(
        fd,
        package='google.animalia.mollusca',
        prior_protos={'google/longrunning/operations.proto': lro_proto},
    )

    # Collect every python import referenced by every method.
    actual_imports = {
        ref_type.ident.python_import
        for service in api_schema.services.values()
        for method in service.methods.values()
        for ref_type in method.ref_types
    }

    # The colliding 'mollusc' module must have been aliased.
    expected_imports = {
        imp.Import(
            package=('google', 'animalia', 'mollusca', 'types'),
            module='mollusc',
            alias='gam_mollusc',
        ),
        imp.Import(
            package=('google', 'animalia', 'mollusca', 'types'),
            module='squid',
        ),
        imp.Import(
            package=('google', 'api_core'),
            module='operation',
        ),
        imp.Import(
            package=('google', 'api_core'),
            module='operation_async',
        ),
    }
    assert expected_imports == actual_imports

    # The LRO response type must use the aliased import as well.
    method = (
        api_schema
        .services['google.animalia.mollusca.SquidIdentificationService']
        .methods['IdentifyMollusc']
    )
    actual_response_import = method.lro.response_type.ident.python_import
    expected_response_import = imp.Import(
        package=('google', 'animalia', 'mollusca', 'types'),
        module='mollusc',
        alias='gam_mollusc',
    )
    assert actual_response_import == expected_response_import
def test_long_name():
    """The long name joins the namespace parts and the API name with spaces."""
    naming = make_naming(name='Genie', namespace=['Agrabah', 'Lamp'])
    assert naming.long_name == 'Agrabah Lamp Genie'
def test_cross_file_lro():
    """LRO response/metadata types resolve even when defined in another file.

    Protobuf annotations for long-running operations use strings to name
    types.  As far as the protobuf compiler is concerned these do not
    reference the *types* at all, so the proto file that owns the types
    does not need to be imported by the file defining the service.  That
    creates a potential issue when building rich structures around
    LRO-returning methods; this test verifies the issue is handled.
    """
    # Build a prior proto that mimics google/longrunning/operations.proto.
    lro_proto = api.Proto.build(
        make_file_pb2(
            name='operations.proto',
            package='google.longrunning',
            messages=(make_message_pb2(name='Operation'),),
        ),
        file_to_generate=False,
        naming=make_naming(),
    )

    # Declare a method with LRO annotations.
    method_pb2 = descriptor_pb2.MethodDescriptorProto(
        name='AsyncDoThing',
        input_type='google.example.v3.AsyncDoThingRequest',
        output_type='google.longrunning.Operation',
    )
    method_pb2.options.Extensions[operations_pb2.operation_info].MergeFrom(
        operations_pb2.OperationInfo(
            response_type='google.example.v3.AsyncDoThingResponse',
            metadata_type='google.example.v3.AsyncDoThingMetadata',
        ),
    )

    # The file declaring the service only contains the request message.
    service_file = make_file_pb2(
        name='service_file.proto',
        package='google.example.v3',
        messages=(make_message_pb2(name='AsyncDoThingRequest', fields=()),),
        services=(
            descriptor_pb2.ServiceDescriptorProto(
                name='LongRunningService',
                method=(method_pb2,),
            ),
        ),
    )

    # The annotated response/metadata messages live in a distinct file
    # that is not explicitly imported into the service's file.
    messages_file = make_file_pb2(
        name='messages_file.proto',
        package='google.example.v3',
        messages=(
            make_message_pb2(name='AsyncDoThingResponse', fields=()),
            make_message_pb2(name='AsyncDoThingMetadata', fields=()),
        ),
    )

    api_schema = api.API.build(
        file_descriptors=(service_file, messages_file),
        package='google.example.v3',
        prior_protos={'google/longrunning/operations.proto': lro_proto},
    )

    # The method's LRO types resolve despite living in the other file.
    method = (
        api_schema.all_protos['service_file.proto']
        .services['google.example.v3.LongRunningService']
        .methods['AsyncDoThing']
    )
    assert method.lro
    assert method.lro.response_type.name == 'AsyncDoThingResponse'
    assert method.lro.metadata_type.name == 'AsyncDoThingMetadata'