class ClientSettings(SOASettings):
    """
    Settings for clients. Compared to the base `SOASettings`, `middleware` entries must be subclasses of
    `ClientMiddleware` and the `transport` must be a subclass of `BaseClientTransport`. The configuration
    settings schema for each middleware and transport entry is switched automatically according to the
    schema registered for its `path`.
    """

    # Narrow the generic SOASettings schema to client-specific base classes.
    schema = {
        'middleware': fields.List(
            fields.ClassConfigurationSchema(base_class=ClientMiddleware),
            description=(
                'The list of all `ClientMiddleware` objects that should be applied to requests made from this '
                'client to the associated service'
            ),
        ),
        'transport': fields.ClassConfigurationSchema(base_class=BaseClientTransport),
    }  # type: SettingsSchema

    # The Redis Gateway transport is the default client transport.
    defaults = {
        'transport': {
            'path': 'pysoa.common.transport.redis_gateway.client:RedisClientTransport',
        },
    }  # type: SettingsData
def test_prefix_with_config(self):
    """A recorder configuration with both a prefix and a publisher config validates and constructs."""
    schema = fields.ClassConfigurationSchema(base_class=MetricsRecorder)
    settings = {
        'path': 'pymetrics.recorders.default.DefaultMetricsRecorder',
        'kwargs': {
            'prefix': 'goodbye.mars',
            'config': {
                'version': 2,
                'publishers': [{'path': 'pymetrics.publishers.null.NullPublisher'}],
            },
        },
    }  # type: Dict[six.text_type, Any]
    assert not schema.errors(settings)

    # Successful validation resolves the class and stores it at settings['object'].
    instance = settings['object'](**settings['kwargs'])
    assert isinstance(instance, DefaultMetricsRecorder)
    assert instance.prefix == 'goodbye.mars'
    assert instance._configuration is not None
    assert len(instance._configuration.publishers) == 1
    assert isinstance(instance._configuration.publishers[0], NullPublisher)
class SettingsToTest(settings.Settings):
    """Settings class exercising one of each interesting Conformity field type for documentation tests."""

    schema: settings.SettingsSchema = {
        'one': fields.Dictionary({
            'a': fields.ClassConfigurationSchema(
                base_class=ClassUsingAttrs27HintsToTest,
                description='Nifty schema.',
            ),
            'b': fields.PythonPath(
                value_schema=fields.UnicodeString(),
                description='Must be a path, yo.',
            ),
            'c': fields.TypeReference(
                base_classes=ClassHoldingSigsToTest,
                description='Refer to that thing!',
            ),
        }),
        'two': fields.SchemalessDictionary(key_type=fields.UnicodeString(), value_type=fields.Boolean()),
        'three': fields.List(fields.Integer()),
        'four': fields.Nullable(fields.Set(fields.ByteString())),
        'five': fields.Any(fields.Integer(), fields.Float()),
        'six': fields.ObjectInstance(valid_type=ClassUsingAttrs27HintsToTest, description='Y u no instance?'),
        'seven': fields.Polymorph(
            'thing',
            {
                'thing1': fields.Dictionary({'z': fields.Boolean()}, allow_extra_keys=True),
                'thing2': fields.Dictionary({'y': fields.Boolean()}, allow_extra_keys=True, optional_keys=('y', )),
            },
        ),
    }

    # Only 'one.b' and 'three' carry defaults; everything else must be supplied.
    defaults: settings.SettingsData = {
        'one': {'b': 'foo.bar:Class'},
        'three': [1, 5, 7],
    }
class LocalServerSettings(ServerSettings):
    """Server settings whose transport is restricted to `LocalServerTransport`, used by default."""

    schema = {
        'transport': fields.ClassConfigurationSchema(base_class=LocalServerTransport),
    }
    defaults = {
        'transport': {'path': 'pysoa.common.transport.local:LocalServerTransport'},
    }
class RedisServerSettings(ServerSettings):
    """Server settings whose transport is restricted to `RedisServerTransport`, used by default."""

    schema = {
        'transport': fields.ClassConfigurationSchema(base_class=RedisServerTransport),
    }
    defaults = {
        'transport': {'path': 'pysoa.common.transport.redis_gateway.server:RedisServerTransport'},
    }
class Http2TransportSchema(fields.Dictionary):
    """Conformity schema for the constructor kwargs shared by the Http2 client and server transports."""

    contents = {
        'backend_layer_kwargs': fields.Dictionary(
            {
                'http_host': fields.UnicodeString(),
                'http_port': fields.UnicodeString(),
            },
            optional_keys=(),
            allow_extra_keys=False,
        ),
        'backend_type': fields.Constant(
            *HTTP2_BACKEND_TYPES,
            description='Which backend (hyper-h2 or twisted) should be used for this Http2 transport'
        ),
        'message_expiry_in_seconds': fields.Integer(
            description='How long after a message is sent that it is considered expired, dropped from queue',
        ),
        'queue_capacity': fields.Integer(
            description='The capacity of the message queue to which this transport will send messages',
        ),
        'queue_full_retries': fields.Integer(
            description='How many times to retry sending a message to a full queue before giving up',
        ),
        'receive_timeout_in_seconds': fields.Integer(
            description='How long to block waiting on a message to be received',
        ),
        'default_serializer_config': fields.ClassConfigurationSchema(
            base_class=BaseSerializer,
            description='The configuration for the serializer this transport should use.',
        ),
    }

    # Every top-level key above may be omitted from the configuration.
    optional_keys = (
        'backend_layer_kwargs',
        'message_expiry_in_seconds',
        'queue_capacity',
        'queue_full_retries',
        'receive_timeout_in_seconds',
        'default_serializer_config',
    )

    description = 'The constructor kwargs for the Http2 client and server transports.'
class SOASettings(ConformitySettings):
    """Base settings common to both clients and servers."""

    schema = {
        # The transport and each middleware entry are configured as a class path plus constructor kwargs.
        'transport': fields.ClassConfigurationSchema(),
        'middleware': fields.List(
            fields.ClassConfigurationSchema(),
            description='The list of all middleware objects that should be applied to this server or client',
        ),
        'metrics': fields.ClassConfigurationSchema(
            base_class=MetricsRecorder,
            description='Configuration for defining a usage and performance metrics recorder.',
        ),
    }  # type: SettingsSchema

    # No middleware and a no-op metrics recorder unless configured otherwise.
    defaults = {
        'middleware': [],
        'metrics': {'path': 'pymetrics.recorders.noop:NonOperationalMetricsRecorder'},
    }  # type: SettingsData
def test_prefix_no_config(self):
    """A recorder configuration with only a prefix validates and constructs with no publisher config."""
    schema = fields.ClassConfigurationSchema(base_class=MetricsRecorder)
    settings = {
        'path': 'pymetrics.recorders.default.DefaultMetricsRecorder',
        'kwargs': {'prefix': 'hello.world'},
    }  # type: Dict[six.text_type, Any]
    assert not schema.errors(settings)

    # Successful validation resolves the class and stores it at settings['object'].
    instance = settings['object'](**settings['kwargs'])
    assert isinstance(instance, DefaultMetricsRecorder)
    assert instance.prefix == 'hello.world'
    assert instance._configuration is None
class SOASettings(Settings):
    """Base settings common to both clients and servers."""

    schema = {
        # The transport and each middleware entry are configured as a class path plus constructor kwargs.
        'transport': fields.ClassConfigurationSchema(),
        'middleware': fields.List(
            fields.ClassConfigurationSchema(),
            description=(
                'The list of all middleware objects that should be applied to this server or client'
            ),
        ),
        'metrics': MetricsSchema(),
    }

    # No middleware and a no-op metrics recorder unless configured otherwise.
    defaults = {
        'middleware': [],
        'metrics': {'path': 'pysoa.common.metrics:NoOpMetricsRecorder'},
    }
class RedisTransportSchema(fields.Dictionary):
    """Conformity schema for the constructor kwargs shared by the Redis client and server transports."""

    contents = {
        # Arguments forwarded to the Redis backend connection manager.
        'backend_layer_kwargs': fields.Dictionary(
            {
                'connection_kwargs': fields.SchemalessDictionary(
                    description='The arguments used when creating all Redis connections (see Redis-Py docs)',
                ),
                'hosts': fields.List(
                    # Each host is either an ("address", port) tuple or a plain address string.
                    fields.Any(
                        fields.Tuple(fields.UnicodeString(), fields.Integer()),
                        fields.UnicodeString(),
                    ),
                    description='The list of Redis hosts, where each is a tuple of `("address", port)` or the '
                                'simple string address.',
                ),
                'redis_db': fields.Integer(
                    description='The Redis database, a shortcut for putting this in `connection_kwargs`.',
                ),
                'redis_port': fields.Integer(
                    description='The port number, a shortcut for putting this on all hosts',
                ),
                # The three sentinel_* keys only apply to the Sentinel backend type.
                'sentinel_failover_retries': fields.Integer(
                    description='How many times to retry (with a delay) getting a connection from the Sentinel '
                                'when a master cannot be found (cluster is in the middle of a failover); '
                                'should only be used for Sentinel backend type'
                ),
                'sentinel_kwargs': fields.SchemalessDictionary(
                    description='The arguments used when creating all Sentinel connections (see Redis-Py docs); '
                                'should only be used for Sentinel backend type; similar to `connection_kwargs`, but '
                                'you may need to specify both (one for Sentinel connections, one for Redis '
                                'connections)',
                ),
                'sentinel_services': fields.List(
                    fields.UnicodeString(),
                    description='A list of Sentinel services (will be discovered by default); should only be '
                                'used for Sentinel backend type',
                ),
            },
            # All backend-layer kwargs are optional, and unknown keys are rejected.
            optional_keys=(
                'connection_kwargs',
                'hosts',
                'redis_db',
                'redis_port',
                'sentinel_failover_retries',
                'sentinel_kwargs',
                'sentinel_services',
            ),
            allow_extra_keys=False,
            description='The arguments passed to the Redis connection manager',
        ),
        'backend_type': fields.Constant(
            *REDIS_BACKEND_TYPES,
            description='Which backend (standard or sentinel) should be used for this Redis transport'
        ),
        'log_messages_larger_than_bytes': fields.Integer(
            description='By default, messages larger than 100KB that do not trigger errors (see '
                        '`maximum_message_size_in_bytes`) will be logged with level WARNING to a logger named '
                        '`pysoa.transport.oversized_message`. To disable this behavior, set this setting to '
                        '0. Or, you can set it to some other number to change the threshold that triggers '
                        'logging.',
        ),
        'maximum_message_size_in_bytes': fields.Integer(
            description='The maximum message size, in bytes, that is permitted to be transmitted over this '
                        'transport (defaults to 100KB on the client and 250KB on the server)',
        ),
        'message_expiry_in_seconds': fields.Integer(
            description='How long after a message is sent that it is considered expired, dropped from queue',
        ),
        'queue_capacity': fields.Integer(
            description='The capacity of the message queue to which this transport will send messages',
        ),
        'queue_full_retries': fields.Integer(
            description='How many times to retry sending a message to a full queue before giving up',
        ),
        'receive_timeout_in_seconds': fields.Integer(
            description='How long to block waiting on a message to be received',
        ),
        'default_serializer_config': fields.ClassConfigurationSchema(
            base_class=BaseSerializer,
            description='The configuration for the serializer this transport should use.',
        ),
    }

    # Every top-level key except 'backend_type' is optional.
    optional_keys = (
        'backend_layer_kwargs',
        'log_messages_larger_than_bytes',
        'maximum_message_size_in_bytes',
        'message_expiry_in_seconds',
        'queue_capacity',
        'queue_full_retries',
        'receive_timeout_in_seconds',
        'default_serializer_config',
    )

    description = 'The constructor kwargs for the Redis client and server transports.'
            # NOTE(review): this chunk begins mid-method — the enclosing `join` signature and the
            # `if` branch that pairs with the `else` below are outside this view.
            self.loop.call_soon_threadsafe(self._loop_stop_callback)
            self._done.wait()  # block until the stop callback signals that the loop has finished
        else:
            self._logger.warning('Async event loop is already not running!')

        # noinspection PyCompatibility
        super().join(timeout)

    def run_coroutine(self, coroutine):  # type: (Coroutine) -> concurrent.futures.Future
        """
        Schedule the given coroutine on this thread's event loop after applying all configured
        coroutine middleware, and return the `concurrent.futures.Future` for its completion.

        Middleware hooks run in two passes: every middleware's `before_run_coroutine` is called
        in configured order, then each middleware wraps the coroutine in reverse order so that
        the first-configured middleware becomes the outermost wrapper.
        """
        for middleware_obj in self._coroutine_middleware:
            middleware_obj.before_run_coroutine()
        for middleware_obj in reversed(self._coroutine_middleware):
            coroutine = middleware_obj.coroutine(coroutine)
        # Thread-safe hand-off: the caller is (presumably) not on the loop's own thread.
        return asyncio.run_coroutine_threadsafe(coroutine, self.loop)


# Schema for the list of coroutine middleware applied to `request.run_coroutine` calls.
coroutine_middleware_config = fields.List(
    fields.ClassConfigurationSchema(base_class=CoroutineMiddleware),
    description='The list of all `CoroutineMiddleware` classes that should be constructed and applied to '
                '`request.run_coroutine` calls processed by this server. By default, '
                '`pysoa.server.coroutine:DefaultCoroutineMiddleware` will be configured first. You can change and/or '
                'add to this, but we recommend that you always configure `DefaultCoroutineMiddleware` as the first '
                'middleware.',
)
# Pre-warm the schema cache for the default middleware path.
coroutine_middleware_config.contents.initiate_cache_for('pysoa.server.coroutine:DefaultCoroutineMiddleware')
                # NOTE(review): this chunk is the interior of a larger version-switched configuration
                # schema — the opening of the enclosing dictionary/field is outside this view.
                'enable_meta_metrics': fields.Boolean(
                    description='If true, meta-metrics will be recorded documenting the performance of '
                                'PyMetrics itself.',
                ),
                'error_logger_name': fields.UnicodeString(
                    description='By default, errors encountered when publishing metrics are suppressed and lost. If '
                                'this value is truthy, a Logger is created with this name and used to log publication '
                                'errors.',
                ),
                'publishers': fields.Sequence(
                    fields.ClassConfigurationSchema(
                        base_class=MetricsPublisher,
                        description='Import path and arguments for a publisher.',
                    ),
                    min_length=1,  # at least one publisher must be configured
                    description='The configuration for all publishers.',
                ),
            },
            optional_keys=('enable_meta_metrics', 'error_logger_name'),
        ),
    },
    description='The configuration schema changes slightly based on which config version you specify.',
)
""""""  # Empty docstring to make autodoc document this data
class ServerSettings(SOASettings):
    """
    Base settings class for all servers, whose `middleware` values are restricted to subclasses of `ServerMiddleware`
    and whose `transport` values are restricted to subclasses of `BaseServerTransport`. Middleware and transport
    configuration settings schemas will automatically switch based on the configuration settings schema for the `path`
    for each.
    """
    schema = {
        'transport': fields.ClassConfigurationSchema(base_class=BaseServerTransport),
        'middleware': fields.List(
            fields.ClassConfigurationSchema(base_class=ServerMiddleware),
            description='The list of all `ServerMiddleware` objects that should be applied to requests processed by '
                        'this server',
        ),
        # Per-service client configurations; each value is validated later against the ClientSettings schema.
        'client_routing': fields.SchemalessDictionary(
            key_type=fields.UnicodeString(),
            value_type=fields.SchemalessDictionary(),
            description='Client settings for sending requests to other services; keys should be service names, and '
                        'values should be the corresponding configuration dicts, which will be validated using the '
                        'ClientSettings schema.',
        ),
        # Mirrors the standard library `logging.config.dictConfig` schema.
        'logging': fields.Dictionary(
            {
                'version': fields.Integer(gte=1, lte=1),  # dictConfig only defines version 1
                'formatters': fields.SchemalessDictionary(
                    key_type=fields.UnicodeString(),
                    value_type=fields.Dictionary(
                        {
                            'format': fields.UnicodeString(),
                            'datefmt': fields.UnicodeString(),
                        },
                        optional_keys=('datefmt', ),
                    ),
                ),
                'filters': fields.SchemalessDictionary(
                    key_type=fields.UnicodeString(),
                    value_type=fields.Dictionary(
                        {
                            '()': fields.Anything(description='The optional filter class'),
                            'name': fields.UnicodeString(description='The optional filter name'),
                        },
                        optional_keys=('()', 'name'),
                    ),
                ),
                'handlers': fields.SchemalessDictionary(
                    key_type=fields.UnicodeString(),
                    value_type=fields.Dictionary(
                        {
                            'class': fields.UnicodeString(),
                            'level': fields.UnicodeString(),
                            'formatter': fields.UnicodeString(),
                            'filters': fields.List(fields.UnicodeString()),
                        },
                        optional_keys=('level', 'formatter', 'filters'),
                        allow_extra_keys=True,  # handlers accept arbitrary constructor kwargs (e.g. 'address')
                    ),
                ),
                'loggers': fields.SchemalessDictionary(
                    key_type=fields.UnicodeString(),
                    value_type=_logger_schema,
                ),
                'root': _logger_schema,
                'incremental': fields.Boolean(),
                'disable_existing_loggers': fields.Boolean(),
            },
            # NOTE(review): 'disable_existing_loggers' is absent from optional_keys, making it
            # required whenever 'logging' is supplied — confirm this is intentional.
            optional_keys=(
                'version',
                'formatters',
                'filters',
                'handlers',
                'root',
                'loggers',
                'incremental',
            ),
            description='Settings for service logging, which should follow the standard Python logging configuration',
        ),
        # Both harakiri keys are required when 'harakiri' is supplied (no optional_keys here).
        'harakiri': fields.Dictionary(
            {
                'timeout': fields.Integer(
                    gte=0,  # 0 disables harakiri entirely
                    description='Seconds of inactivity before harakiri is triggered; 0 to disable, defaults to 300',
                ),
                'shutdown_grace': fields.Integer(
                    gt=0,
                    description='Seconds to forcefully shutdown after harakiri is triggered if shutdown does not occur',
                ),
            },
            description='Instructions for automatically terminating a server process when request processing takes '
                        'longer than expected.',
        ),
        'request_log_success_level': log_level_schema(
            description='The logging level at which full request and response contents will be logged for successful '
                        'requests',
        ),
        'request_log_error_level': log_level_schema(
            description='The logging level at which full request and response contents will be logged for requests '
                        'whose responses contain errors (setting this to a more severe level than '
                        '`request_log_success_level` will allow you to easily filter for unsuccessful requests)',
        ),
        'heartbeat_file': fields.Nullable(fields.UnicodeString(
            description='If specified, the server will create a heartbeat file at the specified path on startup, '
                        'update the timestamp in that file after the processing of every request or every time '
                        'idle operations are processed, and delete the file when the server shuts down. The file name '
                        'can optionally contain the specifier {{pid}}, which will be replaced with the server process '
                        'PID. Finally, the file name can optionally contain the specifier {{fid}}, which will be '
                        'replaced with the unique-and-deterministic forked process ID whenever the server is started '
                        'with the --fork option (the minimum value is always 1 and the maximum value is always equal '
                        'to the value of the --fork option).',
        )),
        'extra_fields_to_redact': fields.Set(
            fields.UnicodeString(),
            description='Use this field to supplement the set of fields that are automatically redacted/censored in '
                        'request and response fields with additional fields that your service needs redacted.',
        ),
    }  # type: Dict[six.text_type, fields.Base]

    defaults = {
        'client_routing': {},
        # Default logging: console + syslog handlers, both filtered through the PySOA log context filter.
        'logging': {
            'version': 1,
            'formatters': {
                'console': {
                    'format': '%(asctime)s %(levelname)7s %(correlation_id)s %(request_id)s: %(message)s'
                },
                'syslog': {
                    'format': (
                        '%(service_name)s_service: %(name)s %(levelname)s %(module)s %(process)d '
                        'correlation_id %(correlation_id)s request_id %(request_id)s %(message)s'
                    ),
                },
            },
            'filters': {
                'pysoa_logging_context_filter': {
                    '()': 'pysoa.common.logging.PySOALogContextFilter',
                },
            },
            'handlers': {
                'console': {
                    'level': 'INFO',
                    'class': 'logging.StreamHandler',
                    'formatter': 'console',
                    'filters': ['pysoa_logging_context_filter'],
                },
                'syslog': {
                    'level': 'INFO',
                    'class': 'pysoa.common.logging.SyslogHandler',
                    'facility': SyslogHandler.LOG_LOCAL7,
                    'address': ('localhost', 514),
                    'formatter': 'syslog',
                    'filters': ['pysoa_logging_context_filter'],
                },
            },
            'loggers': {},
            'root': {
                'handlers': ['console'],
                'level': 'INFO',
            },
            'disable_existing_loggers': False,
        },
        'harakiri': {
            'timeout': 300,
            'shutdown_grace': 30,
        },
        'request_log_success_level': 'INFO',
        'request_log_error_level': 'INFO',
        'heartbeat_file': None,
        'extra_fields_to_redact': set(),
        # Default transport is the Redis Gateway server transport.
        'transport': {
            'path': 'pysoa.common.transport.redis_gateway.server:RedisServerTransport',
        }
    }  # type: Dict[six.text_type, Any]