def __init__(self, app: AppT,
             pm_config: PrometheusMonitorConfig = None,
             **kwargs) -> None:
    """Initialize the Prometheus monitor for ``app``.

    Args:
        app: the Faust application being monitored.
        pm_config: optional monitor configuration; a default
            :class:`PrometheusMonitorConfig` instance is created when
            not provided.
        **kwargs: forwarded to the base monitor's ``__init__``.

    Raises:
        ImproperlyConfigured: if ``prometheus_client`` is not installed.
    """
    # Fail fast before doing any setup work if the optional
    # prometheus_client dependency is missing.
    if prometheus_client is None:
        raise ImproperlyConfigured(
            'prometheus_client requires `pip install prometheus_client`.')
    self.app = app
    # BUG FIX: the original stored the *class* object when no config was
    # given (``self.pm_config = PrometheusMonitorConfig``); instantiate
    # it so pm_config is always an instance with its own state.
    if pm_config is None:
        self.pm_config = PrometheusMonitorConfig()
    else:
        self.pm_config = pm_config
    self._python_gc_metrics()
    self._metrics = PrometheusMetrics(self.pm_config)
    self.expose_metrics()
    super().__init__(**kwargs)
def _create_worker_consumer(
        self, transport: 'Transport',
        loop: asyncio.AbstractEventLoop) -> aiokafka.AIOKafkaConsumer:
    """Build the aiokafka consumer used by a worker instance.

    Translates the app's configuration (``self.app.conf``) into
    :class:`aiokafka.AIOKafkaConsumer` keyword arguments.

    Raises:
        ImproperlyConfigured: if ``broker_session_timeout`` exceeds
            ``broker_request_timeout`` (aiokafka requires the session
            timeout to fit within the request timeout).
    """
    # Use read_committed isolation when the consumer participates in
    # transactions, otherwise read uncommitted records.
    isolation_level: str = 'read_uncommitted'
    conf = self.app.conf
    if self.consumer.in_transaction:
        isolation_level = 'read_committed'
    self._assignor = self.app.assignor
    # Security/auth kwargs (SSL/SASL/GSSAPI or plaintext) derived from
    # the configured credentials.
    auth_settings = credentials_to_aiokafka_auth(conf.broker_credentials, conf.ssl_context)
    # A falsy max-poll-interval is normalized to 0 before the ms
    # conversion below.
    max_poll_interval = conf.broker_max_poll_interval or 0
    request_timeout = conf.broker_request_timeout
    session_timeout = conf.broker_session_timeout
    if session_timeout > request_timeout:
        raise ImproperlyConfigured(
            f'Setting broker_session_timeout={session_timeout} '
            f'cannot be greater than '
            f'broker_request_timeout={request_timeout}')
    # All *_timeout/interval settings are seconds in Faust config and
    # converted to milliseconds for aiokafka.
    # NOTE(review): enable_auto_commit=False — offsets are presumably
    # committed explicitly by Faust; confirm against the commit path.
    return aiokafka.AIOKafkaConsumer(
        loop=loop,
        client_id=conf.broker_client_id,
        group_id=conf.id,
        bootstrap_servers=server_list(transport.url, transport.default_port),
        partition_assignment_strategy=[self._assignor],
        enable_auto_commit=False,
        auto_offset_reset=conf.consumer_auto_offset_reset,
        max_poll_records=conf.broker_max_poll_records,
        max_poll_interval_ms=int(max_poll_interval * 1000.0),
        max_partition_fetch_bytes=conf.consumer_max_fetch_size,
        fetch_max_wait_ms=1500,
        request_timeout_ms=int(request_timeout * 1000.0),
        check_crcs=conf.broker_check_crcs,
        session_timeout_ms=int(session_timeout * 1000.0),
        heartbeat_interval_ms=int(conf.broker_heartbeat_interval * 1000.0),
        isolation_level=isolation_level,
        traced_from_parent_span=self.traced_from_parent_span,
        start_rebalancing_span=self.start_rebalancing_span,
        start_coordinator_span=self.start_coordinator_span,
        **auth_settings,
    )
def __init__(self,
             fun: AgentFun,
             *,
             app: AppT,
             name: str = None,
             channel: Union[str, ChannelT] = None,
             concurrency: int = 1,
             sink: Iterable[SinkT] = None,
             on_error: AgentErrorHandler = None,
             supervisor_strategy: Type[SupervisorStrategyT] = None,
             help: str = None,
             key_type: ModelArg = None,
             value_type: ModelArg = None,
             isolated_partitions: bool = False,
             use_reply_headers: bool = None,
             **kwargs: Any) -> None:
    """Initialize the agent around the decorated async function ``fun``.

    Args:
        fun: the agent's processing coroutine function.
        app: owning Faust application.
        name: explicit agent name; defaults to the canonical short
            name of ``fun``.
        channel: channel (or topic name) to consume from; extra
            ``**kwargs`` are saved and presumably forwarded when the
            channel is later created.
        concurrency: number of concurrent actor instances (falsy
            values are normalized to 1).
        sink: iterable of sinks to forward results to.
        isolated_partitions: run one actor per partition; mutually
            exclusive with ``concurrency > 1``.

    Raises:
        ImproperlyConfigured: if ``isolated_partitions`` is enabled
            together with ``concurrency > 1``.
    """
    self.app = app
    self.fun: AgentFun = fun
    self.name = name or canonshortname(self.fun)
    # key-type/value_type arguments only apply when a channel
    # is not set (i.e. when the agent creates its own topic from a
    # name, not when an explicit ChannelT instance was passed).
    if key_type is not None:
        assert channel is None or isinstance(channel, str)
    self._key_type = key_type
    if value_type is not None:
        assert channel is None or isinstance(channel, str)
    self._value_type = value_type
    # Defer channel creation: remember the argument and any extra
    # keyword arguments for later.
    self._channel_arg = channel
    self._channel_kwargs = kwargs
    self.concurrency = concurrency or 1
    self.isolated_partitions = isolated_partitions
    self.help = help or ''
    # Copy sinks into a fresh list so the caller's iterable is not
    # shared or mutated.
    self._sinks = list(sink) if sink is not None else []
    self._on_error: Optional[AgentErrorHandler] = on_error
    self.supervisor_strategy = supervisor_strategy
    # Weak references only: actors must not be kept alive by this
    # bookkeeping once they are stopped elsewhere.
    self._actors = WeakSet()
    self._actor_by_partition = WeakValueDictionary()
    if self.isolated_partitions and self.concurrency > 1:
        raise ImproperlyConfigured(
            'Agent concurrency must be 1 when using isolated partitions')
    self.use_reply_headers = use_reply_headers
    Service.__init__(self)
def setup(app: AppT, *, dsn: str = None, workers: int = 4, max_queue_size: int = 1000, loglevel: int = None) -> None:
    """Install Sentry error reporting for ``app``.

    Creates a logging handler from the DSN, initializes ``sentry_sdk``
    with the aiohttp integration, and registers the handler on the
    app's log handlers.

    Raises:
        ImproperlyConfigured: if a handler could be built but the
            ``sentry_sdk`` library is not installed.
    """
    handler = handler_from_dsn(
        dsn=dsn,
        workers=workers,
        qsize=max_queue_size,
        loglevel=loglevel,
    )
    if handler is None:
        # Nothing to install — presumably no usable DSN was given;
        # confirm against handler_from_dsn.
        return
    if sentry_sdk is None or _sdk_aiohttp is None:
        raise ImproperlyConfigured(
            'faust.contrib.sentry requires the `sentry_sdk` library.')
    sentry_sdk.init(
        dsn=dsn,
        integrations=[_sdk_aiohttp.AioHttpIntegration()],
    )
    app.conf.loghandlers.append(handler)
def setup_prometheus_sensors(app: AppT, pattern: str = "/metrics", registry: CollectorRegistry = REGISTRY) -> None:
    """Attach a Prometheus monitor and a metrics web endpoint to ``app``.

    Args:
        app: the Faust application to instrument.
        pattern: URL path for the exposition endpoint.
        registry: collector registry to create metrics in and export
            from (defaults to the global ``REGISTRY``).

    Raises:
        ImproperlyConfigured: if ``prometheus_client`` is not installed.
    """
    if prometheus_client is None:
        raise ImproperlyConfigured(
            "prometheus_client requires `pip install prometheus_client`.")
    faust_metrics = FaustMetrics.create(registry)
    app.monitor = PrometheusMonitor(metrics=faust_metrics)

    @app.page(pattern)
    async def metrics_handler(self: _web.View,
                              request: _web.Request) -> _web.Response:
        headers = {"Content-Type": CONTENT_TYPE_LATEST}
        # BUG FIX: export the configured ``registry`` argument instead of
        # the global REGISTRY — previously a custom registry was used to
        # create metrics but never served by this endpoint.
        return cast(
            _web.Response,
            Response(body=generate_latest(registry),
                     headers=headers,
                     status=200),
        )
def __init__(self, url: Union[str, URL], app: AppT, table: CollectionT, *, key_index_size: int = None, options: Mapping[str, Any] = None, **kwargs: Any) -> None:
    """Initialize the RocksDB table store.

    Args:
        url: store URL; when it has no path component the table name
            is appended as the database path.
        key_index_size: capacity of the in-memory key index cache;
            defaults to ``app.conf.table_key_index_size``.
        options: extra keyword options passed to ``RocksDBOptions``.

    Raises:
        ImproperlyConfigured: if python-rocksdb is not installed.
    """
    # Fail fast when the optional rocksdb binding is missing.
    if rocksdb is None:
        raise ImproperlyConfigured(
            'RocksDB bindings not installed: pip install python-rocksdb')
    super().__init__(url, app, table, **kwargs)
    if not self.url.path:
        # Default the database path to the table name.
        self.url /= self.table_name
    self.options = options or {}
    self.rocksdb_options = RocksDBOptions(**self.options)
    self.key_index_size = (
        app.conf.table_key_index_size if key_index_size is None
        else key_index_size
    )
    # Per-partition database handles, created lazily elsewhere.
    self._dbs = {}
    self._key_index = LRUCache(limit=self.key_index_size)
def _prepare_compat_settings(self, options: MutableMapping) -> Mapping: COMPAT_OPTIONS = { 'client_id': 'broker_client_id', 'commit_interval': 'broker_commit_interval', 'create_reply_topic': 'reply_create_topic', 'num_standby_replicas': 'table_standby_replicas', 'default_partitions': 'topic_partitions', 'replication_factor': 'topic_replication_factor', } for old, new in COMPAT_OPTIONS.items(): val = options.get(new) try: options[new] = options[old] except KeyError: pass else: if val is not None: raise ImproperlyConfigured( f'Cannot use both compat option {old!r} and {new!r}') warnings.warn( FutureWarning(W_OPTION_DEPRECATED.format(old=old, new=new))) return options
def credentials_to_aiokafka_auth(credentials: CredentialsT = None, ssl_context: Any = None) -> Mapping:
    """Translate Faust credentials into aiokafka authentication kwargs.

    Returns a mapping of keyword arguments suitable for
    ``aiokafka.AIOKafkaConsumer``/``AIOKafkaProducer``.

    Raises:
        ImproperlyConfigured: for credential types aiokafka does not
            support.
    """
    if credentials is None:
        # No explicit credentials object: fall back to bare SSL when a
        # context is given, otherwise plaintext.
        if ssl_context is not None:
            return {
                'security_protocol': 'SSL',
                'ssl_context': ssl_context,
            }
        return {'security_protocol': 'PLAINTEXT'}
    if isinstance(credentials, SSLCredentials):
        return {
            'security_protocol': credentials.protocol.value,
            'ssl_context': credentials.context,
        }
    if isinstance(credentials, SASLCredentials):
        return {
            'security_protocol': credentials.protocol.value,
            'sasl_mechanism': credentials.mechanism.value,
            'sasl_plain_username': credentials.username,
            'sasl_plain_password': credentials.password,
            'ssl_context': credentials.ssl_context,
        }
    if isinstance(credentials, GSSAPICredentials):
        return {
            'security_protocol': credentials.protocol.value,
            'sasl_mechanism': credentials.mechanism.value,
            'sasl_kerberos_service_name': credentials.kerberos_service_name,
            'sasl_kerberos_domain_name': credentials.kerberos_domain_name,
            'ssl_context': credentials.ssl_context,
        }
    raise ImproperlyConfigured(
        f'aiokafka does not support {credentials}')
def version(self, version: int) -> None:
    """Set the application version.

    Versions are positive integers starting at 1.

    Raises:
        ImproperlyConfigured: if ``version`` is falsy (0/None) or
            negative.
    """
    # BUG FIX: ``if not version`` only rejected 0/None; negative
    # versions slipped through even though the message says versions
    # start at 1.  The falsy check is kept first so None never reaches
    # the ``< 1`` comparison.
    if not version or version < 1:
        raise ImproperlyConfigured(
            f'Version cannot be {version}, please start at 1')
    self._version = version
def _out_of_range(self, value: float) -> ImproperlyConfigured:
    """Build (not raise) the out-of-range error for ``value``.

    The caller is responsible for raising the returned exception.
    """
    detail = (
        f"Value {value} is out of range for {self.class_name} "
        f"(min={self.min_value} max={self.max_value})"
    )
    return ImproperlyConfigured(detail)
def _dumps(self, s: Any) -> bytes:
    """Serialize ``s`` as YAML and return the encoded bytes.

    Raises:
        ImproperlyConfigured: if PyYAML is not installed.
    """
    if _yaml is None:
        raise ImproperlyConfigured('Missing yaml: pip install PyYAML')
    # safe_dump emits only standard YAML-representable types.
    dumped = _yaml.safe_dump(s)
    return want_bytes(dumped)
def _loads(self, s: bytes) -> Any:
    """Deserialize YAML bytes ``s`` into Python objects.

    Raises:
        ImproperlyConfigured: if PyYAML is not installed.
    """
    if _yaml is None:
        raise ImproperlyConfigured('Missing yaml: pip install PyYAML')
    text = want_str(s)
    # safe_load constructs only standard YAML types, never arbitrary
    # python objects.
    return _yaml.safe_load(text)
async def on_start(self) -> None:
    """Connect to Redis when the backend starts.

    Raises:
        ImproperlyConfigured: if the ``aredis`` client library is not
            installed.
    """
    if aredis is not None:
        await self.connect()
    else:
        raise ImproperlyConfigured(
            'Redis cache backend requires `pip install aredis`')
async def on_start(self) -> None:
    """Call when Redis backend starts.

    Verifies the optional ``aredis`` dependency is importable before
    establishing the connection.

    Raises:
        ImproperlyConfigured: if ``aredis`` is not installed.
    """
    # Fail fast when the optional aredis dependency is missing.
    if aredis is None:
        raise ImproperlyConfigured(
            "Redis cache backend requires `pip install aredis`")
    await self.connect()