def __init__(self, name: str, current_instance: int, nb_replica: int, topic_store: str,
             serializer: BaseSerializer, local_store: BaseLocalStore, global_store: BaseGlobalStore,
             bootstrap_server: str, cluster_metadata: ClusterMetadata, cluster_admin: KafkaAdminClient,
             loop: AbstractEventLoop, rebuild: bool = False, event_sourcing: bool = False) -> None:
    """ StoreBuilder constructor

    Args:
        name: StoreBuilder name
        current_instance: Current service instance
        nb_replica: Number of service instances
        topic_store: Name of the topic where store events are sent
        serializer: Serializer instance, provided by tonga
        local_store: Local store instance (Memory / Shelve / RocksDB / ...)
        global_store: Global store instance (Memory / Shelve / RocksDB / ...)
        bootstrap_server: Kafka bootstrap server address (host:port)
        cluster_metadata: ClusterMetadata from kafka-python; see the kafka-python documentation
                          for more details
        cluster_admin: KafkaAdminClient from kafka-python; see the kafka-python documentation
                       for more details
        loop: Asyncio loop
        rebuild: If True, the store is rebuilt from the first offset of the topic / partition
        event_sourcing: If True, StoreBuilder blocks the instance from writing to the local &
                        global stores; storage is then only updated by the store handler function
                        (see StoreBuilder for more details). Otherwise, the instance can write
                        only to its own local store, and the global store is read-only
    """
    self.name = name
    self._current_instance = current_instance
    self._nb_replica = nb_replica
    self._rebuild = rebuild
    self._event_sourcing = event_sourcing
    self._bootstrap_server = bootstrap_server

    self._serializer = serializer
    self._topic_store = topic_store

    self._local_store = local_store
    self._global_store = global_store

    self._cluster_metadata = cluster_metadata
    self._cluster_admin = cluster_admin

    self._logger = logging.getLogger('tonga')

    self._loop = loop

    self._store_consumer = KafkaConsumer(name=f'{self.name}_consumer',
                                         serializer=self._serializer,
                                         bootstrap_servers=self._bootstrap_server,
                                         auto_offset_reset='earliest',
                                         client_id=f'{self.name}_consumer_{self._current_instance}',
                                         topics=[self._topic_store],
                                         group_id=f'{self.name}_consumer',
                                         loop=self._loop,
                                         isolation_level='read_committed',
                                         assignors_data={'instance': self._current_instance,
                                                         'nb_replica': self._nb_replica,
                                                         'assignor_policy': 'all'},
                                         store_builder=self)

    partitioner = StatefulsetPartitioner(instance=self._current_instance)

    self._store_producer = KafkaProducer(name=f'{self.name}_producer',
                                         bootstrap_servers=self._bootstrap_server,
                                         client_id=f'{self.name}_producer_{self._current_instance}',
                                         partitioner=partitioner,
                                         loop=self._loop,
                                         serializer=self._serializer,
                                         acks='all')

    self._stores_partitions = list()
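
# A minimal instantiation sketch (not from the source): it assumes tonga's in-memory
# stores (LocalStoreMemory / GlobalStoreMemory), an AvroSerializer pointed at a
# hypothetical schemas folder, and kafka-python's ClusterMetadata / KafkaAdminClient.
# Module paths, constructor kwargs, and addresses below are assumptions; check them
# against your tonga version before use.
import asyncio

from kafka import KafkaAdminClient
from kafka.cluster import ClusterMetadata

from tonga.services.serializer.avro import AvroSerializer    # assumed module paths
from tonga.stores.local.memory import LocalStoreMemory
from tonga.stores.globall.memory import GlobalStoreMemory

loop = asyncio.get_event_loop()

store_builder = StoreBuilder(name='cash-register-store-builder',
                             current_instance=0,
                             nb_replica=1,
                             topic_store='cash-register-stores',
                             serializer=AvroSerializer('/path/to/avro_schemas'),  # hypothetical folder
                             local_store=LocalStoreMemory(name='local-store-0'),  # assumed ctor kwargs
                             global_store=GlobalStoreMemory(name='global-store'),
                             bootstrap_server='localhost:9092',
                             cluster_metadata=ClusterMetadata(bootstrap_servers='localhost:9092'),
                             cluster_admin=KafkaAdminClient(bootstrap_servers='localhost:9092'),
                             loop=loop,
                             rebuild=True,          # replay the store topic from the first offset
                             event_sourcing=False)  # this instance may write to its own local store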
# Creates & registers store builder cash_register_app['store_builder'] = KafkaStoreManager(name=f'cash-register-{cur_instance}-store-builder', client=cash_register_app['kafka_client'], topic_store='cash-register-stores', serializer=cash_register_app['serializer'], local_store=cash_register_app['local_store'], global_store=cash_register_app['global_store'], loop=cash_register_app['loop'], rebuild=True, event_sourcing=False) # Creates & register KafkaProducer transactional_id = f'cash-register-{cash_register_app["kafka_client"].cur_instance}' cash_register_app['transactional_producer'] = KafkaProducer(client=cash_register_app['kafka_client'], serializer=cash_register_app['serializer'], loop=cash_register_app['loop'], partitioner=KeyPartitioner(), acks='all', transactional_id=transactional_id) cash_register_app['transactional_manager'] = transactional_manager cash_register_app['transactional_manager'].set_transactional_producer(cash_register_app['transactional_producer']) # Initializes cash register handlers store_record_handler = StoreRecordHandler(cash_register_app['store_builder']) coffee_ordered_handler = CoffeeOrderedHandler(cash_register_app['store_builder'], cash_register_app['transactional_producer']) coffee_served_handler = CoffeeServedHandler(cash_register_app['store_builder'], cash_register_app['transactional_producer']) # Registers events / handlers in serializer