def _load_logs_for_day(
    self,
    logs_date,
    performer_id,
    repository_id,
    account_id,
    filter_kinds,
    after_datetime=None,
    after_random_id=None,
    size=PAGE_SIZE,
):
    """Fetch up to `size` log entries from the index for a single day.

    Returns [] when no index exists for `logs_date`; otherwise the executed
    search response, sorted newest-first, optionally resuming after the
    (after_datetime, after_random_id) cursor from a previous page.
    """
    day_index = self._es_client.index_name(logs_date)
    if not self._es_client.index_exists(day_index):
        # No index means nothing was logged that day; skip the query.
        return []

    query = self._base_query(
        performer_id, repository_id, account_id, filter_kinds, index=day_index
    )
    # Newest entries first; random_id breaks ties between equal timestamps
    # so search_after pagination is deterministic.
    query = query.sort({"datetime": "desc"}, {"random_id.keyword": "desc"})
    query = query.extra(size=size)

    # Resume after the last (datetime, random_id) seen on the previous page.
    if after_datetime is not None and after_random_id is not None:
        query = query.extra(
            search_after=[epoch_ms(after_datetime), after_random_id])

    return query.execute()
def send(self, logentry):
    """Synchronously send a single log entry to Kafka.

    Blocks until the broker acknowledges delivery (or the timeout fires).

    Raises:
        LogSendException: on timeout, Kafka error, or any other failure.
    """
    try:
        # send() has a (max_block_ms) timeout and get() has a (max_block_ms) timeout
        # for an upper bound of 2x(max_block_ms) before guaranteed delivery
        future = self._producer.send(
            self.topic,
            logentry.to_dict(),
            timestamp_ms=epoch_ms(logentry.datetime),
        )
        # get() blocks until the broker acks and raises on failure/timeout,
        # so reaching the next statement implies delivery succeeded.  The
        # previous `assert future.succeeded` was a no-op: it tested the
        # bound method object (always truthy) instead of calling
        # future.succeeded(), and asserts vanish under `python -O` anyway.
        future.get(timeout=self.max_block_ms)
    except KafkaTimeoutError as kte:
        logger.exception(
            'KafkaLogsProducer timeout sending log to Kafka: %s', kte)
        raise LogSendException(
            'KafkaLogsProducer timeout sending log to Kafka: %s' % kte)
    except KafkaError as ke:
        logger.exception(
            'KafkaLogsProducer error sending log to Kafka: %s', ke)
        raise LogSendException(
            'KafkaLogsProducer error sending log to Kafka: %s' % ke)
    except Exception as e:
        logger.exception(
            'KafkaLogsProducer exception sending log to Kafka: %s', e)
        raise LogSendException(
            'KafkaLogsProducer exception sending log to Kafka: %s' % e)