def commence_cw_log_streaming(stream_name):
    """Attach a watchtower CloudWatch handler to the root logger.

    Streams INFO-and-above records from the root logger to the AWS
    CloudWatch log group named by ``AWS_LOG_GROUP`` under *stream_name*.
    No-ops (with a log message) when CloudWatch streaming is disabled or
    any required AWS constant is unset.

    :param stream_name: name of the CloudWatch log stream to write to.
    """
    logger = get_logger(__name__)
    root_logger = logging.getLogger()

    # Guard clauses: bail out early when streaming cannot proceed.
    # PEP 8: test truthiness with `not`, never `is False`.
    if not CW_ENABLED:
        logger.warning(f"{module_prefix} - Disabled")
        return
    if not all((AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_REGION_NAME, AWS_LOG_GROUP)):
        logger.error(f"{module_prefix} - Insufficient constant values")
        return

    try:
        boto3_client = boto3.client(
            'logs',
            aws_access_key_id=AWS_ACCESS_KEY_ID,
            aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
            region_name=AWS_REGION_NAME
        )
        watchtower_handler = watchtower.CloudWatchLogHandler(
            boto3_client=boto3_client,
            log_group=AWS_LOG_GROUP,
            stream_name=stream_name
        )
    except ClientError as e:
        # logger.exception records the traceback alongside the message.
        logger.exception(f"{module_prefix} - Failed; error: {e}")
    else:
        # Only wire up the handler when client/handler creation succeeded.
        logger.info(f"{module_prefix} - Streaming in progress - Log group: {AWS_LOG_GROUP}")
        watchtower_handler.setLevel(logging.INFO)
        watchtower_handler.setFormatter(logging.Formatter(fmt=CW_LOGGING_FORMAT))
        root_logger.addHandler(watchtower_handler)
import json
from confluent_kafka import Consumer, KafkaException
from ros.lib.config import INSIGHTS_KAFKA_ADDRESS, INVENTORY_EVENTS_TOPIC, GROUP_ID, get_logger
from ros.lib.app import app, db
from ros.lib.models import RhAccount, System
from ros.lib.utils import get_or_create
from ros.processor.metrics import (processor_requests_success,
                                   processor_requests_failures,
                                   kafka_failures)

LOG = get_logger(__name__)


class InventoryEventsConsumer:
    """Consume and dispatch host inventory events from Kafka."""

    def __init__(self):
        """Set up the Kafka consumer and the event dispatch table."""
        # Auto-commit is disabled so offsets advance only under the
        # consumer's explicit control, not on a background timer.
        kafka_config = {
            'bootstrap.servers': INSIGHTS_KAFKA_ADDRESS,
            'group.id': GROUP_ID,
            'enable.auto.commit': False
        }
        self.consumer = Consumer(kafka_config)
        # Listen on the inventory events topic.
        self.consumer.subscribe([INVENTORY_EVENTS_TOPIC])
        # Dispatch table: inventory event type -> handler method.
        # Create and update events share one handler.
        self.event_type_map = {
            'delete': self.host_delete_event,
            'created': self.host_create_update_events,
            'updated': self.host_create_update_events
        }
        self.prefix = 'INVENTORY EVENTS'
        self.reporter = 'INVENTORY EVENTS'