def test_create_provider_event_invalid_auth_header(self):
    """Verify that no Sources row is stored when the auth_header is invalid."""
    source_id = 2
    kafka_offset = 3
    storage.create_provider_event(source_id, 'bad', kafka_offset)
    # The record must not exist — lookup should raise DoesNotExist.
    with self.assertRaises(Sources.DoesNotExist):
        Sources.objects.get(source_id=source_id)
async def process_messages(msg_pending_queue):  # pragma: no cover
    """
    Process messages from Platform-Sources kafka service.

    Handler for various application/source create and delete events.
    'create' events:
        Issues a Sources REST API call to get additional context for
        the Platform-Sources kafka event.  This information is stored
        in the Sources database table.
    'destroy' events:
        Enqueues a source delete event which will be processed in
        the synchronize_sources method.

    Args:
        msg_pending_queue (Asyncio queue): Queue to hold kafka messages to be filtered

    Returns:
        None

    """
    LOG.info('Waiting to process incoming kafka messages...')
    while True:
        msg_data = await msg_pending_queue.get()
        LOG.info(f'Processing Event: {str(msg_data)}')
        try:
            # Hoisted: the event type is compared four times below.
            event_type = msg_data.get('event_type')
            if event_type in (KAFKA_APPLICATION_CREATE, KAFKA_SOURCE_UPDATE):
                storage.create_provider_event(msg_data.get('source_id'),
                                              msg_data.get('auth_header'),
                                              msg_data.get('offset'))
                # The Sources REST call is blocking; run it in a worker thread
                # so the event loop stays responsive.
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(
                        pool, sources_network_info,
                        msg_data.get('source_id'), msg_data.get('auth_header'))
            elif event_type in (KAFKA_AUTHENTICATION_CREATE, KAFKA_AUTHENTICATION_UPDATE):
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(
                        pool, sources_network_auth_info,
                        msg_data.get('resource_id'), msg_data.get('auth_header'))
                # Authentication events only carry a resource_id; resolve it to
                # a source_id so the update check below can use it.
                msg_data['source_id'] = storage.get_source_from_endpoint(
                    msg_data.get('resource_id'))
            elif event_type in (KAFKA_APPLICATION_DESTROY, KAFKA_SOURCE_DESTROY):
                storage.enqueue_source_delete(msg_data.get('source_id'))

            if event_type in (KAFKA_SOURCE_UPDATE, KAFKA_AUTHENTICATION_UPDATE):
                storage.enqueue_source_update(msg_data.get('source_id'))
        except Exception as error:
            # The reason for catching all exceptions is to ensure that the event
            # loop remains active in the event that message processing fails unexpectedly.
            source_id = str(msg_data.get('source_id', 'unknown'))
            # LOG.exception logs at ERROR level *and* records the traceback,
            # which LOG.error alone would discard.
            LOG.exception(
                f'Source {source_id} Unexpected message processing error: {str(error)}'
            )
def test_create_provider_event(self):
    """Verify that create_provider_event persists a Sources record."""
    expected_id = 2
    expected_offset = 3
    storage.create_provider_event(expected_id, Config.SOURCES_FAKE_HEADER, expected_offset)
    record = Sources.objects.get(source_id=expected_id)
    # Every stored field should round-trip back out of the database.
    self.assertEqual(record.source_id, expected_id)
    self.assertEqual(record.auth_header, Config.SOURCES_FAKE_HEADER)
    self.assertEqual(record.offset, expected_offset)
async def process_messages(msg_pending_queue, in_progress_queue):  # pragma: no cover
    """
    Process messages from Platform-Sources kafka service.

    Handler for various application/source create and delete events.
    'create' events:
        Issues a Sources REST API call to get additional context for
        the Platform-Sources kafka event.  This information is stored
        in the Sources database table.
    'destroy' events:
        Enqueues a source delete event which will be processed in
        the synchronize_sources method.

    Args:
        msg_pending_queue (Asyncio queue): Queue to hold kafka messages to be filtered
        in_progress_queue (Asyncio queue): Queue for filtered cost management messages
            awaiting Koku-Provider synchronization.

    Returns:
        None

    """
    LOG.info('Waiting to process incoming kafka messages...')
    while True:
        msg_data = await msg_pending_queue.get()
        LOG.info(f'Processing Event: {str(msg_data)}')
        try:
            event_type = msg_data.get('event_type')
            if event_type == KAFKA_APPLICATION_CREATE:
                storage.create_provider_event(msg_data.get('source_id'),
                                              msg_data.get('auth_header'),
                                              msg_data.get('offset'))
                # The Sources REST call is blocking; run it in a worker thread
                # so the event loop stays responsive.
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(
                        pool, sources_network_info,
                        msg_data.get('source_id'), msg_data.get('auth_header'))
            elif event_type == KAFKA_AUTHENTICATION_CREATE:
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(
                        pool, sources_network_auth_info,
                        msg_data.get('resource_id'), msg_data.get('auth_header'))
            elif event_type in (KAFKA_APPLICATION_DESTROY, KAFKA_SOURCE_DESTROY):
                await storage.enqueue_source_delete(in_progress_queue,
                                                    msg_data.get('source_id'))
        except Exception:
            # Catch-all so that one malformed kafka message cannot terminate
            # the consumer loop; LOG.exception records the full traceback.
            LOG.exception(f'Unexpected message processing error for event: {str(msg_data)}')