def process_message(app_type_id, msg):  # noqa: C901
    """
    Process message from Platform-Sources kafka service.

    Handler for various application/source create and delete events.
    'create' events:
        Issues a Sources REST API call to get additional context for
        the Platform-Sources kafka event. This information is stored
        in the Sources database table.
    'destroy' events:
        Enqueues a source delete event which will be processed in
        the synchronize_sources method.

    Args:
        app_type_id - application type identifier
        msg - kafka message

    Returns:
        None

    """
    LOG.info(f"Processing Event: {msg}")
    msg_data = None
    try:
        msg_data = cost_mgmt_msg_filter(msg)
    except SourceNotFoundError:
        LOG.warning(f"Source not found in platform sources. Skipping msg: {msg}")
        return
    if not msg_data:
        LOG.debug(f"Message not intended for cost management: {msg}")
        return

    if msg_data.get("event_type") in (KAFKA_APPLICATION_CREATE,):
        storage.create_source_event(msg_data.get("source_id"), msg_data.get("auth_header"), msg_data.get("offset"))

        if storage.is_known_source(msg_data.get("source_id")):
            sources_network_info(msg_data.get("source_id"), msg_data.get("auth_header"))

    elif msg_data.get("event_type") in (KAFKA_AUTHENTICATION_CREATE, KAFKA_AUTHENTICATION_UPDATE):
        if msg_data.get("event_type") in (KAFKA_AUTHENTICATION_CREATE,):
            storage.create_source_event(  # this will create source _only_ if it does not exist.
                msg_data.get("source_id"), msg_data.get("auth_header"), msg_data.get("offset")
            )

        save_auth_info(msg_data.get("auth_header"), msg_data.get("source_id"))

    elif msg_data.get("event_type") in (KAFKA_SOURCE_UPDATE,):
        if storage.is_known_source(msg_data.get("source_id")) is False:
            LOG.info("Update event for unknown source id, skipping...")
            return
        sources_network_info(msg_data.get("source_id"), msg_data.get("auth_header"))

    elif msg_data.get("event_type") in (KAFKA_APPLICATION_DESTROY,):
        storage.enqueue_source_delete(msg_data.get("source_id"), msg_data.get("offset"), allow_out_of_order=True)

    elif msg_data.get("event_type") in (KAFKA_SOURCE_DESTROY,):
        storage.enqueue_source_delete(msg_data.get("source_id"), msg_data.get("offset"))

    if msg_data.get("event_type") in (KAFKA_SOURCE_UPDATE, KAFKA_AUTHENTICATION_UPDATE):
        storage.enqueue_source_update(msg_data.get("source_id"))
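
# A minimal sketch of the filtered message shape consumed by process_message
# above. The keys mirror the .get() calls in the handler; the concrete values
# are hypothetical and only illustrate the expected types.
_example_msg_data = {
    "event_type": KAFKA_APPLICATION_CREATE,  # one of the KAFKA_* event constants
    "source_id": 1,                          # Platform-Sources source identifier
    "auth_header": "<base64-encoded identity header>",
    "offset": 42,                            # kafka offset used for ordering/replay
}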
def process(self):
    """Process the message."""
    if self.event_type in (KAFKA_APPLICATION_CREATE,):
        storage.create_source_event(self.source_id, self.auth_header, self.offset)

    if storage.is_known_source(self.source_id):
        if self.event_type in (KAFKA_APPLICATION_CREATE,):
            self.save_sources_details()
            self.save_source_info(bill=True)
            # _Authentication_ messages are responsible for saving credentials.
            # However, OCP does not send an Auth message. Therefore, we need
            # to run the following branch for OCP which completes the source
            # creation cycle for an OCP source.
            if storage.get_source_type(self.source_id) == Provider.PROVIDER_OCP:
                self.save_source_info(auth=True)

        if self.event_type in (KAFKA_APPLICATION_UPDATE,):
            if storage.get_source_type(self.source_id) == Provider.PROVIDER_AZURE:
                # Because azure auth is split in Sources backend, we need to check both
                # auth and billing when we receive either an auth update or app update event
                updated = self.save_source_info(auth=True, bill=True)
            else:
                updated = self.save_source_info(bill=True)

            if updated:
                LOG.info(f"[ApplicationMsgProcessor] source_id {self.source_id} updated")
                storage.enqueue_source_create_or_update(self.source_id)
            else:
                LOG.info(f"[ApplicationMsgProcessor] source_id {self.source_id} not updated. No changes detected.")

    if self.event_type in (KAFKA_APPLICATION_DESTROY,):
        storage.enqueue_source_delete(self.source_id, self.offset, allow_out_of_order=True)
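
# Hypothetical walk-through of the OCP special case above; the constructor
# arguments shown are assumptions, not the processor's actual signature.
# For an OCP source there is no follow-up Authentication message, so a single
# Application.create event drives both the billing and the auth save:
#
#   processor = ApplicationMsgProcessor(msg, cost_mgmt_app_type_id)  # hypothetical ctor
#   processor.process()
#   # -> create_source_event(...)
#   # -> save_sources_details(); save_source_info(bill=True)
#   # -> save_source_info(auth=True)   # only when the source type is OCP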
def process(self):
    """Process the message."""
    if self.event_type in (KAFKA_AUTHENTICATION_CREATE,):
        LOG.debug(f"[AuthenticationMsgProcessor] creating source for source_id: {self.source_id}")
        storage.create_source_event(self.source_id, self.account_number, self.auth_header, self.offset)

    if storage.is_known_source(self.source_id):
        if self.event_type in (KAFKA_AUTHENTICATION_CREATE,):
            self.save_source_info(auth=True)

        if self.event_type in (KAFKA_AUTHENTICATION_UPDATE,):
            if storage.get_source_type(self.source_id) == Provider.PROVIDER_AZURE:
                # Because azure auth is split in Sources backend, we need to check both
                # auth and billing when we receive either an auth update or app update event
                updated = self.save_source_info(auth=True, bill=True)
            else:
                updated = self.save_source_info(auth=True)

            if updated:
                LOG.info(f"[AuthenticationMsgProcessor] source_id {self.source_id} updated")
                storage.enqueue_source_create_or_update(self.source_id)
            else:
                LOG.info(f"[AuthenticationMsgProcessor] source_id {self.source_id} not updated. No changes detected.")
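
# Rough summary (an informal sketch, not exhaustive) of which credentials are
# refreshed by the branches above:
#
#   event_type               provider    save_source_info(...)
#   Authentication.create    any         auth=True
#   Authentication.update    Azure       auth=True, bill=True  (auth is split in Sources)
#   Authentication.update    non-Azure   auth=True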
def set_source_status(self, error_msg, cost_management_type_id=None):
    """Set the source status with error message."""
    if storage.is_known_source(self._source_id):
        storage.clear_update_flag(self._source_id)

    status_header = self.build_status_header()
    if not status_header:
        return False

    if not cost_management_type_id:
        cost_management_type_id = self.get_cost_management_application_type_id()

    application_query_url = (
        f"{self._base_url}/{ENDPOINT_APPLICATIONS}"
        f"?filter[application_type_id]={cost_management_type_id}&filter[source_id]={self._source_id}"
    )
    application_query_response = self._get_network_response(
        application_query_url, "[set_source_status] unable to get application"
    )
    response_data = (application_query_response.get("data") or [None])[0]
    if response_data:
        application_id = response_data.get("id")
        application_url = f"{self._base_url}/{ENDPOINT_APPLICATIONS}/{application_id}"

        json_data = self.build_source_status(error_msg)
        if storage.save_status(self._source_id, json_data):
            LOG.info(f"[set_source_status] source_id: {self._source_id}: {json_data}")
            application_response = requests.patch(application_url, json=json_data, headers=status_header)
            error_message = (
                f"[set_source_status] error: Status code: "
                f"{application_response.status_code}. Response: {application_response.text}."
            )
            if application_response.status_code != 204:
                if application_response.status_code != 404:
                    raise SourcesHTTPClientError(error_message)
                else:
                    LOG.info(error_message)
            return True
    return False
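
# A minimal sketch of the status payload PATCHed to the Sources application
# endpoint above. build_source_status() is assumed here to follow the Sources
# API availability_status convention; the exact keys and values are
# illustrative, not confirmed by this snippet.
_example_status_payload = {
    "availability_status": "unavailable",
    "availability_status_error": "Insufficient permissions for cost management",
}
# With no error, an available source might instead be reported as:
#   {"availability_status": "available", "availability_status_error": ""}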
def process(self):
    """Process the message."""
    # We have no `self.event_type in (Source.X,)` statements here because we will only
    # process Source.update. All non-update events are filtered in `msg_for_cost_mgmt`
    if not storage.is_known_source(self.source_id):
        LOG.info("[SourceMsgProcessor] update event for unknown source_id, skipping...")
        return

    updated = self.save_sources_details()
    if storage.get_source_type(self.source_id) == Provider.PROVIDER_OCP:
        updated |= self.save_source_info(auth=True)

    if updated:
        LOG.info(f"[SourceMsgProcessor] source_id {self.source_id} updated")
        storage.enqueue_source_create_or_update(self.source_id)
    else:
        LOG.info(f"[SourceMsgProcessor] source_id {self.source_id} not updated. No changes detected.")
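
# Note on `updated |= ...` above: `|=` on booleans acts like a logical OR that
# still evaluates the right-hand side, so the OCP auth refresh is attempted
# even when save_sources_details() already reported a change, e.g.:
#
#   updated = True
#   updated |= False   # still True; the auth save ran regardless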
def test_is_known_source_db_down(self, mock_objects):
    """Test InterfaceError in is_known_source."""
    mock_objects.get.side_effect = InterfaceError("test_exception")
    with self.assertRaises(InterfaceError):
        storage.is_known_source(self.test_source_id)
def test_is_known_source(self):
    """Tests is_known_source method."""
    self.assertTrue(storage.is_known_source(self.test_source_id))
    self.assertFalse(storage.is_known_source(self.test_source_id + 1))
async def process_messages(msg_pending_queue):  # pragma: no cover
    """
    Process messages from Platform-Sources kafka service.

    Handler for various application/source create and delete events.
    'create' events:
        Issues a Sources REST API call to get additional context for
        the Platform-Sources kafka event. This information is stored
        in the Sources database table.
    'destroy' events:
        Enqueues a source delete event which will be processed in
        the synchronize_sources method.

    Args:
        msg_pending_queue (Asyncio queue): Queue to hold kafka messages to be filtered

    Returns:
        None

    """
    LOG.info('Waiting to process incoming kafka messages...')
    while True:
        msg_data = await msg_pending_queue.get()

        LOG.info(f'Processing Event: {str(msg_data)}')
        try:
            if msg_data.get('event_type') in (KAFKA_APPLICATION_CREATE,):
                storage.create_provider_event(msg_data.get('source_id'),
                                              msg_data.get('auth_header'),
                                              msg_data.get('offset'))
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(pool,
                                                     sources_network_info,
                                                     msg_data.get('source_id'),
                                                     msg_data.get('auth_header'))
            elif msg_data.get('event_type') in (KAFKA_SOURCE_UPDATE,):
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    if storage.is_known_source(msg_data.get('source_id')) is False:
                        LOG.info('Update event for unknown source id, skipping...')
                        continue
                    await EVENT_LOOP.run_in_executor(pool,
                                                     sources_network_info,
                                                     msg_data.get('source_id'),
                                                     msg_data.get('auth_header'))
            elif msg_data.get('event_type') in (KAFKA_AUTHENTICATION_CREATE, KAFKA_AUTHENTICATION_UPDATE):
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(pool,
                                                     sources_network_auth_info,
                                                     msg_data.get('resource_id'),
                                                     msg_data.get('auth_header'))
                    msg_data['source_id'] = storage.get_source_from_endpoint(msg_data.get('resource_id'))
            elif msg_data.get('event_type') in (KAFKA_APPLICATION_DESTROY, KAFKA_SOURCE_DESTROY):
                storage.enqueue_source_delete(msg_data.get('source_id'))

            if msg_data.get('event_type') in (KAFKA_SOURCE_UPDATE, KAFKA_AUTHENTICATION_UPDATE):
                storage.enqueue_source_update(msg_data.get('source_id'))
        except Exception as error:
            # The reason for catching all exceptions is to ensure that the event
            # loop remains active in the event that message processing fails unexpectedly.
            source_id = str(msg_data.get('source_id', 'unknown'))
            LOG.error(f'Source {source_id} Unexpected message processing error: {str(error)}')
async def process_messages(msg_pending_queue):  # noqa: C901; pragma: no cover
    """
    Process messages from Platform-Sources kafka service.

    Handler for various application/source create and delete events.
    'create' events:
        Issues a Sources REST API call to get additional context for
        the Platform-Sources kafka event. This information is stored
        in the Sources database table.
    'destroy' events:
        Enqueues a source delete event which will be processed in
        the synchronize_sources method.

    Args:
        msg_pending_queue (Asyncio queue): Queue to hold kafka messages to be filtered

    Returns:
        None

    """
    LOG.info("Waiting to process incoming kafka messages...")
    while True:
        msg_data = await msg_pending_queue.get()

        LOG.info(f"Processing Event: {str(msg_data)}")
        try:
            if msg_data.get("event_type") in (KAFKA_APPLICATION_CREATE, KAFKA_AUTHENTICATION_CREATE):
                if msg_data.get("event_type") == KAFKA_AUTHENTICATION_CREATE:
                    sources_network = SourcesHTTPClient(msg_data.get("auth_header"))
                    msg_data["source_id"] = sources_network.get_source_id_from_endpoint_id(msg_data.get("resource_id"))

                storage.create_source_event(
                    msg_data.get("source_id"), msg_data.get("auth_header"), msg_data.get("offset")
                )

                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(
                        pool, sources_network_info, msg_data.get("source_id"), msg_data.get("auth_header")
                    )

            elif msg_data.get("event_type") in (KAFKA_SOURCE_UPDATE,):
                with concurrent.futures.ThreadPoolExecutor() as pool:
                    if storage.is_known_source(msg_data.get("source_id")) is False:
                        LOG.info("Update event for unknown source id, skipping...")
                        continue
                    await EVENT_LOOP.run_in_executor(
                        pool, sources_network_info, msg_data.get("source_id"), msg_data.get("auth_header")
                    )

            elif msg_data.get("event_type") in (KAFKA_AUTHENTICATION_UPDATE,):
                msg_data["source_id"] = storage.get_source_from_endpoint(msg_data.get("resource_id"))

                with concurrent.futures.ThreadPoolExecutor() as pool:
                    await EVENT_LOOP.run_in_executor(
                        pool, save_auth_info, msg_data.get("auth_header"), msg_data.get("source_id")
                    )

            elif msg_data.get("event_type") in (KAFKA_APPLICATION_DESTROY, KAFKA_SOURCE_DESTROY):
                storage.enqueue_source_delete(msg_data.get("source_id"))

            if msg_data.get("event_type") in (KAFKA_SOURCE_UPDATE, KAFKA_AUTHENTICATION_UPDATE):
                storage.enqueue_source_update(msg_data.get("source_id"))

        except (InterfaceError, OperationalError) as error:
            LOG.error(
                f"[process_messages] Closing DB connection and re-queueing failed operation."
                f" Encountered {type(error).__name__}: {error}"
            )
            connection.close()
            await asyncio.sleep(Config.RETRY_SECONDS)
            await msg_pending_queue.put(msg_data)
            LOG.info(
                f'Requeued failed operation: {msg_data.get("event_type")} '
                f'for Source ID: {str(msg_data.get("source_id"))}.'
            )
        except Exception as error:
            # The reason for catching all exceptions is to ensure that the event
            # loop remains active in the event that message processing fails unexpectedly.
            source_id = str(msg_data.get("source_id", "unknown"))
            LOG.error(f"Source {source_id} Unexpected message processing error: {str(error)}", exc_info=True)
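
# A minimal usage sketch, assuming the Kafka listener enqueues decoded messages
# as plain dicts whose keys mirror the .get() calls above. The queue name and
# the surrounding event-loop wiring are hypothetical.
#
#   PENDING_PROCESS_QUEUE = asyncio.Queue()
#   await PENDING_PROCESS_QUEUE.put({
#       "event_type": KAFKA_SOURCE_DESTROY,
#       "source_id": 1,
#       "offset": 10,
#       "auth_header": "<base64-encoded identity header>",
#   })
#   EVENT_LOOP.create_task(process_messages(PENDING_PROCESS_QUEUE))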