Example #1
    def test_get_pending_requests_retrieves_empty_list_of_requests_when_no_pending_requests_in_db(
            self):
        # Given
        consumer = RequestDBConsumer(
            "someRequest",
            request_processing_limit=REQUEST_PROCESSING_LIMIT,
        )

        # When
        reqs = consumer.get_pending_requests(self.session)

        # Then
        self.assertEqual(0, len(list(reqs.values())[0]))
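
Examples #1 and #2 rely on a test fixture that provides `self.session` and a `REQUEST_PROCESSING_LIMIT` constant, neither of which is shown here. A minimal sketch of such a fixture, assuming an in-memory SQLite engine and a hypothetical limit value, might look like:

# Sketch only: the assumed test fixture behind Examples #1-#3.
# REQUEST_PROCESSING_LIMIT is a hypothetical module-level constant.
from unittest import TestCase

from sqlalchemy import create_engine
from sqlalchemy.orm import Session

REQUEST_PROCESSING_LIMIT = 10  # assumed batch-size limit used by the tests


class ConsumerTestCase(TestCase):
    def setUp(self):
        self.engine = create_engine("sqlite://")  # in-memory database per test
        self.session = Session(bind=self.engine)

    def tearDown(self):
        self.session.close()
        self.engine.dispose()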
Example #2
    def test_get_pending_requests_retrieves_pending_requests_only(self):
        # Given
        consumer = RequestDBConsumer(
            "someRequest",
            request_processing_limit=REQUEST_PROCESSING_LIMIT,
        )

        self._prepare_two_pending_and_one_processed_request()

        # When
        reqs = consumer.get_pending_requests(self.session)

        # Then
        self.assertEqual(2, len(list(reqs.values())[0]))
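
The helper `_prepare_two_pending_and_one_processed_request` is not shown in these examples. A plausible sketch, written as a method on the test class and assuming a `DBRequest` model with `type`, `state`, `cbsd_id`, and `payload` columns (all of which are assumptions), could be:

    def _prepare_two_pending_and_one_processed_request(self):
        # Sketch only: seed two "pending" requests and one "processed" request,
        # so a consumer of "someRequest" should pick up exactly two rows.
        requests = [
            DBRequest(type="someRequest", state="pending", cbsd_id="cbsd1", payload={}),
            DBRequest(type="someRequest", state="pending", cbsd_id="cbsd2", payload={}),
            DBRequest(type="someRequest", state="processed", cbsd_id="cbsd3", payload={}),
        ]
        self.session.add_all(requests)
        self.session.commit()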
Example #3
    def test_different_processes_dont_pick_up_each_others_requests(
            self, max_batch_size, req_count_1, req_count_2):
        """
        This is a test for horizontal scaling functionality of the Configuration Controller.
        It tests if two processes (in this case associated with different Session instances) only pick those requests
        that have no lock on them.
        """
        # Given
        config = self.get_config()
        config.REQUEST_PROCESSING_LIMIT = max_batch_size
        session1 = Session(bind=self.engine)
        session2 = Session(bind=self.engine)

        consumer = RequestDBConsumer(
            "someRequest",
            request_processing_limit=config.REQUEST_PROCESSING_LIMIT,
        )
        self._prepare_two_pending_and_one_processed_request()

        # When
        reqs1 = consumer.get_pending_requests(session1)
        reqs2 = consumer.get_pending_requests(session2)

        reqs1_list = list(reqs1.values())[0]
        reqs2_list = list(reqs2.values())[0]

        session1.commit()
        session2.commit()

        # Then
        self.assertEqual(req_count_1, len(reqs1_list))
        self.assertEqual(req_count_2, len(reqs2_list))
        if reqs1_list and reqs2_list:
            # Making sure we're not getting the same requests in both sessions
            self.assertNotEqual(reqs1_list[0].cbsd_id, reqs2_list[0].cbsd_id)

        session1.close()
        session2.close()
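
Example #3 only passes if `get_pending_requests` takes a row-level lock, so that a second session skips rows already claimed by the first. One way to get that behaviour in SQLAlchemy is `SELECT ... FOR UPDATE SKIP LOCKED`; the sketch below illustrates the idea and is not taken from the actual consumer (the `DBRequest` model is again assumed):

# Sketch only: a consumer that selects pending requests with SKIP LOCKED so
# that concurrent sessions never return the same rows (requires a database
# that supports FOR UPDATE SKIP LOCKED, e.g. PostgreSQL).
from sqlalchemy.orm import Session


class LockingConsumerSketch:
    def __init__(self, request_type: str, request_processing_limit: int):
        self.request_type = request_type
        self.limit = request_processing_limit

    def get_pending_requests(self, session: Session) -> dict:
        db_requests = (
            session.query(DBRequest)  # DBRequest is an assumed ORM model
            .filter(DBRequest.type == self.request_type, DBRequest.state == "pending")
            .limit(self.limit)
            .with_for_update(skip_locked=True)  # rows locked elsewhere are skipped
            .all()
        )
        return {self.request_type: db_requests}

Under this kind of pattern, the `session1.commit()` and `session2.commit()` calls in Example #3 release the row locks each query acquired, which is why the test commits both sessions before closing them.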
Example #4
def run():
    """
    Top-level function for configuration controller
    """
    config = get_config()
    scheduler = BackgroundScheduler()
    db_engine = create_engine(
        url=config.SQLALCHEMY_DB_URI,
        encoding=config.SQLALCHEMY_DB_ENCODING,
        echo=config.SQLALCHEMY_ECHO,
        future=config.SQLALCHEMY_FUTURE,
        pool_size=config.SQLALCHEMY_ENGINE_POOL_SIZE,
        max_overflow=config.SQLALCHEMY_ENGINE_MAX_OVERFLOW,
    )
    session_manager = SessionManager(db_engine=db_engine)
    router = RequestRouter(
        sas_url=config.SAS_URL,
        rc_ingest_url=config.RC_INGEST_URL,
        cert_path=config.CC_CERT_PATH,
        ssl_key_path=config.CC_SSL_KEY_PATH,
        request_mapping=request_mapping,
        ssl_verify=config.SAS_CERT_PATH,
    )
    fluentd_client = FluentdClient()
    for request_type in RequestTypes:
        req_type = request_type.value
        response_type = request_response[req_type]
        consumer = RequestDBConsumer(
            request_type=req_type,
            request_processing_limit=config.REQUEST_PROCESSING_LIMIT,
        )
        processor = ResponseDBProcessor(
            response_type=response_type,
            process_responses_func=processor_strategies[req_type]["process_responses"],
            fluentd_client=fluentd_client,
        )

        scheduler.add_job(
            process_requests,
            args=[consumer, processor, router, session_manager, fluentd_client],
            trigger=IntervalTrigger(
                seconds=config.REQUEST_PROCESSING_INTERVAL_SEC,
            ),
            max_instances=1,
            name=f"{req_type}_job",
        )
    scheduler.start()

    while True:
        time.sleep(1)
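
`SessionManager` is constructed here and its `session_scope()` context manager is used in Example #5. A sketch of that manager, assuming the common SQLAlchemy session-per-scope pattern rather than the actual implementation, might be:

# Sketch only: an assumed SessionManager exposing the session_scope() context
# manager used in Example #5.
from contextlib import contextmanager

from sqlalchemy.engine import Engine
from sqlalchemy.orm import Session, sessionmaker


class SessionManager:
    def __init__(self, db_engine: Engine):
        self._session_factory = sessionmaker(bind=db_engine)

    @contextmanager
    def session_scope(self):
        session: Session = self._session_factory()
        try:
            yield session  # Example #5 commits explicitly inside the scope
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()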
Example #5
def process_requests(
        consumer: RequestDBConsumer,
        processor: ResponseDBProcessor,
        router: RequestRouter,
        session_manager: SessionManager,
        fluentd_client: FluentdClient,
) -> Optional[requests.Response]:
    """
    Process SAS requests
    """

    with session_manager.session_scope() as session:
        requests_map = consumer.get_pending_requests(session)
        requests_type = next(iter(requests_map))
        requests_list = requests_map[requests_type]

        if not requests_list:
            logger.debug(f"Received no {requests_type} requests.")
            return None

        no_of_requests = len(requests_list)
        logger.info(
            f'Processing {no_of_requests} {requests_type} requests',
        )
        bulked_sas_requests = merge_requests(requests_map)

        _log_requests_map(requests_map, fluentd_client)
        try:
            sas_response = router.post_to_sas(bulked_sas_requests)
            logger.info(
                f"Sent {bulked_sas_requests} to SAS and got the following response: {sas_response.content}",
            )
        except RequestRouterError as e:
            logger.error(f"Error posting request to SAS: {e}")
            return None

        logger.info(f"About to process responses {sas_response=}")
        processor.process_response(requests_list, sas_response, session)

        session.commit()

        return sas_response
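
`merge_requests` is expected to bulk the per-type list of DB rows into a single SAS request body. A hypothetical sketch, assuming each DB row carries its JSON body in a `payload` attribute, is:

# Hypothetical sketch of merge_requests: collapse the per-type map of DB rows
# into one bulked SAS payload, e.g. {"registrationRequest": [payload1, payload2]}.
from typing import Dict, List


def merge_requests(requests_map: Dict[str, list]) -> Dict[str, List[dict]]:
    request_type = next(iter(requests_map))
    return {request_type: [db_request.payload for db_request in requests_map[request_type]]}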