def run_query(
    connection: ClickhousePool,
    query: str,
    records_count: int,
    metrics: MetricsBackend,
) -> None:
    """Execute *query* robustly on *connection* and record it in the
    module-level ``queries`` log under the connection's host.

    ``records_count`` and ``metrics`` are accepted but unused here —
    presumably to mirror the production ``run_query`` signature (verify
    against the caller that swaps the two in).
    """
    connection.execute_robust(query)
    host = connection.host
    queries[host].append(query)
def test_robust_concurrency_limit() -> None:
    """``execute_robust`` gives up immediately on a generic ClickHouse error
    but retries when the error is TOO_MANY_SIMULTANEOUS_QUERIES."""
    connection = mock.Mock()
    # First case: a generic error (arbitrary code in extra_data).
    connection.execute.side_effect = ClickhouseError("some error", extra_data={"code": 1})
    pool = ClickhousePool("host", 100, "test", "test", "test")
    # Swap the pool's real connection queue for a single-slot queue holding
    # the mock, so execute_robust always picks up our mock connection.
    pool.pool = queue.LifoQueue(1)
    pool.pool.put(connection, block=False)
    with pytest.raises(ClickhouseError):
        pool.execute_robust("SELECT something")
    # A generic error must not be retried: exactly one attempt.
    connection.execute.assert_called_once()
    # Reset the call count AND the previous side_effect before the retry case.
    connection.reset_mock(side_effect=True)
    connection.execute.side_effect = ClickhouseError(
        "some error",
        code=errors.ErrorCodes.TOO_MANY_SIMULTANEOUS_QUERIES,
    )
    with pytest.raises(ClickhouseError):
        pool.execute_robust("SELECT something")
    # The concurrency-limit error is retried; the pool gives up after three
    # attempts and re-raises.
    assert connection.execute.call_count == 3, "Expected three attempts"
def run_query(
    connection: ClickhousePool,
    query: str,
    records_count: int,
    metrics: MetricsBackend,
) -> None:
    """Run a replacement query on *connection*, logging it and reporting
    the row count and duration to *metrics*.

    Args:
        connection: Pool the query is executed on (via ``execute_robust``).
        query: The replacement SQL to execute.
        records_count: Number of rows being replaced; emitted as a metric.
        metrics: Backend that receives the count and duration timings,
            tagged with the connection's host.
    """
    # monotonic() is immune to system clock adjustments, so the measured
    # duration cannot go negative or jump (time.time() can).
    start = time.monotonic()
    # Lazy %-args: the message is only formatted if the level is enabled.
    logger.debug("Executing replace query: %s", query)
    connection.execute_robust(query)
    duration = int((time.monotonic() - start) * 1000)
    logger.info("Replacing %s rows took %sms", records_count, duration)
    metrics.timing(
        "replacements.count",
        records_count,
        tags={"host": connection.host},
    )
    metrics.timing(
        "replacements.duration",
        duration,
        tags={"host": connection.host},
    )