def _stream_log(self, statement_execution_id: int, log: str, clear_cache: bool = False):
    """Flush oversized query log output to the DB while the query runs.

    Log text longer than ``description_length`` is persisted as stream-log
    rows so the frontend can read it mid-execution; any remainder shorter
    than one chunk stays in ``self._log_cache`` for the next call.

    Arguments:
        statement_execution_id {int} -- Execution the log belongs to
        log {str} -- Incoming new log text

    Keyword Arguments:
        clear_cache {bool} -- When True, flush everything (including the
            final partial chunk) instead of retaining it in the cache
            (default: {False})
    """
    pending = merge_str(self._log_cache, log)
    chunk_size = description_length
    # With clear_cache we drain down to empty; otherwise we only flush
    # complete chunks and keep the tail cached.
    flush_threshold = 0 if clear_cache else chunk_size
    wrote_any_chunk = False

    with DBSession() as session:
        while len(pending) > flush_threshold:
            cut = min(len(pending), chunk_size)
            # commit=False: batch all chunk inserts into the single
            # commit below.
            qe_logic.create_statement_execution_stream_log(
                statement_execution_id,
                pending[:cut],
                commit=False,
                session=session,
            )
            wrote_any_chunk = True
            pending = pending[cut:]

        # First time any chunk is persisted, mark the execution as
        # having a streamed log so the frontend knows where to look.
        if wrote_any_chunk and not self._has_log:
            qe_logic.update_statement_execution(
                statement_execution_id,
                has_log=True,
                log_path="stream://",
                session=session,
            )
            self._has_log = True
        session.commit()
    self._log_cache = pending
def test_one_empty(self):
    # Merging with an empty side must return the non-empty side unchanged;
    # two empties must yield the empty string.
    cases = [
        ("", "hello", "hello"),
        ("hello", "", "hello"),
        ("", "", ""),
    ]
    for left, right, expected in cases:
        self.assertEqual(merge_str(left, right), expected)
def test_both_not_empty(self):
    # When both operands are non-empty, the given separator is placed
    # between them.
    for left, right, sep, expected in (
        ("hello", "world", " ", "hello world"),
        ("left", "right", ",", "left,right"),
    ):
        self.assertEqual(merge_str(left, right, sep), expected)