def on_cancel(self):
    """Mark the execution as cancelled and notify websocket listeners.

    If a statement was mid-flight, its buffered log is uploaded and the
    statement row is closed out with CANCEL status before the parent
    query execution itself is marked CANCEL.
    """
    utcnow = datetime.datetime.utcnow()

    # Close out the currently-running statement, if any
    if self.statement_execution_ids:
        statement_execution_id = self.statement_execution_ids[-1]
        upload_path, has_log = self._upload_log(statement_execution_id)
        qe_logic.update_statement_execution(
            statement_execution_id,
            status=StatementExecutionStatus.CANCEL,
            completed_at=utcnow,
            has_log=self._has_log,
            # Only record a log path when the upload actually produced one
            log_path=upload_path if has_log else None,
        )

    with DBSession() as session:
        query_execution = qe_logic.update_query_execution(
            self._query_execution_id,
            status=QueryExecutionStatus.CANCEL,
            completed_at=utcnow,
            session=session,
        ).to_dict()

    # Broadcast the cancellation to clients watching this execution
    socketio.emit(
        "query_cancel",
        query_execution,
        namespace=QUERY_EXECUTION_NAMESPACE,
        room=self._query_execution_id,
    )
def on_statement_update(
    self,
    log: str = "",
    meta_info: str = None,  # NOTE(review): effectively Optional[str]; None means "unchanged"
    percent_complete=None,
):
    """Push incremental progress for the running statement.

    Persists new log text and meta-info changes, tracks percent
    complete, and emits a single websocket "statement_update" event
    only when at least one of the three actually changed.

    Keyword Arguments:
        log {str} -- new log text accumulated since the last call
        meta_info {str} -- engine meta info; None means no change
        percent_complete -- completion percentage; None means no change
    """
    statement_execution_id = self.statement_execution_ids[-1]

    updated_meta_info = False
    if meta_info is not None and self._meta_info != meta_info:
        self._meta_info = meta_info
        qe_logic.update_statement_execution(
            statement_execution_id, meta_info=meta_info
        )
        updated_meta_info = True

    has_log = len(log) > 0
    if has_log:
        self._stream_log(statement_execution_id, log)

    percent_complete_change = (
        percent_complete is not None and self._percent_complete != percent_complete
    )
    if percent_complete_change:
        self._percent_complete = percent_complete

    # Emit only when something changed, to avoid noisy websocket traffic
    if updated_meta_info or has_log or percent_complete_change:
        statement_update_dict = {
            "query_execution_id": self._query_execution_id,
            "id": statement_execution_id,
        }
        if updated_meta_info:
            statement_update_dict["meta_info"] = meta_info
        if has_log:
            statement_update_dict["log"] = [log]
        if percent_complete_change:
            statement_update_dict["percent_complete"] = percent_complete
            self._statement_progress = {
                statement_execution_id: {
                    "percent_complete": percent_complete,
                }
            }
            self.update_progress()

        socketio.emit(
            "statement_update",
            statement_update_dict,
            namespace=QUERY_EXECUTION_NAMESPACE,
            room=self._query_execution_id,
        )
def _stream_log(self, statement_execution_id: int, log: str, clear_cache: bool = False):
    """
    Persists the log in DB that's over description_length for them
    to be read from frontend while query is running

    Arguments:
        statement_execution_id {int}
        log {str} -- Incoming new log

    Keyword Arguments:
        clear_cache {bool} -- [If true, will push all _log_cache into mysql DB] (default: {False})
    """
    # Prepend whatever partial chunk was left over from the previous call
    merged_log = merge_str(self._log_cache, log)
    created_log = False

    chunk_size = description_length
    # Normally keep up to one chunk_size of text cached so only full-sized
    # rows are written; clear_cache=True drains everything (threshold 0).
    cache_length = 0 if clear_cache else chunk_size
    with DBSession() as session:
        while len(merged_log) > cache_length:
            size_of_chunk = min(len(merged_log), chunk_size)
            log_chunk = merged_log[:size_of_chunk]
            qe_logic.create_statement_execution_stream_log(
                statement_execution_id, log_chunk, commit=False, session=session)
            created_log = True
            merged_log = merged_log[size_of_chunk:]

        # First streamed row for this statement: flag the execution as
        # having a log and point its log_path at the streaming source.
        if not self._has_log and created_log:
            qe_logic.update_statement_execution(
                statement_execution_id,
                has_log=True,
                log_path="stream://",
                session=session,
            )
            self._has_log = True
        # Single commit covers all rows created above (commit=False per row)
        session.commit()

    # Remainder (shorter than chunk_size unless flushing) stays cached
    self._log_cache = merged_log
def on_exception(self, error_type: int, error_str: str, error_extracted: str):
    """Record a failed execution and notify websocket listeners.

    Closes out the in-flight statement (if any) as ERROR, stores the
    error details for the query execution, marks the execution ERROR,
    and emits a "query_exception" event.

    Arguments:
        error_type {int} -- categorized error code
        error_str {str} -- full error message
        error_extracted {str} -- short extracted message; may be None
    """
    utcnow = datetime.datetime.utcnow()

    # Cap the extracted message at 5000 chars (DB column limit);
    # slicing is a no-op for shorter strings so no length check needed.
    if error_extracted is not None:
        error_extracted = error_extracted[:5000]

    with DBSession() as session:
        # Close out the statement that raised, if one was running
        if self.statement_execution_ids:
            statement_execution_id = self.statement_execution_ids[-1]
            upload_path, has_log = self._upload_log(statement_execution_id)
            qe_logic.update_statement_execution(
                statement_execution_id,
                status=StatementExecutionStatus.ERROR,
                completed_at=utcnow,
                has_log=self._has_log,
                log_path=upload_path if has_log else None,
                session=session,
            )

        qe_logic.create_query_execution_error(
            self._query_execution_id,
            error_type=error_type,
            error_message_extracted=error_extracted,
            error_message=error_str,
            session=session,
        )

        query_execution = qe_logic.update_query_execution(
            self._query_execution_id,
            status=QueryExecutionStatus.ERROR,
            completed_at=utcnow,
            session=session,
        ).to_dict()

    socketio.emit(
        "query_exception",
        query_execution,
        namespace=QUERY_EXECUTION_NAMESPACE,
        room=self._query_execution_id,
    )
def on_statement_end(self, cursor):
    """Finalize the last statement once its query has finished running.

    Flips the statement into UPLOADING while the result set and log are
    persisted, then marks it DONE and broadcasts the completed record.

    Arguments:
        cursor -- engine cursor holding the statement's result set
    """
    statement_execution_id = self.statement_execution_ids[-1]

    # Announce the UPLOADING phase before moving any data around
    qe_logic.update_statement_execution(
        statement_execution_id,
        status=StatementExecutionStatus.UPLOADING,
    )
    socketio.emit(
        "statement_update",
        {
            "query_execution_id": self._query_execution_id,
            "id": statement_execution_id,
            "status": StatementExecutionStatus.UPLOADING.value,
        },
        namespace=QUERY_EXECUTION_NAMESPACE,
        room=self._query_execution_id,
    )

    # Persist the result set, then flush any remaining buffered log
    result_path, result_row_count = self._upload_query_result(
        cursor, statement_execution_id
    )
    upload_path, has_log = self._upload_log(statement_execution_id)

    completed_statement = qe_logic.update_statement_execution(
        statement_execution_id,
        status=StatementExecutionStatus.DONE,
        completed_at=datetime.datetime.utcnow(),
        result_row_count=result_row_count,
        has_log=self._has_log,
        result_path=result_path,
        log_path=upload_path if has_log else None,
    ).to_dict()

    # Statement is finished — reset its per-statement progress tracking
    self._statement_progress = {}
    self.update_progress()

    socketio.emit(
        "statement_end",
        completed_statement,
        namespace=QUERY_EXECUTION_NAMESPACE,
        room=self._query_execution_id,
    )