def get_statement_execution_log(statement_execution_id):
    """Return the logs of a statement execution as a list of log lines.

    Streamed executions (``log_path`` starting with ``"stream"``) are read
    from the DB-backed stream-log table; otherwise the persisted log file
    is read, truncated to MAX_LOG_RETURN_LINES lines with a trailing notice.

    Raises:
        Aborts with RESOURCE_NOT_FOUND_STATUS_CODE if the log file is gone.
    """
    with DBSession() as session:
        statement_execution = logic.get_statement_execution_by_id(
            statement_execution_id, session=session)
        api_assert(statement_execution is not None,
                   message="Invalid statement execution")
        # Permission is checked against the parent query execution.
        verify_query_execution_permission(
            statement_execution.query_execution_id, session=session)

        try:
            # A "stream" path means the log lives in the database,
            # not on a file store.
            if statement_execution.log_path.startswith("stream"):
                logs = logic.get_statement_execution_stream_logs(
                    statement_execution_id)
                return [log.log for log in logs]

            MAX_LOG_RETURN_LINES = 2000
            result = []
            # NOTE: the previous version opened a second, nested DBSession
            # here and re-fetched statement_execution — redundant, since we
            # already hold a live object from the enclosing session.
            if statement_execution.has_log:
                with GenericReader(statement_execution.log_path) as reader:
                    result = reader.read_lines(
                        number_of_lines=MAX_LOG_RETURN_LINES)
                    if len(result) == MAX_LOG_RETURN_LINES:
                        result += [
                            "---------------------------------------------------------------------------",
                            f"We are truncating results since it reached limit of {MAX_LOG_RETURN_LINES} lines.",
                        ]
            # Return an empty list (instead of an implicit None) when
            # there is no persisted log.
            return result
        except FileDoesNotExist as e:
            abort(RESOURCE_NOT_FOUND_STATUS_CODE, str(e))
# --- Example #2 ---
    def _upload_log(self, statement_execution_id: int):
        """Gather the streamed log rows for a statement execution, upload
        them as a single file, then delete the stream rows.

        Returns:
            (log_path, has_log): the uploaded file's URL (or None) and
            whether any log content existed. Best-effort: on any failure
            the error is logged and (None, False) is returned.
        """
        db_read_limit = 50
        db_read_offset = 0

        try:
            # Flush any cached-but-unwritten stream log first.
            self._stream_log(statement_execution_id, "", clear_cache=True)

            logs = []
            has_log = False
            log_path = None

            with DBSession() as session:
                # Page through the stream-log rows db_read_limit at a time.
                while True:
                    log_rows = qe_logic.get_statement_execution_stream_logs(
                        statement_execution_id,
                        limit=db_read_limit,
                        offset=db_read_offset,
                        session=session,
                    )

                    logs += [row.log for row in log_rows]

                    if len(log_rows) < db_read_limit:
                        break
                    db_read_offset += db_read_limit

                if logs:
                    has_log = True
                    uri = f"querybook_temp/{statement_execution_id}/log.txt"
                    with GenericUploader(uri) as uploader:
                        log_path = uploader.upload_url

                        for log in logs:
                            # Stop writing on the first failed chunk.
                            if not uploader.write(log):
                                break
                    # BUG FIX: this delete previously ran after the DBSession
                    # context had exited, passing a closed session. It now
                    # runs while the session is still open.
                    qe_logic.delete_statement_execution_stream_log(
                        statement_execution_id, session=session
                    )
            return log_path, has_log
        except Exception as e:
            import traceback

            LOG.error(
                f"{e}\n{traceback.format_exc()}"
                + "Failed to upload logs. Silently suppressing error"
            )
            # Return an explicit tuple; callers unpack (log_path, has_log),
            # so an implicit None would raise TypeError at the call site.
            return None, False
def on_join_room(query_execution_id):
    """Socket handler: subscribe the client to a query execution's room and
    emit the execution's current state (including latest statement logs and,
    if running, task progress).
    """
    with DBSession() as session:
        execution = qe_logic.get_query_execution_by_id(
            query_execution_id, session=session
        )
        assert execution, "Invalid execution"
        verify_query_engine_permission(execution.engine_id, session=session)

        execution_dict = execution.to_dict(True) if execution is not None else None
        join_room(query_execution_id)

        if execution_dict and len(execution_dict.get("statement_executions", [])):
            # Only the most recent statement execution is enriched.
            statement_execution = execution_dict["statement_executions"][-1]
            # Format statement execution's logs
            if statement_execution["has_log"]:
                logs = qe_logic.get_statement_execution_stream_logs(
                    statement_execution["id"], from_end=True, session=session
                )
                statement_execution["log"] = [log.log for log in logs]

            # Getting task's running data
            if (
                "task_id" in execution_dict
                and execution_dict.get("status", None)
                == QueryExecutionStatus.RUNNING.value
            ):
                task = tasks.run_query_task.AsyncResult(execution_dict["task_id"])
                try:
                    if task is not None and task.info is not None:
                        progress = task.info
                        if str(statement_execution["id"]) in progress:
                            statement_execution["percent_complete"] = progress[
                                str(statement_execution["id"])
                            ].get("percent_complete")
                        # BUG FIX: this was outside the guard above, so when
                        # task.info was None, `progress` was unbound and the
                        # resulting NameError was silently swallowed below.
                        execution_dict["total"] = progress.get("total", 0)

                except Exception as e:
                    # Best-effort enrichment: progress is optional.
                    LOG.info(e)

        emit(
            "query",
            execution_dict,
            namespace=QUERY_EXECUTION_NAMESPACE,
            room=query_execution_id,
        )