Code example #1
File: base_executor.py  Project: czgu/querybook
    def _upload_query_result(self, cursor, statement_execution_id: int):
        # While uploading, the first few rows are fetched and stored as well.
        # CACHE_ROW_SIZE = 50000 is the number of rows stored in MySQL;
        # keep it smaller than the previous value.
        rows_uploaded = 0
        columns = cursor.get_columns()
        if (
            columns is None or len(columns) == 0
        ):  # Nothing to upload: the query returned no column information
            return None, rows_uploaded

        key = "querybook_temp/%s/result.csv" % str(statement_execution_id)
        uploader = GenericUploader(key)
        uploader.start()

        uploader.write(row_to_csv(columns))
        rows_uploaded += 1  # 1 row for the column header

        for row in cursor.get_rows_iter():
            did_upload = uploader.write(row_to_csv(row))
            if not did_upload:
                break
            rows_uploaded += 1
        uploader.end()

        return uploader.upload_url, rows_uploaded
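
The method above stops writing as soon as `uploader.write` returns a falsy value. Below is a minimal sketch of that contract, with a hypothetical `InMemoryUploader` standing in for querybook's `GenericUploader`; the class itself, the `max_bytes` cap, and the `memory://` URL are illustrative assumptions, not querybook behavior.

import csv
import io


def row_to_csv(row):
    # Simplified stand-in for querybook's row_to_csv helper.
    buf = io.StringIO()
    csv.writer(buf).writerow(row)
    return buf.getvalue()


class InMemoryUploader:
    # Hypothetical stand-in for GenericUploader, buffering in memory.
    def __init__(self, key, max_bytes=1024 * 1024):
        self.key = key
        self.max_bytes = max_bytes
        self._buffer = io.StringIO()
        self.upload_url = None

    def start(self):
        self._buffer = io.StringIO()

    def write(self, data: str) -> bool:
        # Refuse the write (returning False) once the cap would be
        # exceeded, which is what lets the caller's loop stop early.
        if self._buffer.tell() + len(data) > self.max_bytes:
            return False
        self._buffer.write(data)
        return True

    def end(self):
        # A real uploader would flush the buffer to blob storage here.
        self.upload_url = f"memory://{self.key}"


# Quick check of the early-stop contract:
uploader = InMemoryUploader("demo/result.csv", max_bytes=32)
uploader.start()
assert uploader.write(row_to_csv(["id", "name"]))        # fits under the cap
assert not uploader.write(row_to_csv(["x" * 100, "y"]))  # over the cap
uploader.end()
print(uploader.upload_url)  # memory://demo/result.csv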
Code example #2
File: base_executor.py  Project: czgu/querybook
    def _upload_log(self, statement_execution_id: int):
        db_read_limit = 50
        db_read_offset = 0

        try:
            # Flush the log cache for this execution before reading
            # the stored rows back from the DB
            self._stream_log(statement_execution_id, "", clear_cache=True)

            logs = []
            has_log = False
            log_path = None

            with DBSession() as session:
                while True:
                    log_rows = qe_logic.get_statement_execution_stream_logs(
                        statement_execution_id,
                        limit=db_read_limit,
                        offset=db_read_offset,
                        session=session,
                    )

                    logs += map(lambda log: log.log, log_rows)

                    if len(log_rows) < db_read_limit:
                        break
                    db_read_offset += db_read_limit

                if len(logs):
                    has_log = True
                    uri = f"querybook_temp/{statement_execution_id}/log.txt"
                    with GenericUploader(uri) as uploader:
                        log_path = uploader.upload_url

                        for log in logs:
                            did_upload = uploader.write(log)
                            if not did_upload:
                                break
                    # The streamed rows are redundant once the log file is
                    # uploaded; delete them while the session is still open
                    qe_logic.delete_statement_execution_stream_log(
                        statement_execution_id, session=session
                    )
            return log_path, has_log
        except Exception as e:
            import traceback

            LOG.error(
                f"{e}\n{traceback.format_exc()}"
                + "Failed to upload logs. Silently suppressing error"
            )
            # Suppress the error but still return the expected tuple shape
            return None, False
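
The `while True` loop above is a plain offset-pagination idiom: fetch pages of `db_read_limit` rows and stop when a page comes back short. Here is a standalone sketch of the same idiom; the `paginate` helper and the `fake_fetch` data source are hypothetical, for illustration only.

from typing import Callable, Iterator, List, TypeVar

T = TypeVar("T")


def paginate(
    fetch_page: Callable[[int, int], List[T]], limit: int = 50
) -> Iterator[T]:
    """fetch_page(limit, offset) returns up to `limit` rows at `offset`."""
    offset = 0
    while True:
        page = fetch_page(limit, offset)
        yield from page
        if len(page) < limit:  # a short (or empty) page means we're done
            break
        offset += limit


# Usage with a stand-in data source of 123 rows:
rows = list(range(123))


def fake_fetch(limit, offset):
    return rows[offset : offset + limit]


assert list(paginate(fake_fetch)) == rows  # 3 pages: 50 + 50 + 23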