Exemplo n.º 1
0
    def __init__(self, conn_str, current=None, **kwargs):
        """Initialize a cache engine from a connection string or another engine.

        Args:
            conn_str: a cache connection string, or a KqlEngine instance
                whose cluster/database identify the cache folder.
            current: the currently active connection, used by the parser to
                fill in missing parts of the connection string.
            **kwargs: unused; accepted for interface compatibility.
        """
        super().__init__()
        self._parsed_conn = {}
        self.kql_engine = None
        if isinstance(conn_str, KqlEngine):
            # Wrap an existing engine: synthesize an equivalent cache
            # connection string from its cluster and database.
            self.kql_engine = conn_str
            database_name = self.kql_engine.get_database()
            cluster_name = self.kql_engine.get_cluster()
            conn_str = "{0}://cluster('{1}').database('{2}')".format(
                self._URI_SCHEMA_NAME, cluster_name, database_name)
        self._parsed_conn = self._parse_common_connection_str(
            conn_str, current, self._URI_SCHEMA_NAME, self._MANDATORY_KEY,
            self._ALT_URI_SCHEMA_NAMES, self._ALL_KEYS,
            self._VALID_KEYS_COMBINATIONS)
        # Fold the cluster into the database name ("<db>_at_<cluster>") so
        # each cluster/database pair maps to a distinct cache folder.
        self.database_name = self.database_name + '_at_' + self.cluster_name
        self.cluster_name = self._URI_SCHEMA_NAME
        self.client = CacheClient()

        database = self.get_database()
        database_name, cluster_name = database.split('_at_')
        folder_path = self.client._get_folder_path(database_name, cluster_name)
        validation_file_path = folder_path + '/' + 'validation_file.json'
        # Seed the folder with a validation file so validate() always has a
        # known query result to execute against.
        if not os.path.exists(validation_file_path):
            # FIX: use a context manager so the file handle is always
            # closed, even if the write raises (original leaked on error).
            with open(validation_file_path, "w") as outfile:
                outfile.write(self.validate_json_file_content)
Exemplo n.º 2
0
    def __init__(self,
                 conn_str,
                 user_ns: dict,
                 current=None,
                 cache_name=None,
                 **kwargs):
        """Build a cache engine from a connection string or a wrapped engine.

        Args:
            conn_str: a cache connection string, or a KqlEngine whose
                database/cluster determine the cache folder name.
            user_ns: caller's namespace, passed to the connection parser.
            current: currently active connection, used for defaults.
            cache_name: optional cache root forwarded to the cache client
                when resolving the folder path.
            **kwargs: unused; accepted for interface compatibility.
        """
        super().__init__()
        self._parsed_conn = {}
        self.kql_engine = None
        if isinstance(conn_str, KqlEngine):
            # Wrap an existing engine: its "<db>_at_<cluster>" pair names
            # the cache folder.
            self.kql_engine = conn_str
            folder_name = conn_str.get_database(
            ) + "_at_" + conn_str.get_cluster()
            conn_str = "{0}://{1}='{2}'".format(self._URI_SCHEMA_NAME,
                                                ConnStrKeys.FOLDER,
                                                folder_name)
        self._parsed_conn = self._parse_common_connection_str(
            conn_str, current, self._URI_SCHEMA_NAME, self._MANDATORY_KEY,
            self._VALID_KEYS_COMBINATIONS, user_ns)
        self.client = CacheClient()

        folder_path = self.client._get_folder_path(self.get_database(),
                                                   cache_name)
        validation_file_path = folder_path + "/" + self._VALIDATION_FILE_NAME
        # Seed the folder with a validation file so validate() always has a
        # known query result to execute against.
        if not os.path.exists(validation_file_path):
            # FIX: use a context manager so the file handle is always
            # closed, even if the write raises (original leaked on error).
            with open(validation_file_path, "w") as outfile:
                outfile.write(self.validate_json_file_content)
Exemplo n.º 3
0
    def get_schema_file_path(conn, **options):
        """Fetch the database schema for *conn* and render it to an HTML file.

        Supports Kusto, ApplicationInsights and LogAnalytics engines, either
        directly or wrapped inside a CacheEngine.

        Args:
            conn: the engine (or CacheEngine wrapper) to read the schema from.
            **options: execution options; "cache"/"use_cache" control whether
                the raw schema response is also saved to the cache.

        Returns:
            The path of the generated schema HTML file, or None when the
            engine type is not supported.
        """
        # Resolve the underlying engine type, unwrapping a CacheEngine.
        engine_type = (
            KustoEngine if isinstance(conn, KustoEngine) or
            (isinstance(conn, CacheEngine)
             and isinstance(conn.kql_engine, KustoEngine)) else
            AppinsightsEngine if isinstance(conn, AppinsightsEngine) or
            (isinstance(conn, CacheEngine)
             and isinstance(conn.kql_engine, AppinsightsEngine)) else
            LoganalyticsEngine if isinstance(conn, LoganalyticsEngine) or
            (isinstance(conn, CacheEngine)
             and isinstance(conn.kql_engine, LoganalyticsEngine)) else None)

        if engine_type is not None:
            if isinstance(conn, CacheEngine):
                database_name = conn.kql_engine.get_database()
                conn_name = conn.kql_engine.get_conn_name()
            else:
                database_name = conn.get_database()
                conn_name = conn.get_conn_name()

            if engine_type == KustoEngine:
                query = ".show schema"
                raw_query_result = conn.execute(query, **options)
                raw_schema_table = raw_query_result.tables[0]
                database_metadata_tree = Database_html._create_database_metadata_tree(
                    raw_schema_table.fetchall(), database_name)
                # Persist the schema response when caching is enabled and we
                # are not already reading from the cache.
                if options.get("cache") is not None and options.get(
                        "cache") != options.get("use_cache"):
                    CacheClient().save(raw_query_result, conn.get_database(),
                                       conn.get_cluster(), query, **options)

            # BUG FIX: the original condition was
            #   `engine_type == AppinsightsEngine or LoganalyticsEngine`
            # which is always truthy (the `or` operand is the class object,
            # not a comparison). Use a proper membership test. Behavior is
            # unchanged for the three supported types, but a future engine
            # type would no longer fall into this branch by accident.
            elif engine_type in (AppinsightsEngine, LoganalyticsEngine):
                query = ".show schema"
                metadata_result = conn.client_execute(query, **options)
                metadata_schema_table = metadata_result.table
                database_metadata_tree = Database_html._create_database_draft_metadata_tree(
                    metadata_schema_table)
                if options.get("cache") is not None and options.get(
                        "cache") != options.get("use_cache"):
                    CacheClient().save(metadata_result, conn.get_database(),
                                       conn.get_cluster(), query, **options)

            html_str = Database_html.convert_database_metadata_to_html(
                database_metadata_tree, conn_name)
            window_name = "_" + conn_name.replace("@", "_at_") + "_schema"
            return Display._html_to_file_path(html_str, window_name, **options)
        else:
            return None
Exemplo n.º 4
0
class CacheEngine(KqlEngine):
    """Engine that resolves kql queries from a local cache folder.

    It may wrap another KqlEngine (the folder name is then derived from that
    engine's database and cluster as "<db>_at_<cluster>") or be built
    directly from a cache/file connection string.
    """

    # no spaces, underscores, and hyphen-minus, because they are ignored in parser
    _URI_SCHEMA_NAME = "cache"
    _ALT_URI_SCHEMA_NAME = "file"  # same parser character restrictions as above

    _ALT_URI_SCHEMA_NAMES = [_URI_SCHEMA_NAME, _ALT_URI_SCHEMA_NAME]
    _MANDATORY_KEY = ConnStrKeys.FOLDER
    _VALID_KEYS_COMBINATIONS = [[ConnStrKeys.FOLDER, ConnStrKeys.ALIAS]]

    # Name of the sentinel file used by validate() to test the connection.
    _VALIDATION_FILE_NAME = "validation_file.json"

    @classmethod
    def tell_format(cls):
        """Return a usage string describing the connection-string format."""
        return """
               {0}://{1}='<foldername>'
               """.format(cls._URI_SCHEMA_NAME, ConnStrKeys.FOLDER)

    # Object constructor
    def __init__(self, conn_str, user_ns: dict, current=None, **kwargs):
        """Build the engine from a connection string or a wrapped engine.

        Args:
            conn_str: a cache connection string, or a KqlEngine whose
                database/cluster determine the cache folder name.
            user_ns: caller's namespace, passed to the connection parser.
            current: currently active connection, used for defaults.
            **kwargs: unused; accepted for interface compatibility.
        """
        super().__init__()
        self._parsed_conn = {}
        self.kql_engine = None
        if isinstance(conn_str, KqlEngine):
            # Wrap an existing engine: its "<db>_at_<cluster>" pair names
            # the cache folder.
            self.kql_engine = conn_str
            folder_name = conn_str.get_database(
            ) + "_at_" + conn_str.get_cluster()
            conn_str = "{0}://{1}='{2}'".format(self._URI_SCHEMA_NAME,
                                                ConnStrKeys.FOLDER,
                                                folder_name)
        self._parsed_conn = self._parse_common_connection_str(
            conn_str, current, self._URI_SCHEMA_NAME, self._MANDATORY_KEY,
            self._VALID_KEYS_COMBINATIONS, user_ns)
        self.client = CacheClient()

        folder_path = self.client._get_folder_path(self.get_database())
        validation_file_path = folder_path + "/" + self._VALIDATION_FILE_NAME
        # Seed the folder with a validation file so validate() always has a
        # known query result to execute against.
        if not os.path.exists(validation_file_path):
            # FIX: context manager guarantees the handle is closed even if
            # the write fails (the original leaked it on error).
            with open(validation_file_path, "w") as outfile:
                outfile.write(self.validate_json_file_content)

    def validate(self, **kwargs):
        """Execute the validation file and check the expected single value.

        Raises:
            KqlEngineError: if no client is available, or the validation
                query does not return exactly one cell holding 10.
        """
        client = self.get_client()
        if not client:
            raise KqlEngineError("Client is not defined.")
        # query = "range c from 1 to 10 step 1 | count"
        filename = self._VALIDATION_FILE_NAME
        database = self.get_database()
        response = client.execute(database,
                                  filename,
                                  accept_partial_results=False,
                                  timeout=None)
        # print(response.json_response)
        table = KqlResponse(response, **kwargs).tables[0]
        if table.rowcount() != 1 or table.colcount() != 1 or [
                r for r in table.fetchall()
        ][0][0] != 10:
            raise KqlEngineError("Client failed to validate connection.")

    # Canned Kusto response (a 1x1 Count=10 table plus standard metadata
    # tables) written into new cache folders so validate() has a well-known
    # result to execute.
    validate_json_file_content = """{"Tables": [{"TableName": "Table_0", "Columns": [{"ColumnName": "Count", "DataType": "Int64", "ColumnType": "long"}], "Rows": [[10]]}, {"TableName": "Table_1", "Columns": [{"ColumnName": "Value", "DataType": "String", "ColumnType": "string"}], "Rows": [["{\\"Visualization\\":null,\\"Title\\":null,\\"XColumn\\":null,\\"Series\\":null,\\"YColumns\\":null,\\"XTitle\\":null,\\"YTitle\\":null,\\"XAxis\\":null,\\"YAxis\\":null,\\"Legend\\":null,\\"YSplit\\":null,\\"Accumulate\\":false,\\"IsQuerySorted\\":false,\\"Kind\\":null}"]]}, {"TableName": "Table_2", "Columns": [{"ColumnName": "Timestamp", "DataType": "DateTime", "ColumnType": "datetime"}, {"ColumnName": "Severity", "DataType": "Int32", "ColumnType": "int"}, {"ColumnName": "SeverityName", "DataType": "String", "ColumnType": "string"}, {"ColumnName": "StatusCode", "DataType": "Int32", "ColumnType": "int"}, {"ColumnName": "StatusDescription", "DataType": "String", "ColumnType": "string"}, {"ColumnName": "Count", "DataType": "Int32", "ColumnType": "int"}, {"ColumnName": "RequestId", "DataType": "Guid", "ColumnType": "guid"}, {"ColumnName": "ActivityId", "DataType": "Guid", "ColumnType": "guid"}, {"ColumnName": "SubActivityId", "DataType": "Guid", "ColumnType": "guid"}, {"ColumnName": "ClientActivityId", "DataType": "String", "ColumnType": "string"}], "Rows": [["2018-09-17T01:45:07.5325114Z", 4, "Info", 0, "Query completed successfully", 1, "21d61568-0a1a-41e2-ab8c-7a85992a1f3b", "21d61568-0a1a-41e2-ab8c-7a85992a1f3b", "8a9c6cc6-f723-431f-9396-4c91ec9a8837", "9dff54f7-dd4c-445f-89e1-02b50661086e"], ["2018-09-17T01:45:07.5325114Z", 6, "Stats", 0, "{\\"ExecutionTime\\":0.0,\\"resource_usage\\":{\\"cache\\":{\\"memory\\":{\\"hits\\":0,\\"misses\\":0,\\"total\\":0},\\"disk\\":{\\"hits\\":0,\\"misses\\":0,\\"total\\":0}},\\"cpu\\":{\\"user\\":\\"00:00:00\\",\\"kernel\\":\\"00:00:00\\",\\"total 
cpu\\":\\"00:00:00\\"},\\"memory\\":{\\"peak_per_node\\":0}},\\"input_dataset_statistics\\":{\\"extents\\":{\\"total\\":0,\\"scanned\\":0},\\"rows\\":{\\"total\\":0,\\"scanned\\":0}},\\"dataset_statistics\\":[{\\"table_row_count\\":1,\\"table_size\\":8}]}", 1, "21d61568-0a1a-41e2-ab8c-7a85992a1f3b", "21d61568-0a1a-41e2-ab8c-7a85992a1f3b", "8a9c6cc6-f723-431f-9396-4c91ec9a8837", "9dff54f7-dd4c-445f-89e1-02b50661086e"]]}, {"TableName": "Table_3", "Columns": [{"ColumnName": "Ordinal", "DataType": "Int64", "ColumnType": "long"}, {"ColumnName": "Kind", "DataType": "String", "ColumnType": "string"}, {"ColumnName": "Name", "DataType": "String", "ColumnType": "string"}, {"ColumnName": "Id", "DataType": "String", "ColumnType": "string"}, {"ColumnName": "PrettyName", "DataType": "String", "ColumnType": "string"}], "Rows": [[0, "QueryResult", "PrimaryResult", "1bd5362f-e1f6-4258-abb3-9c2fedca8bdb", ""], [1, "QueryProperties", "@ExtendedProperties", "b1f9ef32-f6f7-4304-9e94-616a3472fb7e", ""], [2, "QueryStatus", "QueryStatus", "00000000-0000-0000-0000-000000000000", ""]]}]}"""
Exemplo n.º 5
0
class CacheEngine(KqlEngine):
    """Engine that resolves kql queries from a local cache folder.

    It may wrap another KqlEngine (the cache folder is then named
    "<db>_at_<cluster>" from that engine) or be built directly from a
    cache/file connection string.
    """

    _URI_SCHEMA_NAME = "cache"
    _ALT_URI_SCHEMA_NAMES = [_URI_SCHEMA_NAME, "file"]
    _MANDATORY_KEY = "database"
    _VALID_KEYS_COMBINATIONS = [
        ["cluster", "database", "alias"],
    ]
    # Union of all keys used by any valid combination.
    _ALL_KEYS = set()
    for c in _VALID_KEYS_COMBINATIONS:
        _ALL_KEYS.update(set(c))

    @classmethod
    def tell_format(cls):
        """Return a usage string describing the connection-string format."""
        return """
               cache://cluster('clustername').database('databasename')
               cache://database('databasename')
                     # Note: current cluster is attached
               cache://cluster('clustername')
                     # Note: not enough for to submit a query, set current clustername
               """

    # Object constructor
    def __init__(self, conn_str, current=None, **kwargs):
        """Initialize a cache engine from a connection string or another engine.

        Args:
            conn_str: a cache connection string, or a KqlEngine instance
                whose cluster/database identify the cache folder.
            current: the currently active connection, used by the parser to
                fill in missing parts of the connection string.
            **kwargs: unused; accepted for interface compatibility.
        """
        super().__init__()
        self._parsed_conn = {}
        self.kql_engine = None
        if isinstance(conn_str, KqlEngine):
            # Wrap an existing engine: synthesize an equivalent cache
            # connection string from its cluster and database.
            self.kql_engine = conn_str
            database_name = self.kql_engine.get_database()
            cluster_name = self.kql_engine.get_cluster()
            conn_str = "{0}://cluster('{1}').database('{2}')".format(
                self._URI_SCHEMA_NAME, cluster_name, database_name)
        self._parsed_conn = self._parse_common_connection_str(
            conn_str, current, self._URI_SCHEMA_NAME, self._MANDATORY_KEY,
            self._ALT_URI_SCHEMA_NAMES, self._ALL_KEYS,
            self._VALID_KEYS_COMBINATIONS)
        # Fold the cluster into the database name ("<db>_at_<cluster>") so
        # each cluster/database pair maps to a distinct cache folder.
        self.database_name = self.database_name + '_at_' + self.cluster_name
        self.cluster_name = self._URI_SCHEMA_NAME
        self.client = CacheClient()

        database = self.get_database()
        database_name, cluster_name = database.split('_at_')
        folder_path = self.client._get_folder_path(database_name, cluster_name)
        validation_file_path = folder_path + '/' + 'validation_file.json'
        # Seed the folder with a validation file so validate() always has a
        # known query result to execute against.
        if not os.path.exists(validation_file_path):
            # FIX: context manager guarantees the handle is closed even if
            # the write fails (the original leaked it on error).
            with open(validation_file_path, "w") as outfile:
                outfile.write(self.validate_json_file_content)

    def validate(self, **kwargs):
        """Execute the validation file and check the expected single value.

        Raises:
            KqlEngineError: if no client is available, or the validation
                query does not return exactly one cell holding 10.
        """
        # FIX: removed dead code that called get_ipython() and computed an
        # unused root_path; neither value was ever read.
        client = self.get_client()
        if not client:
            raise KqlEngineError("Client is not defined.")
        # query = "range c from 1 to 10 step 1 | count"
        filename = 'validation_file.json'
        database = self.get_database()
        response = client.execute(database,
                                  filename,
                                  accept_partial_results=False,
                                  timeout=None)
        # print(response.json_response)
        table = KqlResponse(response, **kwargs).tables[0]
        if table.rowcount() != 1 or table.colcount() != 1 or [
                r for r in table.fetchall()
        ][0][0] != 10:
            raise KqlEngineError("Client failed to validate connection.")

    # Canned Kusto response (a 1x1 Count=10 table plus standard metadata
    # tables) written into new cache folders so validate() has a well-known
    # result to execute.
    validate_json_file_content = """{"Tables": [{"TableName": "Table_0", "Columns": [{"ColumnName": "Count", "DataType": "Int64", "ColumnType": "long"}], "Rows": [[10]]}, {"TableName": "Table_1", "Columns": [{"ColumnName": "Value", "DataType": "String", "ColumnType": "string"}], "Rows": [["{\\"Visualization\\":null,\\"Title\\":null,\\"XColumn\\":null,\\"Series\\":null,\\"YColumns\\":null,\\"XTitle\\":null,\\"YTitle\\":null,\\"XAxis\\":null,\\"YAxis\\":null,\\"Legend\\":null,\\"YSplit\\":null,\\"Accumulate\\":false,\\"IsQuerySorted\\":false,\\"Kind\\":null}"]]}, {"TableName": "Table_2", "Columns": [{"ColumnName": "Timestamp", "DataType": "DateTime", "ColumnType": "datetime"}, {"ColumnName": "Severity", "DataType": "Int32", "ColumnType": "int"}, {"ColumnName": "SeverityName", "DataType": "String", "ColumnType": "string"}, {"ColumnName": "StatusCode", "DataType": "Int32", "ColumnType": "int"}, {"ColumnName": "StatusDescription", "DataType": "String", "ColumnType": "string"}, {"ColumnName": "Count", "DataType": "Int32", "ColumnType": "int"}, {"ColumnName": "RequestId", "DataType": "Guid", "ColumnType": "guid"}, {"ColumnName": "ActivityId", "DataType": "Guid", "ColumnType": "guid"}, {"ColumnName": "SubActivityId", "DataType": "Guid", "ColumnType": "guid"}, {"ColumnName": "ClientActivityId", "DataType": "String", "ColumnType": "string"}], "Rows": [["2018-09-17T01:45:07.5325114Z", 4, "Info", 0, "Query completed successfully", 1, "21d61568-0a1a-41e2-ab8c-7a85992a1f3b", "21d61568-0a1a-41e2-ab8c-7a85992a1f3b", "8a9c6cc6-f723-431f-9396-4c91ec9a8837", "9dff54f7-dd4c-445f-89e1-02b50661086e"], ["2018-09-17T01:45:07.5325114Z", 6, "Stats", 0, "{\\"ExecutionTime\\":0.0,\\"resource_usage\\":{\\"cache\\":{\\"memory\\":{\\"hits\\":0,\\"misses\\":0,\\"total\\":0},\\"disk\\":{\\"hits\\":0,\\"misses\\":0,\\"total\\":0}},\\"cpu\\":{\\"user\\":\\"00:00:00\\",\\"kernel\\":\\"00:00:00\\",\\"total 
cpu\\":\\"00:00:00\\"},\\"memory\\":{\\"peak_per_node\\":0}},\\"input_dataset_statistics\\":{\\"extents\\":{\\"total\\":0,\\"scanned\\":0},\\"rows\\":{\\"total\\":0,\\"scanned\\":0}},\\"dataset_statistics\\":[{\\"table_row_count\\":1,\\"table_size\\":8}]}", 1, "21d61568-0a1a-41e2-ab8c-7a85992a1f3b", "21d61568-0a1a-41e2-ab8c-7a85992a1f3b", "8a9c6cc6-f723-431f-9396-4c91ec9a8837", "9dff54f7-dd4c-445f-89e1-02b50661086e"]]}, {"TableName": "Table_3", "Columns": [{"ColumnName": "Ordinal", "DataType": "Int64", "ColumnType": "long"}, {"ColumnName": "Kind", "DataType": "String", "ColumnType": "string"}, {"ColumnName": "Name", "DataType": "String", "ColumnType": "string"}, {"ColumnName": "Id", "DataType": "String", "ColumnType": "string"}, {"ColumnName": "PrettyName", "DataType": "String", "ColumnType": "string"}], "Rows": [[0, "QueryResult", "PrimaryResult", "1bd5362f-e1f6-4258-abb3-9c2fedca8bdb", ""], [1, "QueryProperties", "@ExtendedProperties", "b1f9ef32-f6f7-4304-9e94-616a3472fb7e", ""], [2, "QueryStatus", "QueryStatus", "00000000-0000-0000-0000-000000000000", ""]]}]}"""
Exemplo n.º 6
0
    def execute_query(self, parsed, user_ns, result_set=None):
        """Run a parsed kql magic command end-to-end.

        Handles the informational options (version, palette, help popups),
        resolves and validates the connection, optionally pops up / registers
        the database schema, submits the query, wraps the raw response in a
        ResultSet, and applies the output options (dataframe conversion,
        local variables, caching, save_as, suppression).

        Args:
            parsed: dict with "query", "options" and "connection" entries
                produced by the command parser.
            user_ns: caller's (IPython) namespace; used for query
                parametrization and NOTEBOOK_URL discovery.
            result_set: an existing ResultSet to refresh in place
                (presumably a re-execution), or None for a fresh query.

        Returns:
            A ResultSet, a pandas dataframe, or None, depending on options
            and on whether an error was handled as a "short error".
        """
        # Lazily discover the notebook base URL (needed for popup windows
        # and generated files).
        if Help_html.showfiles_base_url is None:
            window_location = user_ns.get("NOTEBOOK_URL")
            if window_location is not None:
                Help_html.flush(window_location,
                                notebook_app=self.notebook_app)
            else:
                self.submit_get_notebook_url()

        query = parsed["query"].strip()
        options = parsed["options"]
        # Output is suppressed only when both the per-call flag and the
        # global enable switch agree.
        suppress_results = options.get(
            "suppress_results", False) and options.get(
                "enable_suppress_result", self.enable_suppress_result)
        connection_string = parsed["connection"]

        # --- informational options; may return before running any query ---
        special_info = False
        if options.get("version"):
            print("{0} version: {1}".format(Constants.MAGIC_PACKAGE_NAME,
                                            VERSION))
            special_info = True

        if options.get("palette"):
            # Render the currently configured palette inline.
            palette = Palette(
                palette_name=options.get("palette_name", self.palette_name),
                n_colors=options.get("palette_colors", self.palette_colors),
                desaturation=options.get("palette_desaturation",
                                         self.palette_desaturation),
                to_reverse=options.get("palette_reverse", False),
            )
            html_str = palette._repr_html_()
            Display.show_html(html_str)
            special_info = True

        if options.get("popup_palettes"):
            # Render all available palettes to a file and open it in a
            # popup window.
            n_colors = options.get("palette_colors", self.palette_colors)
            desaturation = options.get("palette_desaturation",
                                       self.palette_desaturation)
            palettes = Palettes(n_colors=n_colors, desaturation=desaturation)
            html_str = palettes._repr_html_()
            button_text = "popup {0} colors palettes".format(n_colors)
            file_name = "{0}_colors_palettes".format(n_colors)
            if desaturation is not None and desaturation != 1.0 and desaturation != 0:
                file_name += "_desaturation{0}".format(str(desaturation))
                button_text += " (desaturation {0})".format(str(desaturation))
            file_path = Display._html_to_file_path(html_str, file_name,
                                                   **options)
            Display.show_window(file_name,
                                file_path,
                                button_text=button_text,
                                onclick_visibility="visible")
            special_info = True

        if options.get("popup_help"):
            help_url = "http://aka.ms/kdocs"
            # 'https://docs.loganalytics.io/docs/Language-Reference/Tabular-operators'
            # 'http://aka.ms/kdocs'
            # 'https://kusdoc2.azurewebsites.net/docs/queryLanguage/query-essentials/readme.html'
            # import requests
            # f = requests.get(help_url)
            # html = f.text.replace('width=device-width','width=500')
            # Display.show(html, **{"popup_window" : True, 'name': 'KustoQueryLanguage'})
            button_text = "popup kql help "
            Display.show_window("KustoQueryLanguage",
                                help_url,
                                button_text,
                                onclick_visibility="visible")
            special_info = True

        # Info-only invocation with no query and no connection: done.
        if special_info and not query and not connection_string:
            return None

        try:
            #
            # set connection
            #
            conn = Connection.get_connection(connection_string, **options)

        # parse error
        except KqlEngineError as e:
            if options.get("short_errors", self.short_errors):
                msg = Connection.tell_format(connection_string)
                Display.showDangerMessage(str(e))
                Display.showInfoMessage(msg)
                return None
            else:
                raise

        # parse error
        except ConnectionError as e:
            if options.get("short_errors", self.short_errors):
                Display.showDangerMessage(str(e))
                self._show_connection_info(show_conn_info="list")
                return None
            else:
                raise

        try:
            # validate connection
            if not conn.options.get(
                    "validate_connection_string_done") and options.get(
                        "validate_connection_string",
                        self.validate_connection_string):
                retry_with_code = False
                try:
                    conn.validate(**options)
                    conn.set_validation_result(True)
                except Exception as e:
                    msg = str(e)
                    # AADSTS50079 = multi-factor auth required; Kusto
                    # connections can be retried with device-code auth.
                    if msg.find("AADSTS50079") > 0 and msg.find(
                            "multi-factor authentication") > 0 and isinstance(
                                conn, KustoEngine):
                        Display.showDangerMessage(str(e))
                        retry_with_code = True
                    else:
                        raise e

                if retry_with_code:
                    # Rebuild the connection string with code() auth and
                    # validate again.
                    Display.showInfoMessage(
                        "replaced connection with code authentication")
                    database_name = conn.get_database()
                    cluster_name = conn.get_cluster()
                    uri_schema_name = conn._URI_SCHEMA_NAME
                    connection_string = "{0}://code().cluster('{1}').database('{2}')".format(
                        uri_schema_name, cluster_name, database_name)
                    conn = Connection.get_connection(connection_string,
                                                     **options)
                    conn.validate(**options)
                    conn.set_validation_result(True)

            conn.options["validate_connection_string_done"] = True

            # Popup / register the database schema HTML (at most once per
            # connection unless explicitly requested).
            schema_file_path = None
            if options.get("popup_schema") or (
                    not conn.options.get("auto_popup_schema_done") and
                    options.get("auto_popup_schema", self.auto_popup_schema)):
                schema_file_path = Database_html.get_schema_file_path(
                    conn, **options)
                Database_html.popup_schema(schema_file_path, conn)

            conn.options["auto_popup_schema_done"] = True
            if not conn.options.get("add_schema_to_help_done") and options.get(
                    "add_schema_to_help"):
                schema_file_path = schema_file_path or Database_html.get_schema_file_path(
                    conn, **options)
                Help_html.add_menu_item(conn.get_conn_name(), schema_file_path,
                                        **options)
                conn.options["add_schema_to_help_done"] = True

            if not query:
                #
                # If NO  kql query, just return the current connection
                #
                if not connection_string and Connection.connections and not suppress_results:
                    self._show_connection_info(**options)
                return None
            #
            # submit query
            #
            start_time = time.time()

            # Expand query parameters from the named dict (or the whole user
            # namespace); a refreshed result reuses its original
            # parametrized query.
            params_dict_name = options.get('params_dict')
            dictionary = user_ns.get(
                params_dict_name) if params_dict_name is not None and len(
                    params_dict_name) > 0 else user_ns
            parametrized_query = Parameterizer(dictionary).expand(
                query) if result_set is None else result_set.parametrized_query
            raw_query_result = conn.execute(parametrized_query, user_ns,
                                            **options)

            end_time = time.time()

            #
            # model query results
            #
            if result_set is None:
                fork_table_id = 0
                saved_result = ResultSet(raw_query_result,
                                         parametrized_query,
                                         fork_table_id=0,
                                         fork_table_resultSets={},
                                         metadata={},
                                         options=options)
                saved_result.metadata["magic"] = self
                saved_result.metadata["parsed"] = parsed
                saved_result.metadata["connection"] = conn.get_conn_name()
            else:
                # Refresh path: update the existing result set in place.
                fork_table_id = result_set.fork_table_id
                saved_result = result_set.fork_result(0)
                saved_result.feedback_info = []
                saved_result._update(raw_query_result)

            result = saved_result

            if not connection_string and Connection.connections:
                saved_result.metadata["conn_info"] = self._get_connection_info(
                    **options)
            else:
                saved_result.metadata["conn_info"] = []

            saved_result.metadata["start_time"] = start_time
            saved_result.metadata["end_time"] = end_time

            if options.get("feedback", self.feedback):
                minutes, seconds = divmod(end_time - start_time, 60)
                saved_result.feedback_info.append(
                    "Done ({:0>2}:{:06.3f}): {} records".format(
                        int(minutes), seconds, saved_result.records_count))

            if options.get("columns_to_local_vars",
                           self.columns_to_local_vars):
                # Instead of returning values, set variables directly in the
                # users namespace. Variable names given by column names

                if options.get("feedback", self.feedback):
                    saved_result.feedback_info.append(
                        "Returning raw data to local variables")

                self.shell.user_ns.update(saved_result.to_dict())
                result = None

            if options.get("auto_dataframe", self.auto_dataframe):
                if options.get("feedback", self.feedback):
                    saved_result.feedback_info.append(
                        "Returning data converted to pandas dataframe")
                result = saved_result.to_dataframe()

            if options.get("result_var") and result_set is None:
                result_var = options["result_var"]
                if options.get("feedback", self.feedback):
                    saved_result.feedback_info.append(
                        "Returning data to local variable {}".format(
                            result_var))
                self.shell.user_ns.update({
                    result_var:
                    result if result is not None else saved_result
                })
                result = None

            # Cache the raw result unless it already came from the cache.
            if options.get('cache') and not options.get(
                    'use_cache') and not isinstance(conn, CacheEngine):
                file_path = CacheClient().save(raw_query_result,
                                               conn.get_database(),
                                               conn.get_cluster(),
                                               parametrized_query, **options)
                if options.get("feedback", self.feedback):
                    saved_result.feedback_info.append("query results cached")

            if options.get('save_as') is not None:
                file_path = CacheClient().save(raw_query_result,
                                               conn.get_database(),
                                               conn.get_cluster(),
                                               parametrized_query,
                                               filepath=options.get('save_as'),
                                               **options)
                if options.get("feedback", self.feedback):
                    saved_result.feedback_info.append(
                        "query results saved as {0}".format(file_path))

            saved_result.suppress_result = False
            saved_result.display_info = False
            if result is not None:
                if suppress_results:
                    saved_result.suppress_result = True
                elif options.get("auto_dataframe", self.auto_dataframe):
                    Display.showSuccessMessage(saved_result.feedback_info)
                else:
                    saved_result.display_info = True

            if result_set is None:
                saved_result._create_fork_results()
            else:
                saved_result._update_fork_results()

            # Return results into the default ipython _ variable
            self.shell.user_ns.update({
                options.get("last_raw_result_var", self.last_raw_result_var):
                saved_result
            })

            if result == saved_result:
                result = saved_result.fork_result(fork_table_id)
            return result

        except Exception as e:
            if not connection_string and Connection.connections and not suppress_results:
                # display list of all connections
                self._show_connection_info(**options)

            if options.get("short_errors", self.short_errors):
                Display.showDangerMessage(e)
                return None
            else:
                raise e