def get_object(row):
    """Wrap a DB row dict in a BaseClass instance.

    Column names become lowercased attribute names; the literal
    string "None" is normalized to a real None value.
    """
    wrapped = BaseClass(None)
    for column, cell in row.items():
        setattr(wrapped, column.lower(), None if cell == "None" else cell)
    return wrapped
def get_list_infos_to_lower(self, host_info, sql):
    """Run *sql* on *host_info* and wrap every result row in a BaseClass
    object whose attribute names are the lowercased column names.

    NOTE(review): unlike get_object(), this does not normalize the
    string "None" to a real None — confirm that is intentional.
    """
    infos = []
    for record in self.fetchall(host_info, sql):
        wrapped = BaseClass(None)
        for column, cell in record.items():
            setattr(wrapped, column.lower(), cell)
        infos.append(wrapped)
    return infos
def get_table_infos(host_id, db_name, sql):
    """Collect index / status / CREATE TABLE metadata for every table
    referenced by *sql*.

    Args:
        host_id: cache key of the MySQL host to inspect.
        db_name: default database for unqualified table names.
        sql: query text that QueryTableParser scans for table names.

    Returns:
        A list of BaseClass entities (one per table), or None when
        db_name is missing or any lookup raises.
    """
    if db_name is None:  # was "== None"
        return None
    try:
        table_infos = []
        table_names = QueryTableParser().parse(sql)
        host_info = cache.Cache().get_host_info(host_id)
        # enumerate replaces the original manual "number" counter.
        for number, name in enumerate(table_names, start=1):
            entity = BaseClass(None)
            # Names may arrive qualified as "db.table"; fall back to the
            # default database for bare table names.
            values = name.split(".")
            if len(values) > 1:
                db_name_tmp = values[0]
                table_name_tmp = values[1]
            else:
                db_name_tmp = db_name
                table_name_tmp = name
            entity.key = number
            entity.table_name_full = (db_name_tmp + "." + table_name_tmp).lower()
            entity.index_infos = get_show_index(host_info, entity.table_name_full)
            entity.status_info = get_show_table_status(host_info, db_name_tmp, table_name_tmp)
            entity.create_table_info = get_show_create_table(
                host_info, entity.table_name_full)
            table_infos.append(entity)
        return table_infos
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # still propagate; lookup failures remain best-effort.
        traceback.print_exc()
        return None
def load_all_host_infos(self):
    """(Re)load every row of mysql_web.host_infos into the in-memory caches.

    Decrypts stored credentials, purges all per-host caches for hosts
    flagged is_deleted, and lazily initializes the per-host info
    containers for the rest. Also refreshes user infos, server
    version/branch checks and master/slave relations.

    Returns:
        The status string "load all host infos ok." (also printed).
    """
    if self.__thread_pool is None:  # was "== None"
        self.__thread_pool = threadpool.ThreadPool(settings.THREAD_POOL_SIZE)
    sql = "select * from mysql_web.host_infos;"
    for row in db_util.DBUtil().fetchall(settings.MySQL_Host, sql):
        host_info_temp = common.get_object(row)
        host_info_temp.master_host_id = 0
        host_info_temp.key = host_info_temp.host_id
        host_info_temp.is_slave = bool(host_info_temp.is_slave)
        host_info_temp.is_master = bool(host_info_temp.is_master)
        host_info_temp.user = custom_algorithm.decrypt(
            settings.MY_KEY, host_info_temp.user)
        host_info_temp.password = custom_algorithm.decrypt(
            settings.MY_KEY, host_info_temp.password)
        # ssh_password may legitimately be empty; only decrypt when set.
        host_info_temp.ssh_password = custom_algorithm.decrypt(
            settings.MY_KEY, host_info_temp.ssh_password) if (
            len(host_info_temp.ssh_password) > 0) else None
        host_id = host_info_temp.host_id
        self.__host_infos[host_id] = host_info_temp
        if host_info_temp.is_deleted == 1:
            # Purge every per-host cache for a deleted host.
            # (Fixed: the original removed __host_infos twice.)
            self.remove_key(self.__tablespace, host_id)
            self.remove_key(self.__host_infos, host_id)
            self.remove_key(self.__repl_infos, host_id)
            self.remove_key(self.__status_infos, host_id)
            self.remove_key(self.__innodb_infos, host_id)
            self.remove_key(self.__linux_infos, host_id)
            self.remove_key(self.__innodb_status_infos, host_id)
            self.remove_key(self.__analyze_infos, host_id)
        else:
            # Lazily create per-host containers on first sight; "in"
            # replaces the Python-2-only dict.has_key().
            if host_id not in self.__tablespace:
                self.__tablespace[host_id] = BaseClass(host_info_temp)
            if host_id not in self.__repl_infos:
                self.__repl_infos[host_id] = BaseClass(host_info_temp)
            if host_id not in self.__linux_infos:
                self.__linux_infos[host_id] = BaseClass(host_info_temp)
            if host_id not in self.__status_infos:
                self.__status_infos[host_id] = BaseClass(host_info_temp)
            if host_id not in self.__innodb_infos:
                self.__innodb_infos[host_id] = self.init_innodb_info(
                    BaseClass(host_info_temp))
            if host_id not in self.__analyze_infos:
                self.__analyze_infos[host_id] = self.init_analyze_info(
                    BaseClass(None))
            if host_id not in self.__innodb_status_infos:
                self.__innodb_status_infos[host_id] = BaseClass(host_info_temp)
                self.__innodb_status_infos[
                    host_id].buffer_pool_infos = collections.OrderedDict()
    self.load_mysql_web_user_infos()
    self.check_mysql_server_version_and_branch()
    self.check_master_and_slave_relation()
    result = "load all host infos ok."
    print(result)
    return result
def get_object_list(rows):
    """Convert an iterable of row dicts into a list of BaseClass objects.

    Column names become lowercased attributes; the literal string
    "None" is normalized to a real None value.
    """
    objects = []
    for record in rows:
        wrapped = BaseClass(None)
        for column, cell in record.items():
            setattr(wrapped, column.lower(), None if cell == "None" else cell)
        objects.append(wrapped)
    return objects
def load_mysql_web_user_infos(self):
    """Load every mysql_web.mysql_web_user_info row into the user cache."""
    sql = "select id, user_name, user_password, is_deleted from mysql_web.mysql_web_user_info;"
    # (attribute name, source column) pairs for the cached user object.
    field_map = (("user_id", "id"),
                 ("user_name", "user_name"),
                 ("user_password", "user_password"),
                 ("is_deleted", "is_deleted"))
    for record in db_util.DBUtil().fetchall(settings.MySQL_Host, sql):
        entry = BaseClass(None)
        for attr, column in field_map:
            setattr(entry, attr, record[column])
        self.add_user_info(entry)
def get_review_detail_by_checksum(checksum):
    """Fetch the review metadata for one slow-query checksum as a JSON string.

    NOTE(review): the checksum is interpolated straight into the SQL
    text — acceptable only while it is a trusted numeric value;
    parameterize if it can ever carry user-controlled input.
    """
    sql = "select is_reviewed, comments, reviewed_on, reviewed_id " \
          "from mysql_web.mysql_slow_query_review where checksum={0}".format(checksum)
    row = db_util.DBUtil().fetchone(settings.MySQL_Host, sql)
    detail = BaseClass(None)
    detail.checksum = checksum
    detail.reviewed_id = row["reviewed_id"]
    detail.is_reviewed = row["is_reviewed"]
    detail.comments = row["comments"] if row["comments"] else ""
    if row["reviewed_on"]:
        detail.reviewed_on = row["reviewed_on"].strftime('%Y-%m-%d %H:%M:%S')
    else:
        detail.reviewed_on = ""
    return json.dumps(detail, default=lambda o: o.__dict__, skipkeys=True, ensure_ascii=False)
def test_ssh_connection_is_ok(obj):
    """Probe SSH connectivity to *obj*'s host by running a trivial command.

    Args:
        obj: object carrying host_ip, host_ssh_port, host_ssh_user,
             host_ssh_password attributes.

    Returns:
        True when `df -h` executes over SSH without raising; False otherwise.
    """
    try:
        host_info = BaseClass(None)
        host_info.host = obj.host_ip
        host_info.ssh_port = obj.host_ssh_port
        host_info.ssh_user = obj.host_ssh_user
        # An empty password is treated as "no password" (e.g. key-based auth).
        host_info.ssh_password = obj.host_ssh_password if (
            len(obj.host_ssh_password) > 0) else None
        execute_remote_command(host_info, "df -h")
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # still propagate.
        traceback.print_exc()
        return False
    return True
def get_slow_log_detail(checksum, server_id):
    """Load the aggregated slow-query statistics for one checksum on one server.

    Joins the review table with its per-server history rows, then attaches
    table metadata and EXPLAIN output for the sampled statement.

    Args:
        checksum: digest checksum identifying the statement class.
        server_id: server whose history rows (serverid_max) are aggregated.

    Returns:
        A populated BaseClass object, or None when no row matches.
    """
    # NOTE(review): server_id/checksum are interpolated into the SQL text;
    # safe only while both are trusted numeric values — confirm callers.
    sql = """select t1.checksum, sum(ifnull(t2.ts_cnt, 1)) as ts_cnt, t1.first_seen, t1.last_seen, t1.fingerprint, t2.sample, t2.serverid_max, t2.db_max, t2.user_max, t2.Query_time_min, t2.Query_time_max, sum(t2.Query_time_sum) as Query_time_sum, t2.Query_time_pct_95, sum(Lock_time_sum) as Lock_time_sum, Lock_time_min, Lock_time_max, Lock_time_pct_95, sum(Rows_sent_sum) as Rows_sent_sum, Rows_sent_min, Rows_sent_max, Rows_sent_pct_95, sum(Rows_examined_sum) as Rows_examined_sum, Rows_examined_min, Rows_examined_max, Rows_examined_pct_95 from mysql_web.mysql_slow_query_review t1 left join mysql_web.mysql_slow_query_review_history t2 on t1.checksum = t2.checksum and t2.serverid_max={0} where t1.checksum={1} limit 1;""".format(server_id, checksum)
    slow_log_detail = None
    # "limit 1" means this loop body runs at most once; the loop form simply
    # leaves slow_log_detail as None when there is no matching row.
    for row in db_util.DBUtil().fetchall(settings.MySQL_Host, sql):
        slow_log_detail = BaseClass(None)
        slow_log_detail.serverid_max = row["serverid_max"]
        slow_log_detail.db_max = row["db_max"]
        slow_log_detail.user_max = row["user_max"]
        slow_log_detail.checksum = row["checksum"]
        # ts_cnt may be NULL in SQL; get_sql_count_value handles conversion.
        slow_log_detail.count = get_sql_count_value(row["ts_cnt"])
        # Timing columns are normalized through get_float.
        slow_log_detail.query_time_sum = get_float(row["Query_time_sum"])
        slow_log_detail.query_time_max = get_float(row["Query_time_max"])
        slow_log_detail.query_time_min = get_float(row["Query_time_min"])
        slow_log_detail.query_time_pct_95 = get_float(row["Query_time_pct_95"])
        slow_log_detail.lock_time_sum = get_float(row["Lock_time_sum"])
        slow_log_detail.lock_time_max = get_float(row["Lock_time_max"])
        slow_log_detail.lock_time_min = get_float(row["Lock_time_min"])
        slow_log_detail.lock_time_pct_95 = get_float(row["Lock_time_pct_95"])
        # Row counters are plain integers.
        slow_log_detail.rows_sent_sum = int(row["Rows_sent_sum"])
        slow_log_detail.rows_sent_max = int(row["Rows_sent_max"])
        slow_log_detail.rows_sent_min = int(row["Rows_sent_min"])
        slow_log_detail.rows_sent_pct_95 = int(row["Rows_sent_pct_95"])
        slow_log_detail.rows_examined_sum = int(row["Rows_examined_sum"])
        slow_log_detail.rows_examined_max = int(row["Rows_examined_max"])
        slow_log_detail.rows_examined_min = int(row["Rows_examined_min"])
        slow_log_detail.rows_examined_pct_95 = int(row["Rows_examined_pct_95"])
        slow_log_detail.first_seen = row["first_seen"]
        slow_log_detail.last_seen = row["last_seen"]
        # decode("utf-8") implies these columns arrive as bytes (Python 2 str).
        slow_log_detail.fingerprint = row["fingerprint"].decode("utf-8")
        # Pretty-print the sampled statement for display.
        slow_log_detail.sample = sqlparse.format(row["sample"].decode("utf-8"), reindent=True, keyword_case='upper')
        slow_log_detail.table_infos = get_table_infos(server_id, slow_log_detail.db_max, slow_log_detail.sample)
        slow_log_detail.explain_infos = get_slow_log_explain(
            server_id, slow_log_detail.db_max, slow_log_detail.sample)
    return slow_log_detail
def get_slow_logs(server_id, start_datetime="", stop_datetime="", order_by_type=1, page_number=1, status=2):
    """Page through aggregated slow-query digests for one server.

    Args:
        server_id: serverid_max value to filter the history table on.
        start_datetime: optional lower bound on a.last_seen ('' = unbounded).
        stop_datetime: optional upper bound on a.last_seen ('' = unbounded).
        order_by_type: index into order_by_options choosing the ORDER BY column.
        page_number: 1-based page number; 15 rows per page.
        status: 2 means "all"; any value below 2 filters a.is_reviewed = status.

    Returns:
        A list of BaseClass objects, one per digest row.
    """
    # NOTE(review): filter values are interpolated into the SQL text;
    # parameterize these if they can ever carry user-controlled strings.
    where_sql = ""
    if len(start_datetime) > 0:
        where_sql += " and a.last_seen >= '{0}'".format(start_datetime)
    if len(stop_datetime) > 0:
        where_sql += " and a.last_seen <= '{0}'".format(stop_datetime)
    # Simplified from "status != 2 and status < 2" — the first test was redundant.
    if status < 2:
        where_sql += " and a.is_reviewed = {0}".format(status)
    # (A dead string literal holding an alternate query was removed here.)
    sql = """select a.checksum, a.fingerprint, a.first_seen, a.last_seen, a.is_reviewed, b.serverid_max, b.db_max, b.user_max, b.ts_min, b.ts_max, sum(ifnull(b.ts_cnt, 1)) ts_cnt, sum(b.Query_time_sum)/sum(b.ts_cnt) Query_time_avg, max(b.Query_time_max) Query_time_max, min(b.Query_time_min) Query_time_min, sum(b.Query_time_sum) Query_time_sum, sum(b.Lock_time_sum)/sum(b.ts_cnt) Lock_time_avg, max(b.Lock_time_max) Lock_time_max, min(b.Lock_time_min) Lock_time_min, sum(b.Lock_time_sum) Lock_time_sum from mysql_web.mysql_slow_query_review a inner join mysql_web.mysql_slow_query_review_history b on a.checksum=b.checksum and b.serverid_max={0} where 1 = 1 {1} group by a.checksum order by {2} desc limit {3}, 15;"""
    sql = sql.format(server_id, where_sql, order_by_options[order_by_type],
                     (page_number - 1) * 15)
    result = []
    for row in db_util.DBUtil().fetchall(settings.MySQL_Host, sql):
        info = BaseClass(None)
        info.checksum = row["checksum"]
        info.fingerprint = row["fingerprint"]
        # Short preview of the fingerprint for list views.
        info.fingerprint_tmp = row["fingerprint"].decode("utf-8")[0:35]
        info.first_seen = row["first_seen"]
        info.last_seen = row["last_seen"]
        info.serverid_max = row["serverid_max"]
        info.db_max = row["db_max"]
        info.user_max = row["user_max"]
        info.ts_max = row["ts_max"]
        info.ts_cnt = get_sql_count_value(row["ts_cnt"])
        info.is_reviewed = row["is_reviewed"]
        info.Query_time_avg = get_float(row["Query_time_avg"])
        info.Query_time_max = get_float(row["Query_time_max"])
        info.Query_time_min = get_float(row["Query_time_min"])
        info.Query_time_sum = get_float(row["Query_time_sum"])
        info.Lock_time_max = get_float(row["Lock_time_max"])
        info.Lock_time_min = get_float(row["Lock_time_min"])
        info.Lock_time_sum = get_float(row["Lock_time_sum"])
        info.Lock_time_avg = get_float(row["Lock_time_avg"])
        result.append(info)
    return result
def get_slow_log_explain(server_id, db, sql):
    """Run EXPLAIN EXTENDED (plus SHOW WARNINGS) for *sql* on *server_id*.

    Returns:
        A BaseClass whose .info list holds one object per explain row and
        whose .warnings list holds one object per warning; both lists stay
        empty (and the error is printed) when anything fails.
    """
    explain_info = BaseClass(None)
    explain_info.info = []
    explain_info.warnings = []
    connection, cursor = None, None
    host_info = cache.Cache().get_host_info(server_id)
    try:
        connection, cursor = db_util.DBUtil().get_conn_and_cur(host_info)
        cursor.execute("use {0};".format(db))
        cursor.execute("explain extended {0};".format(sql))
        # Copy each explain column onto an attribute of the same name.
        explain_columns = ("rows", "select_type", "Extra", "ref", "key_len",
                           "possible_keys", "key", "table", "type", "id")
        for record in cursor.fetchall():
            entry = BaseClass(None)
            for column in explain_columns:
                setattr(entry, column, record[column])
            explain_info.info.append(entry)
        cursor.execute("show warnings;")
        for record in cursor.fetchall():
            entry = BaseClass(None)
            entry.level = record["Level"]
            entry.code = record["Code"]
            entry.message = record["Message"]
            explain_info.warnings.append(entry)
    except Exception:
        traceback.print_exc()
    finally:
        # Always release the connection/cursor pair, even on failure.
        db_util.DBUtil().close(connection, cursor)
    return explain_info