def execute(self, query):
    """Run a single SQL statement on the backing connection.

    Access to the connection is serialized through self.mtx so concurrent
    callers cannot interleave statements. Returns the driver result, or
    None when there is no live connection.
    """
    self.mtx.acquire()
    try:
        # Quietly do nothing when disconnected; callers get None back.
        if not self.is_connected():
            return None
        log_debug3("Executing statement %s\n" % strip_password(query))
        return self.dbconn.execute(query)
    finally:
        self.mtx.release()
def exec_query_multi_result(self, query):
    """Run a query that may produce multiple result sets.

    Serialized through self.mtx like execute(). Returns the driver's
    multi-result object, or None when there is no live connection.
    """
    self.mtx.acquire()
    try:
        # No connection -> nothing to execute; caller receives None.
        if not self.is_connected():
            return None
        log_debug3("Executing query multi result %s\n" % strip_password(query))
        return self.dbconn.executeQueryMultiResult(query)
    finally:
        self.mtx.release()
def _get_ssh_config_path(self):
    """Return the path of the first existing ssh config file, or None.

    On Windows the file is looked up under the application's user data
    folder; elsewhere the standard ~/.ssh locations are probed.
    """
    paths = []
    if platform.system().lower() == "windows":
        base = mforms.App.get().get_user_data_folder()
        # os.path.join replaces the original hand-built "%s\ssh\config"
        # format strings, which only worked because "\s" and "\c" happen
        # not to be recognized escape sequences.
        paths.append(os.path.join(base, "ssh", "config"))
        paths.append(os.path.join(base, "ssh", "ssh_config"))
    else:
        paths.append("~/.ssh/config")
        paths.append("~/.ssh/ssh_config")
    for path in paths:
        # Expand once and reuse for both the existence test and the return.
        expanded = os.path.expanduser(path)
        if os.path.isfile(expanded):
            return expanded
    log_debug3("ssh config file not found")
    return None
def __setattr__(self, name, value):
    """Intercepts attribute writes to track changes for undo/notification.

    The first time a tracked attribute changes, its original value is
    stored in the '_ChangeTracker__changed' map and notify_change(True, ...)
    fires; writing the original value back removes the entry and fires
    notify_change(False, ...). The write itself always happens last.
    """
    # Verifies the value being set is a valid attribute
    # Also ensures the value is changing from the current value
    # The tracker's own (name-mangled) bookkeeping attributes are excluded
    # so writes to them never register as tracked changes.
    # NOTE(review): '_ChangeCounterchange_count' is missing the double
    # underscore that name mangling would produce
    # ('_ChangeCounter__change_count') — possibly a typo; confirm against
    # the ChangeCounter class.
    if name in self.__dict__ and \
        name != '_ChangeTracker__changed' and \
        name != '_ChangeTracker__ignoring' and \
        name != '_ChangeTracker__notify_value_set_cb' and \
        name != '_ChangeTracker__value_set_notification_cb' and \
        name != '_ChangeNotifier__change_notification_cb' and \
        name != '_ChangeCounterchange_count' and \
        not self.__ignoring and \
        self.__dict__[name] != value:
        log_message = "Changed %s from %s to %s at %s\n" % (
            name, self.__dict__[name], value, self)
        # If the value was already changed and the new value
        # reverts the change then it removes the attribute from
        # the changed map
        if name in self.__dict__["_ChangeTracker__changed"]:
            if self.__dict__["_ChangeTracker__changed"][name] == value:
                del self.__dict__["_ChangeTracker__changed"][name]
                # Sends message indicating a change has been undone
                self.notify_change(False, name, value)
                log_message = "Reverted change on %s to %s at %s\n" % (
                    name, value, self)
        # If this is the first change to the attribute, registers the
        # Original value on the changed map
        else:
            self.__dict__["_ChangeTracker__changed"][name] = self.__dict__[
                name]
            # Sends message indicating a change has been done
            self.notify_change(True, name, value)
        # If configured, notifies about a value being set
        if self.__value_set_notification_cb:
            self.__value_set_notification_cb(name, value)
        # Logs the change
        log_debug3("%s\n" % log_message)
    # Updates the value
    self.__dict__[name] = value
def add_event_handler(self, event_name, handler):
    """Register `handler` for `event_name`.

    The handler object must expose a '<event_name>_event' method; if it
    does not, an error is logged and nothing is registered.
    """
    method_name = event_name + "_event"
    if not hasattr(handler, method_name):
        log_error(handler.__class__.__name__ + " does not have method " +
                  method_name + '\n')
        return
    # setdefault creates the handler list on first registration.
    self.events.setdefault(method_name, []).append(handler)
    log_debug3("Added " + handler.__class__.__name__ + " for event " +
               method_name + '\n')
def server_started_event(self):
    """(Re)start the source polling thread when the server comes up."""
    log_debug3('Enter\n')
    # Poller already alive and running -> nothing to do.
    if self.poll_thread and self.running[0]:
        return
    # This is needed to ensure an existing polling thread is finished
    # before creating the new one.
    stale = self.poll_thread
    if stale:
        self.running[0] = False
        stale.join()
        self.poll_thread = None
    self.running[0] = True
    worker = threading.Thread(target=self.poll_sources)
    self.poll_thread = worker
    worker.start()
    log_debug3('Leave\n')
def _get_ssh_config_path(self):
    """Return the first existing ssh config path, or None.

    The 'pathtosshconfig' Workbench option, when set, takes precedence;
    otherwise the platform-default locations are probed.
    """
    paths = []
    # The original read the option twice via "x if x is not None else None",
    # which is just x; a single lookup keeps identical behavior (including
    # a KeyError if the option key is absent — TODO confirm the grt options
    # dict always contains it).
    user_path = grt.root.wb.options.options['pathtosshconfig']
    if user_path:
        paths.append(user_path)
    if platform.system().lower() == "windows":
        base = mforms.App.get().get_user_data_folder()
        # os.path.join replaces the fragile "%s\ssh\config" literals that
        # only worked because "\s"/"\c" are not escape sequences.
        paths.append(os.path.join(base, "ssh", "config"))
        paths.append(os.path.join(base, "ssh", "ssh_config"))
    else:
        paths.append("~/.ssh/config")
        paths.append("~/.ssh/ssh_config")
    for path in paths:
        expanded = os.path.expanduser(path)
        if os.path.isfile(expanded):
            return expanded
    log_debug3("ssh config file not found")
    return None
def __setattr__(self, name, value):
    """Intercepts attribute writes to track changes for undo/notification.

    The first time a tracked attribute changes, its original value is
    stored in the '_ChangeTracker__changed' map and notify_change(True, ...)
    fires; writing the original value back removes the entry and fires
    notify_change(False, ...). The write itself always happens last.
    """
    # Verifies the value being set is a valid attribute
    # Also ensures the value is changing from the current value
    # The tracker's own (name-mangled) bookkeeping attributes are excluded
    # so writes to them never register as tracked changes.
    # NOTE(review): '_ChangeCounterchange_count' is missing the double
    # underscore that name mangling would produce
    # ('_ChangeCounter__change_count') — possibly a typo; confirm against
    # the ChangeCounter class.
    if name in self.__dict__ and \
        name != '_ChangeTracker__changed' and \
        name != '_ChangeTracker__ignoring' and \
        name != '_ChangeTracker__notify_value_set_cb' and \
        name != '_ChangeTracker__value_set_notification_cb' and \
        name != '_ChangeNotifier__change_notification_cb' and \
        name != '_ChangeCounterchange_count' and \
        not self.__ignoring and \
        self.__dict__[name] != value:
        log_message = "Changed %s from %s to %s at %s\n" % (name, self.__dict__[name], value, self)
        # If the value was already changed and the new value
        # reverts the change then it removes the attribute from
        # the changed map
        if name in self.__dict__["_ChangeTracker__changed"]:
            if self.__dict__["_ChangeTracker__changed"][name] == value:
                del self.__dict__["_ChangeTracker__changed"][name]
                # Sends message indicating a change has been undone
                self.notify_change(False, name, value)
                log_message = "Reverted change on %s to %s at %s\n" % (name, value, self)
        # If this is the first change to the attribute, registers the
        # Original value on the changed map
        else:
            self.__dict__["_ChangeTracker__changed"][name] = self.__dict__[name]
            # Sends message indicating a change has been done
            self.notify_change(True, name, value)
        # If configured, notifies about a value being set
        if self.__value_set_notification_cb:
            self.__value_set_notification_cb(name, value)
        # Logs the change
        log_debug3("%s\n" % log_message)
    # Updates the value
    self.__dict__[name] = value
def server_polling_thread(self):
    """Background worker: snapshot SHOW GLOBAL STATUS while self.running is set.

    Opens a dedicated MySQL connection for monitoring; on any connect
    failure it logs, clears poll_connection, shuts the driver down for
    this thread and exits. Each poll stores the fresh variable snapshot
    and its timestamp atomically (single tuple assignment).
    """
    try:
        password = self.get_mysql_password()
        self.poll_connection = MySQLConnection(
            self.server_profile.db_connection_params, password=password)
        self.poll_connection.connect()
    except MySQLError as err:
        log_error("Error creating SQL connection for monitoring: %r\n" % err)
        self.poll_connection = None
        # Per-thread driver cleanup before the thread dies.
        mforms.Utilities.driver_shutdown()
        return None
    log_debug("Monitoring thread running...\n")
    # Initial delay so the first poll happens one interval after startup.
    time.sleep(self.status_variable_poll_interval)
    try:
        # runs in a separate thread to fetch status variables
        while self.running:
            log_debug3("Poll server status\n")
            variables = {}
            result = self.poll_connection.executeQuery(
                "SHOW GLOBAL STATUS")
            while result and result.nextRow():
                variables[result.stringByName(
                    "Variable_name")] = result.stringByName("Value")
            # Publish snapshot and its timestamp together.
            self.status_variables, self.status_variables_time = variables, time.time(
            )
            time.sleep(self.status_variable_poll_interval)
    except QueryError:
        log_error("Error in monitoring thread: %s\n" % traceback.format_exc())
    log_debug("Monitoring thread done.\n")
    self.poll_connection.disconnect()
    self.poll_connection = None
    mforms.Utilities.driver_shutdown()
def event(self, name):
    """Dispatch event `name` to every registered handler.

    While deferral is active the raw name is queued instead of being
    dispatched. Unknown events log an error; known-but-unsubscribed
    events log a debug trace.
    """
    if self.defer:
        # Queue for later replay; note the un-suffixed name is stored.
        self.deferred_events.append(name)
        return
    name += "_event"
    if name not in self.valid_events:
        log_error('EventManager: invalid event: ' + name + '\n')
        return
    if name not in self.events:
        log_debug3("Found valid but unrequested event " + name + " in list" + '\n')
        return
    log_debug3("Found event " + name + " in list" + '\n')
    for subscriber in self.events[name]:
        if hasattr(subscriber, name):
            log_debug3("Passing event " + name + " to " +
                       subscriber.__class__.__name__ + '\n')
            getattr(subscriber, name)()
def start_import(self): if not self._last_analyze: return False if self._new_table: if not self.prepare_new_table(): return False if self._truncate_table: self.update_progress(0.0, "Truncate table") self._editor.executeManagementCommand( "TRUNCATE TABLE %s" % self._table_w_prefix, 1) result = True with open(self._filepath, 'rb') as jsonfile: data = json.load(jsonfile) dest_col_order = list( set([i['dest_col'] for i in self._mapping if i['active']])) query = """PREPARE stmt FROM 'INSERT INTO %s (%s) VALUES(%s)'""" % ( self._table_w_prefix, ",".join([ "`%s`" % col for col in dest_col_order ]), ",".join(["?" for i in dest_col_order])) col_order = dict([(i['dest_col'], i['name']) for i in self._mapping if i['active']]) col_type = dict([(i['name'], i['type']) for i in self._mapping if i['active']]) self._editor.executeManagementCommand(query, 1) try: self._max_rows = len(data) for row in data: if self._thread_event and self._thread_event.is_set(): log_debug2("Worker thread was stopped by user") self._editor.executeManagementCommand( "DEALLOCATE PREPARE stmt", 1) return False self._current_row = self._current_row + 1 for i, col in enumerate(col_order): if col_order[col] not in row: log_error("Can't find col: %s in row: %s" % (col_order[col], row)) result = False break val = row[col_order[col]] col_name = col_order[col] if col_type[col] == "geometry": val = """ ST_GeomFromGeoJSON('%s')""" % json.dumps( val).replace("\\", "\\\\").replace("'", "\\'") self._editor.executeManagementCommand( """SET @a%d = %s """ % (i, val), 0) else: if col_type[col_name] != "json" and hasattr( val, "replace"): val = val.replace("\\", "\\\\").replace("'", "\\'") if col_type[col_name] == 'double': val = val(str).replace(self._decimal_separator, '.') elif col_type[col_name] == 'datetime': val = datetime.datetime.strptime( val, self._date_format).strftime( "%Y-%m-%d %H:%M:%S") elif col_type[col_name] == "json": val = json.dumps(val).replace("\\", "\\\\").replace( "'", "\\'") if col_type[col_name] 
== "int": self._editor.executeManagementCommand( """SET @a%d = %d """ % (i, int(val)), 0) else: self._editor.executeManagementCommand( """SET @a%d = '%s' """ % (i, val), 0) else: try: self._editor.executeManagementCommand( "EXECUTE stmt USING %s" % ", ".join([ '@a%d' % i for i, col in enumerate(col_order) ]), 0) self.item_count = self.item_count + 1 except Exception, e: log_error("Row import failed with error: %s" % e) except Exception, e: import traceback log_debug3("Import failed traceback: %s" % traceback.format_exc()) log_error("Import failed: %s" % e) self._editor.executeManagementCommand("DEALLOCATE PREPARE stmt", 1)
def start_import(self):
    """Import rows from the CSV file at self._filepath into the target table.

    Streams rows through UniReader, binds each mapped column via a
    session variable (@aN) and EXECUTEs a prepared INSERT per row.
    Progress is reported as file-offset / file-size. Returns False on
    early abort (no analysis, table prep failure, user stop).
    """
    if not self._last_analyze:
        return False
    if self._new_table:
        if not self.prepare_new_table():
            return False
    if self._truncate_table:
        self.update_progress(0.0, "Truncate table")
        self._editor.executeManagementCommand(
            "TRUNCATE TABLE %s" % self._table_w_prefix, 1)
    result = True
    with open(self._filepath, 'rb') as csvfile:
        self.update_progress(0.0, "Prepare Import")
        dest_col_order = list(
            set([i['dest_col'] for i in self._mapping if i['active']]))
        query = """PREPARE stmt FROM 'INSERT INTO %s (%s) VALUES(%s)'""" % (
            self._table_w_prefix, ",".join([
                "`%s`" % col for col in dest_col_order
            ]), ",".join(["?" for i in dest_col_order]))
        # dest column -> CSV column index, and dest column -> type.
        col_order = dict([(i['dest_col'], i['col_no']) for i in self._mapping
                          if i['active']])
        col_type = dict([(i['dest_col'], i['type']) for i in self._mapping
                         if i['active']])
        # 5.7.5+ renamed the spatial functions (ST_GeomFromText).
        is_server_5_7 = self._targetVersion.is_supported_mysql_version_at_least(
            Version.fromstr("5.7.5"))
        self._editor.executeManagementCommand(query, 1)
        try:
            is_header = self.has_header
            reader = UniReader(csvfile, self.dialect, encoding=self._encoding)
            # Progress is byte-based: max is the file size, current is tell().
            self._max_rows = os.path.getsize(self._filepath)
            self.update_progress(0.0, "Begin Import")
            for row in reader:
                if self._thread_event and self._thread_event.is_set():
                    self._editor.executeManagementCommand(
                        "DEALLOCATE PREPARE stmt", 1)
                    log_debug2("Worker thread was stopped by user")
                    self.update_progress(
                        round(self._current_row / self._max_rows, 2),
                        "Import stopped by user request")
                    return False
                self._current_row = float(csvfile.tell())
                if is_header:
                    # Skip the first data row when the file has a header line.
                    is_header = False
                    continue
                for i, col in enumerate(col_order):
                    if col_order[col] >= len(row):
                        log_error("Can't find col: %s in row: %s" %
                                  (col_order[col], row))
                        result = False
                        break
                    val = row[col_order[col]]
                    col_name = col_order[col]
                    if col_type[col] == "geometry":
                        if is_server_5_7:
                            val = """ST_GeomFromText("%s")""" % row[
                                col_name]
                        else:
                            val = """GeomFromText("%s")""" % row[col_name]
                        self._editor.executeManagementCommand(
                            """SET @a%d = %s """ % (i, val), 0)
                    else:
                        if col_type[col] == 'double':
                            val = row[col_name].replace(
                                self._decimal_separator, '.')
                        elif col_type[col] == 'datetime':
                            val = datetime.datetime.strptime(
                                row[col_name], self._date_format).strftime(
                                    "%Y-%m-%d %H:%M:%S")
                        # Escape backslashes and quotes for the SQL literal.
                        if hasattr(val, "replace"):
                            val = val.replace("\\", "\\\\").replace("'", "\\'")
                        # Optionally treat the literal word NULL as SQL NULL.
                        if self.options['nullwordaskeyword'][
                                'value'] == "y" and val.upper() == "NULL":
                            self._editor.executeManagementCommand(
                                """SET @a%d = NULL """ % (i), 0)
                        else:
                            self._editor.executeManagementCommand(
                                """SET @a%d = '%s' """ % (i, val), 0)
                else:
                    # for/else: runs only when every column was present.
                    try:
                        self._editor.executeManagementCommand(
                            "EXECUTE stmt USING %s" % ", ".join([
                                '@a%d' % i for i, col in enumerate(col_order)
                            ]), 0)
                        self.item_count = self.item_count + 1
                        self.update_progress(
                            round(self._current_row / self._max_rows, 2),
                            "Data import")
                    except Exception, e:
                        log_error("Row import failed with error: %s" % e)
                        self.update_progress(
                            round(self._current_row / self._max_rows, 2),
                            "Row import failed with error: %s" % e)
                        result = False
            self.update_progress(1.0, "Import finished")
        except Exception, e:
            import traceback
            log_debug3("Import failed traceback: %s" % traceback.format_exc())
            log_error("Import failed: %s" % e)
def start_import(self): if not self._last_analyze: return False if self._new_table: if not self.prepare_new_table(): return False if self._truncate_table: self.update_progress(0.0, "Truncate table") self._editor.executeManagementCommand("TRUNCATE TABLE %s" % self._table_w_prefix, 1) result = True with open(self._filepath, 'rb') as jsonfile: data = json.load(jsonfile) dest_col_order = list(set([i['dest_col'] for i in self._mapping if i['active']])) query = """PREPARE stmt FROM 'INSERT INTO %s (%s) VALUES(%s)'""" % (self._table_w_prefix, ",".join(["`%s`" % col for col in dest_col_order]), ",".join(["?" for i in dest_col_order])) col_order = dict([(i['dest_col'], i['name']) for i in self._mapping if i['active']]) col_type = dict([(i['name'], i['type']) for i in self._mapping if i['active']]) self._editor.executeManagementCommand(query, 1) try: self._max_rows = len(data) for row in data: if self._thread_event and self._thread_event.is_set(): log_debug2("Worker thread was stopped by user") self._editor.executeManagementCommand("DEALLOCATE PREPARE stmt", 1) return False self._current_row = self._current_row + 1 for i, col in enumerate(col_order): if col_order[col] not in row: log_error("Can't find col: %s in row: %s" % (col_order[col], row)) result = False break val = row[col_order[col]] col_name = col_order[col] if col_type[col] == "geometry": val = """ ST_GeomFromGeoJSON('%s')""" % json.dumps(val).replace("\\", "\\\\").replace("'", "\\'") self._editor.executeManagementCommand("""SET @a%d = %s """ % (i, val), 0) else: if col_type[col_name] != "json" and hasattr(val, "replace"): val = val.replace("\\", "\\\\").replace("'", "\\'") if col_type[col_name] == 'double': val = val(str).replace(self._decimal_separator, '.') elif col_type[col_name] == 'datetime': val = datetime.datetime.strptime(val, self._date_format).strftime("%Y-%m-%d %H:%M:%S") elif col_type[col_name] == "json": val = json.dumps(val).replace("\\", "\\\\").replace("'", "\\'") if col_type[col_name] == "int": 
self._editor.executeManagementCommand("""SET @a%d = %d """ % (i, int(val)), 0) else: self._editor.executeManagementCommand("""SET @a%d = '%s' """ % (i, val), 0) else: try: self._editor.executeManagementCommand("EXECUTE stmt USING %s" % ", ".join(['@a%d' % i for i, col in enumerate(col_order)]), 0) self.item_count = self.item_count + 1 except Exception, e: log_error("Row import failed with error: %s" % e) except Exception, e: import traceback log_debug3("Import failed traceback: %s" % traceback.format_exc()) log_error("Import failed: %s" % e) self._editor.executeManagementCommand("DEALLOCATE PREPARE stmt", 1)
def start_import(self):
    """Import rows from the CSV file at self._filepath into the target table.

    Streams rows through UniReader, binds each mapped column via a
    session variable (@aN) and EXECUTEs a prepared INSERT per row.
    Progress is reported as file-offset / file-size. Returns False on
    early abort (no analysis, table prep failure, user stop).
    """
    if not self._last_analyze:
        return False
    if self._new_table:
        if not self.prepare_new_table():
            return False
    if self._truncate_table:
        self.update_progress(0.0, "Truncate table")
        self._editor.executeManagementCommand("TRUNCATE TABLE %s" % self._table_w_prefix, 1)
    result = True
    with open(self._filepath, 'rb') as csvfile:
        self.update_progress(0.0, "Prepare Import")
        dest_col_order = list(set([i['dest_col'] for i in self._mapping if i['active']]))
        query = """PREPARE stmt FROM 'INSERT INTO %s (%s) VALUES(%s)'""" % (self._table_w_prefix, ",".join(["`%s`" % col for col in dest_col_order]), ",".join(["?" for i in dest_col_order]))
        # dest column -> CSV column index, and dest column -> type.
        col_order = dict([(i['dest_col'], i['col_no']) for i in self._mapping if i['active']])
        col_type = dict([(i['dest_col'], i['type']) for i in self._mapping if i['active']])
        # 5.7.5+ renamed the spatial functions (ST_GeomFromText).
        is_server_5_7 = self._targetVersion.is_supported_mysql_version_at_least(Version.fromstr("5.7.5"))
        self._editor.executeManagementCommand(query, 1)
        try:
            is_header = self.has_header
            reader = UniReader(csvfile, self.dialect, encoding=self._encoding)
            # Progress is byte-based: max is the file size, current is tell().
            self._max_rows = os.path.getsize(self._filepath)
            self.update_progress(0.0, "Begin Import")
            for row in reader:
                if self._thread_event and self._thread_event.is_set():
                    self._editor.executeManagementCommand("DEALLOCATE PREPARE stmt", 1)
                    log_debug2("Worker thread was stopped by user")
                    self.update_progress(round(self._current_row / self._max_rows, 2), "Import stopped by user request")
                    return False
                self._current_row = float(csvfile.tell())
                if is_header:
                    # Skip the first data row when the file has a header line.
                    is_header = False
                    continue
                for i, col in enumerate(col_order):
                    if col_order[col] >= len(row):
                        log_error("Can't find col: %s in row: %s" % (col_order[col], row))
                        result = False
                        break
                    val = row[col_order[col]]
                    col_name = col_order[col]
                    if col_type[col] == "geometry":
                        if is_server_5_7:
                            val = """ST_GeomFromText("%s")""" % row[col_name]
                        else:
                            val = """GeomFromText("%s")""" % row[col_name]
                        self._editor.executeManagementCommand("""SET @a%d = %s """ % (i, val), 0)
                    else:
                        if col_type[col] == 'double':
                            val = row[col_name].replace(self._decimal_separator, '.')
                        elif col_type[col] == 'datetime':
                            val = datetime.datetime.strptime(row[col_name], self._date_format).strftime("%Y-%m-%d %H:%M:%S")
                        # Escape backslashes and quotes for the SQL literal.
                        if hasattr(val, "replace"):
                            val = val.replace("\\", "\\\\").replace("'", "\\'")
                        self._editor.executeManagementCommand("""SET @a%d = '%s' """ % (i, val), 0)
                else:
                    # for/else: runs only when every column was present.
                    try:
                        self._editor.executeManagementCommand("EXECUTE stmt USING %s" % ", ".join(['@a%d' % i for i, col in enumerate(col_order)]), 0)
                        self.item_count = self.item_count + 1
                        self.update_progress(round(self._current_row / self._max_rows, 2), "Data import")
                    except Exception, e:
                        log_error("Row import failed with error: %s" % e)
                        self.update_progress(round(self._current_row / self._max_rows, 2), "Row import failed with error: %s" % e)
                        result = False
            self.update_progress(1.0, "Import finished")
        except Exception, e:
            import traceback
            log_debug3("Import failed traceback: %s" % traceback.format_exc())
            log_error("Import failed: %s" % e)
def create_info_sections(self):
    """Builds the server-status UI sections (features, directories, SSL, firewall).

    Reads server/status variables and the active plugin list from
    self.ctrl_be and registers display sections whose values are lambdas
    evaluated against (info, plugins, status).
    """
    info = self.ctrl_be.server_variables
    status = self.ctrl_be.status_variables
    plugins = dict(self.ctrl_be.server_active_plugins)  # plugin -> type
    repl = {}
    disk_space = "checking..."

    def tristate(value, true_value=None):
        # Maps a server variable to True / False / None (unknown):
        # explicit true_value match -> True; "OFF"/"NO" -> False;
        # any other truthy value (when no true_value given) -> True.
        if true_value is not None and value == true_value:
            return True
        if value == "OFF" or value == "NO":
            return False
        elif value and true_value is None:
            return True
        return None

    semi_sync_master = tristate(info.get("rpl_semi_sync_master_enabled"))
    semi_sync_slave = tristate(info.get("rpl_semi_sync_slave_enabled"))
    # (enabled?, "(master, slave)") annotation for the feature row.
    semi_sync_status = (
        semi_sync_master or semi_sync_slave,
        "(%s)" % ", ".join([
            x for x in
            [semi_sync_master and "master", semi_sync_slave and "slave"]
            if x
        ]))
    memcached_status = True if plugins.has_key(
        'daemon_memcached') else None
    # NOTE(review): repl is always {} at this point, so both flags are
    # unconditionally reset — confirm whether replication detection was
    # meant to populate `repl` first.
    if not repl:
        if semi_sync_master:
            semi_sync_master = False
        if semi_sync_slave:
            semi_sync_slave = False
    # the params to be passed to the lambdas
    params = (info, plugins, status)
    self.add_info_section_2(
        "Available Server Features",
        [("Performance Schema", lambda info, plugins, status: tristate(
            info.get("performance_schema"))),
         ("Thread Pool", lambda info, plugins, status: tristate(
             info.get("thread_handling"), "loaded-dynamically")),
         ("Memcached Plugin",
          lambda info, plugins, status: memcached_status),
         ("Semisync Replication Plugin",
          lambda info, plugins, status: semi_sync_status),
         ("SSL Availability", lambda info, plugins, status: info
          .get("have_openssl") == "YES" or info.get("have_ssl") == "YES"),
         # Platform-specific row: Windows auth on Windows, PAM elsewhere.
         ("Windows Authentication", lambda info, plugins, status: plugins
          .has_key("authentication_windows"))
         if self.server_profile.target_is_windows else
         ("PAM Authentication", lambda info, plugins, status: plugins
          .has_key("authentication_pam")),
         ("Password Validation", lambda info, plugins, status:
          (tristate(info.get("validate_password_policy")),
           "(Policy: %s)" % info.get("validate_password_policy"))),
         ("Audit Log", lambda info, plugins, status:
          (tristate(info.get("audit_log_policy")),
           "(Log Policy: %s)" % info.get("audit_log_policy"))),
         ("Firewall", lambda info, plugins, status: tristate(
             info.get("mysql_firewall_mode"))),
         ("Firewall Trace", lambda info, plugins, status: tristate(
             info.get("mysql_firewall_trace")))], params)
    log_output = info.get("log_output", "FILE")
    self.add_info_section(
        "Server Directories",
        [("Base Directory",
          lambda info, plugins, status: info.get("basedir")),
         ("Data Directory",
          lambda info, plugins, status: info.get("datadir")),
         ("Disk Space in Data Dir", disk_space),
         ("InnoDB Data Directory",
          lambda info, plugins, status: info.get("innodb_data_home_dir"))
         if info.get("innodb_data_home_dir") else None,
         ("Plugins Directory",
          lambda info, plugins, status: info.get("plugin_dir")),
         ("Tmp Directory",
          lambda info, plugins, status: info.get("tmpdir")),
         ("Error Log", lambda info, plugins, status:
          (info.get("log_error") and info.get("log_error") != "OFF",
           info.get("log_error"))),
         # Log rows: (enabled?, file path or database note).
         ("General Log", lambda info, plugins, status:
          (info.get("general_log") != "OFF" and log_output != "NONE",
           info.get("general_log_file")
           if "FILE" in log_output else "[Stored in database]")),
         ("Slow Query Log", lambda info, plugins, status:
          (info.get("slow_query_log") != "OFF" and log_output != "NONE",
           info.get("slow_query_log_file")
           if "FILE" in log_output else "[Stored in database]"))], params)
    # Placeholder until replication status is fetched asynchronously.
    self.add_info_section("Replication Slave", [("", "checking...")],
                          params)
    self.add_info_section(
        "Authentication",
        [("SHA256 Password Private Key", lambda info, plugins, status: info
          .get("sha256_password_private_key_path")),
         ("SHA256 Password Public Key", lambda info, plugins, status: info.
          get("sha256_password_public_key_path"))], params)
    self.add_info_section(
        "SSL",
        [("SSL CA",
          lambda info, plugins, status: info.get("ssl_ca") or "n/a"),
         ("SSL CA Path",
          lambda info, plugins, status: info.get("ssl_capath") or "n/a"),
         ("SSL Cert",
          lambda info, plugins, status: info.get("ssl_cert") or "n/a"),
         ("SSL Cipher",
          lambda info, plugins, status: info.get("ssl_cipher") or "n/a"),
         ("SSL CRL",
          lambda info, plugins, status: info.get("ssl_crl") or "n/a"),
         ("SSL CRL Path",
          lambda info, plugins, status: info.get("ssl_crlpath") or "n/a"),
         ("SSL Key",
          lambda info, plugins, status: info.get("ssl_key") or "n/a")],
        params)
    log_debug3("mysql_firewall_trace: %s\n" %
               info.get("mysql_firewall_trace"))
    log_debug3("Firewall_access_denied: %s\n" %
               status.get("Firewall_access_denied"))
    log_debug3("Firewall_access_granted: %s\n" %
               status.get("Firewall_access_granted"))
    log_debug3("Firewall_cached_entries: %s\n" %
               status.get("Firewall_cached_entries"))
    # Firewall counters are only shown when the firewall is enabled.
    if info.get("mysql_firewall_mode") == "ON":
        self.add_info_section("Firewall", [
            ("Access Denied", lambda info, plugins, status: str(
                status.get("Firewall_access_denied")) or "n/a"),
            ("Access Granted", lambda info, plugins, status: str(
                status.get("Firewall_access_granted")) or "n/a"),
            ("Access Suspicious", lambda info, plugins, status: str(
                status.get("Firewall_access_suspicious")) or "n/a"),
            ("Cached Entries", lambda info, plugins, status: str(
                status.get("Firewall_cached_entries")) or "n/a")
        ], params)
def server_stopped_event(self):
    """Signals the status poller to stop when the server goes down."""
    log_debug3('Enter\n')
    # Clear the run flag (the poller loop checks running[0]) and drop the
    # thread reference; the thread is not joined here.
    self.running[0], self.poll_thread = False, None
    log_debug3('Leave\n')
def create_info_sections(self):
    """Builds the server-status UI sections (features, directories, SSL, firewall).

    Reads server/status variables and the active plugin list from
    self.ctrl_be and registers display sections whose values are lambdas
    evaluated against (info, plugins, status).
    """
    info = self.ctrl_be.server_variables
    status = self.ctrl_be.status_variables
    plugins = dict(self.ctrl_be.server_active_plugins) # plugin -> type
    repl = {}
    disk_space = "checking..."
    def tristate(value, true_value = None):
        # Maps a server variable to True / False / None (unknown):
        # explicit true_value match -> True; "OFF"/"NO" -> False;
        # any other truthy value (when no true_value given) -> True.
        if true_value is not None and value == true_value:
            return True
        if value == "OFF" or value == "NO":
            return False
        elif value and true_value is None:
            return True
        return None
    semi_sync_master = tristate(info.get("rpl_semi_sync_master_enabled"))
    semi_sync_slave = tristate(info.get("rpl_semi_sync_slave_enabled"))
    # (enabled?, "(master, slave)") annotation for the feature row.
    semi_sync_status = (semi_sync_master or semi_sync_slave, "(%s)"% ", ".join([x for x in [semi_sync_master and "master", semi_sync_slave and "slave"] if x]))
    memcached_status = True if plugins.has_key('daemon_memcached') else None
    # NOTE(review): repl is always {} at this point, so both flags are
    # unconditionally reset — confirm whether replication detection was
    # meant to populate `repl` first.
    if not repl:
        if semi_sync_master:
            semi_sync_master = False
        if semi_sync_slave:
            semi_sync_slave = False
    # the params to be passed to the lambdas
    params = (info, plugins, status)
    self.add_info_section_2("Available Server Features",
                            [("Performance Schema:", lambda info, plugins, status: tristate(info.get("performance_schema"))),
                             ("Thread Pool:", lambda info, plugins, status: tristate(info.get("thread_handling"), "loaded-dynamically")),
                             ("Memcached Plugin:", lambda info, plugins, status: memcached_status),
                             ("Semisync Replication Plugin:", lambda info, plugins, status: semi_sync_status),
                             ("SSL Availability:", lambda info, plugins, status: info.get("have_openssl") == "YES" or info.get("have_ssl") == "YES"),
                             # Platform-specific row: Windows auth on Windows, PAM elsewhere.
                             ("Windows Authentication:", lambda info, plugins, status: plugins.has_key("authentication_windows")) if self.server_profile.target_is_windows else ("PAM Authentication:", lambda info, plugins, status: plugins.has_key("authentication_pam")),
                             ("Password Validation:", lambda info, plugins, status: (tristate(info.get("validate_password_policy")), "(Policy: %s)" % info.get("validate_password_policy"))),
                             ("Audit Log:", lambda info, plugins, status: (tristate(info.get("audit_log_policy")), "(Log Policy: %s)" % info.get("audit_log_policy"))),
                             ("Firewall:", lambda info, plugins, status: tristate(info.get("mysql_firewall_mode"))),
                             ("Firewall Trace:", lambda info, plugins, status: tristate(info.get("mysql_firewall_trace")))], params)
    log_output = info.get("log_output", "FILE")
    self.add_info_section("Server Directories",
                          [("Base Directory:", lambda info, plugins, status: info.get("basedir")),
                           ("Data Directory:", lambda info, plugins, status: info.get("datadir")),
                           ("Disk Space in Data Dir:", disk_space),
                           ("InnoDB Data Directory:", lambda info, plugins, status: info.get("innodb_data_home_dir")) if info.get("innodb_data_home_dir") else None,
                           ("Plugins Directory:", lambda info, plugins, status: info.get("plugin_dir")),
                           ("Tmp Directory:", lambda info, plugins, status: info.get("tmpdir")),
                           ("Error Log:", lambda info, plugins, status: (info.get("log_error") and info.get("log_error")!="OFF", info.get("log_error"))),
                           # Log rows: (enabled?, file path or database note).
                           ("General Log:", lambda info, plugins, status: (info.get("general_log")!="OFF" and log_output != "NONE", info.get("general_log_file") if "FILE" in log_output else "[Stored in database]")),
                           ("Slow Query Log:", lambda info, plugins, status: (info.get("slow_query_log")!="OFF" and log_output != "NONE", info.get("slow_query_log_file") if "FILE" in log_output else "[Stored in database]"))], params)
    # Placeholder until replication status is fetched asynchronously.
    self.add_info_section("Replication Slave",
                          [("", "checking...")], params)
    self.add_info_section("Authentication",
                          [("SHA256 password private key:", lambda info, plugins, status: info.get("sha256_password_private_key_path")),
                           ("SHA256 password public key:", lambda info, plugins, status: info.get("sha256_password_public_key_path"))], params)
    self.add_info_section("SSL",
                          [("SSL CA:", lambda info, plugins, status: info.get("ssl_ca") or "n/a"),
                           ("SSL CA path:", lambda info, plugins, status: info.get("ssl_capath") or "n/a"),
                           ("SSL Cert:", lambda info, plugins, status: info.get("ssl_cert") or "n/a"),
                           ("SSL Cipher:", lambda info, plugins, status: info.get("ssl_cipher") or "n/a"),
                           ("SSL CRL:", lambda info, plugins, status: info.get("ssl_crl") or "n/a"),
                           ("SSL CRL path:", lambda info, plugins, status: info.get("ssl_crlpath") or "n/a"),
                           ("SSL Key:", lambda info, plugins, status: info.get("ssl_key") or "n/a")], params)
    log_debug3("mysql_firewall_trace: %s\n" % info.get("mysql_firewall_trace"))
    log_debug3("Firewall_access_denied: %s\n" % status.get("Firewall_access_denied"))
    log_debug3("Firewall_access_granted: %s\n" % status.get("Firewall_access_granted"))
    log_debug3("Firewall_cached_entries: %s\n" % status.get("Firewall_cached_entries"))
    # Firewall counters are only shown when the firewall is enabled.
    if info.get("mysql_firewall_mode") == "ON":
        self.add_info_section("Firewall",
                              [("Access denied:", lambda info, plugins, status: str(status.get("Firewall_access_denied")) or "n/a"),
                               ("Access granted:", lambda info, plugins, status: str(status.get("Firewall_access_granted")) or "n/a"),
                               ("Access suspicious:", lambda info, plugins, status: str(status.get("Firewall_access_suspicious")) or "n/a"),
                               ("Cached entries:", lambda info, plugins, status: str(status.get("Firewall_cached_entries")) or "n/a")], params)