def display_mysql_myisam_results(mysql_info):
    print_header("MyISAM", 2)
    print_stat("Key Buffer")
    print_stat("Size", format_bytes(mysql_info.vars.key_buffer_size), 2)
    print_stat("Used", "%s (%s)" % \
        (format_bytes(mysql_info.key_buffer_used),
         format_percent(mysql_info.key_buffer_used_pct)), 2)
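# The display functions here rely on shared helpers (print_header,
# print_stat) that are not shown in this file. A minimal sketch of what
# they might look like, assuming the trailing integer argument is an
# indent level -- these signatures are inferred from the call sites,
# not confirmed:
def print_header(title, level=1):
    print "%s%s" % ('  ' * (level - 1), title)
    print "%s%s" % ('  ' * (level - 1), '-' * len(title))

def print_stat(name, value='', level=1):
    print "%s%-30s %s" % ('  ' * level, name + ':', value)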
def menu(**kwargs):
    # Clear terminal
    os.system('clear')
    # Show header
    print_header()
    # Show main menu
    print_menu()
    # - Add master game
    # - Add sub game
    # -- Show current master games
    # -- Input game to add to
    # -- Call add sub game function
    # - View current games
    # - View stats
    # Input selection
    while True:
        menu_select = input_validator("Enter your selection [1-7]", int)
        if menu_select == 1:
            create_game()
        elif menu_select == 2:
            getid = current_games(return_id=1)
            add_subgame(id=getid)
        elif menu_select == 3:
            current_games()
        elif menu_select == 4:
            finished_games()
        elif menu_select == 5:
            view_stats()
        elif menu_select == 6:
            print_help()
        elif menu_select == 7:
            print("Goodbye!")
            exit()
        else:
            # Catches anything outside 1-7, including values below 1
            print("Invalid input, enter [1-7]!")
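# input_validator is defined elsewhere; a minimal sketch, assuming it
# re-prompts until the input parses with the given type (the real helper
# may also range-check):
def input_validator(prompt, cast):
    while True:
        try:
            return cast(input("%s: " % prompt))
        except ValueError:
            print("Invalid input, try again!")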
def display_mysql_global_results(mysql_info):
    print_header('Global Information', 2)
    print_stat('Server ID', int(mysql_info.vars.server_id))
    print_stat('MySQL Architecture', mysql_info.vars.version_compile_machine, 1)
    print_stat('MySQL Version', mysql_info.vars.version, 1)
    print_stat('Data Directory', mysql_info.vars.datadir)
    print_stat("Uptime", format_interval(mysql_info.status.uptime))
    print_stat('Max Allowed Packet', format_bytes(mysql_info.vars.max_allowed_packet))
    print_stat("Connections", "%s of %s" % \
        (int(mysql_info.status.max_used_connections),
         int(mysql_info.vars.max_connections)))
    print_stat("Disk Based Temp Tables", "%s of %s (%s)" % \
        (int(mysql_info.status.created_tmp_disk_tables),
         int(mysql_info.status.created_tmp_tables),
         format_percent(mysql_info.tmp_tables_disk_pct)))
    print_stat("Sort Merge Passes", int(mysql_info.status.sort_merge_passes))
    print_stat("Non-Indexed Joins", int(mysql_info.status.select_full_join))
    print_stat("Open Files", '%s (limit %s)' % \
        (int(mysql_info.status.open_files),
         int(mysql_info.vars.open_files_limit)))
    print_stat("Open Tables", int(mysql_info.status.open_tables))
    if mysql_info.vars.query_cache_size == 0:
        print_stat("Query Cache", "Disabled")
    else:
        print_stat("Query Cache")
        print_stat("Size", format_bytes(mysql_info.vars.query_cache_size), 2)
        print_stat("Hit Rate", format_percent(mysql_info.query_cache_hitrate), 2)
    print_stat("Table Lock Waits", "%s of %s (%s)" % \
        (int(mysql_info.status.table_locks_waited),
         int(mysql_info.table_locks_total),
         format_percent(mysql_info.table_lock_wait_pct)))
    print_stat("Estimated Table Scans", format_percent(mysql_info.table_scans_pct))
    print_stat("Slow Queries")
    print_stat("Queries", "%s of %s (%s)" % \
        (int(mysql_info.status.slow_queries),
         int(mysql_info.status.com_select),
         format_percent(mysql_info.slow_query_pct)), 2)
    print_stat("Long Query Time", format_interval(mysql_info.vars.long_query_time), 2)
    print_stat("Log Non-Indexed Queries", mysql_info.vars.log_queries_not_using_indexes, 2)
    print_stat('Binary Log', '')
    print_stat('Binary Logging', mysql_info.vars.log_bin, 2)
    try:
        print_stat('Binlog Format', mysql_info.vars.binlog_format, 2)
    except KeyError:
        print_stat('Binlog Format', 'Not Detected / Pre 5.1', 2)
    print_stat("Read Frequency", format_percent(mysql_info.read_pct))
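# format_bytes, format_percent, and format_interval are also external
# helpers. A rough sketch, under the assumption that each returns a
# human-readable string (the real rounding and unit rules may differ):
def format_bytes(value):
    value = float(value)
    for unit in ('B', 'K', 'M', 'G', 'T'):
        if value < 1024 or unit == 'T':
            return '%.1f%s' % (value, unit)
        value /= 1024

def format_percent(value):
    return '%.2f%%' % float(value)

def format_interval(seconds):
    minutes, secs = divmod(int(seconds), 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)
    return '%dd %dh %dm %ds' % (days, hours, minutes, secs)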
def check_health(mysql):
    global opts
    if opts.max_replication_delay:
        max_replication_delay = opts.max_replication_delay
    else:
        max_replication_delay = 300
    errors = ""
    print_header("Health Checks")
    mysql_info = mysql.mysql_info
    print ""
    if mysql_info.queries['long_running_queries'] > 0:
        errors += "One or more long running queries detected\n"
    if mysql_info.slave_status:
        if mysql_info.slave_status.slave_io_running != 'Yes':
            errors += "Slave IO Thread Not Running\n"
        if mysql_info.slave_status.slave_sql_running != 'Yes':
            errors += "Slave SQL Thread Not Running\n"
        if mysql_info.slave_status.seconds_behind_master > max_replication_delay:
            errors += "Slave Lagging Too Far Behind\n"
        if mysql_info.slave_status.last_error:
            errors += "Slave Error Reported\n"
    # If the wsrep variable does not exist, we can assume this is not
    # a Galera server, so we skip the checks
    try:
        if mysql_info.vars.wsrep_provider:
            if mysql_info.status.wsrep_cluster_size < 3:
                errors += "Galera Node Missing\n"
            if not mysql_info.status.wsrep_ready:
                errors += "Galera Cluster Not Ready\n"
    except KeyError:
        pass
    # Same thing as above - if, somehow, InnoDB is not enabled we don't
    # need to check its parameters. There could be cases where you'd want
    # to error if InnoDB is unavailable, but using 'innodb = FORCE' within
    # MySQL is a better solution.
    try:
        if mysql_info.vars.innodb_version:
            if mysql_info.innodb_buffer_pool_hit_rate < 95:
                errors += "InnoDB Buffer Pool Hit Rate Under 95%\n"
        # Use float division so the percentage is not truncated to zero
        if (float(mysql_info.status.threads_connected) /
                mysql_info.vars.max_connections * 100) > 75:
            errors += "Open Connections Above 75% of Max\n"
    except KeyError:
        pass
    if errors != "":
        print errors
        sys.exit(1)
    else:
        print "Everything is all good!"
        sys.exit(0)
def display_innodb_transactions(mysql):
    print ""
    print_header("InnoDB Transactions")
    try:
        for i, txn in enumerate(mysql.mysql_info.ibstatus.transactions):
            print "TRANSACTION(%d)" % i
            print txn
    except ValueError:
        print "Unable To Parse SHOW ENGINE INNODB STATUS"
def display_schema_info(mysql):
    print ""
    print_header("Schema Information")
    print ""
    print_header("Engine Breakdown", 2)
    print '%-8s : %8s : %12s : %12s' % \
        ('Engine', '# Tables', 'Data Length', 'Index Length')
    for row in mysql.schema_engine_summary:
        print '%-8s : %8s : %12s : %12s' % \
            (row['Engine'], row['Count'],
             format_bytes(row['Data Length']),
             format_bytes(row['Index Length']))
    print ""
    print_header('%s Largest Databases' % limit, 2)
    print '%-32s : %12s : %12s' % \
        ('Database', 'Data Length', 'Index Length')
    for row in mysql.schema_largest_dbs(limit):
        print '%-32s : %12s : %12s' % \
            (row['Database'],
             format_bytes(row['Data Length']),
             format_bytes(row['Index Length']))
    print ""
    print_header('%s Largest Tables' % limit, 2)
    print '%-32s : %12s : %12s' % \
        ('Table', 'Data Length', 'Index Length')
    for row in mysql.schema_largest_tables(limit):
        print '%-32s : %12s : %12s' % \
            (row['Table'],
             format_bytes(row['Data Length']),
             format_bytes(row['Index Length']))
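# 'limit' above is not defined locally, so it is presumably a module-level
# global. A hypothetical example of how it might be set from the same CLI
# options object used by check_health() -- the option name is an
# assumption, not shown in this code:
limit = getattr(opts, 'limit', None) or 10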
def display_mysql_results(mysql):
    mysql_info = mysql.mysql_info
    print ""
    print_header('MySQL Information', 1)
    print ""
    display_mysql_global_results(mysql_info)
    print ""
    display_mysql_thread_results(mysql_info)
    print ""
    display_mysql_myisam_results(mysql_info)
    print ""
    display_mysql_innodb_results(mysql_info)
    print ""
    display_slave_info(mysql_info)
def display_mysql_thread_results(mysql_info):
    print_header("Threads", 2)
    print_stat("Buffers")
    print '    %-9s : %-9s : %-9s : %-9s' % \
        ('Read', 'Read RND', 'Sort', 'Join')
    print '    %-9s : %-9s : %-9s : %-9s' % \
        (format_bytes(mysql_info.vars.read_buffer_size),
         format_bytes(mysql_info.vars.read_rnd_buffer_size),
         format_bytes(mysql_info.vars.sort_buffer_size),
         format_bytes(mysql_info.vars.join_buffer_size))
    print_stat("Threads")
    print '    %-9s : %-9s : %-9s : %-9s' % \
        ('Size', 'Cached', 'Running', 'Created')
    print '    %-9s : %-9s : %-9s : %-9s' % \
        (int(mysql_info.vars.thread_cache_size),
         int(mysql_info.status.threads_cached),
         int(mysql_info.status.threads_running),
         int(mysql_info.status.threads_created))
def display_system_info(mysql):
    system_info = dict()
    print "Gathering system information...",
    system_info['architecture'] = platform.machine()
    if platform.system() == 'Linux':
        system_info['totalMemory'] = format_bytes(round(
            os.sysconf('SC_PHYS_PAGES') * os.sysconf('SC_PAGE_SIZE'), 0))
        system_info['freeMemory'] = format_bytes(
            os.sysconf('SC_AVPHYS_PAGES') * os.sysconf('SC_PAGE_SIZE'))
        system_info['cpuCores'] = os.sysconf('SC_NPROCESSORS_CONF')
    else:
        system_info['totalMemory'] = 'Unknown'
        system_info['freeMemory'] = 'Unknown'
        system_info['cpuCores'] = 'Unknown'
    print "done!\n"
    print_header('Local System Information', 1)
    print_stat('CPU Cores', system_info['cpuCores'], 1)
    print_stat('Total Memory', system_info['totalMemory'], 1)
    print_stat('System Architecture', system_info['architecture'], 1)
def display_mysql_innodb_results(mysql_info):
    print_header("InnoDB", 2)
    # Note: ('DISABLED' or False) always evaluates to 'DISABLED', so the
    # original comparison never matched False; test both values explicitly
    if mysql_info.vars.innodb_version in ('DISABLED', False):
        print "Disabled"
    else:
        try:
            print_stat('Version', mysql_info.vars.innodb_version)
        except KeyError:
            print_stat('Version', 'Default')
        print_stat('Paths', '')
        print_stat('InnoDB Home Directory', mysql_info.vars.innodb_data_home_dir, 2)
        print_stat("InnoDB Log Directory", mysql_info.vars.innodb_log_group_home_dir, 2)
        print_stat("InnoDB Data File Path", mysql_info.vars.innodb_data_file_path, 2)
        print_stat('Buffer Pool', '')
        print_stat("Usage", "%s of %s (%s)" % \
            (format_bytes(mysql_info.innodb_buffer_pool_used),
             format_bytes(mysql_info.vars.innodb_buffer_pool_size),
             format_percent(mysql_info.innodb_buffer_pool_used_pct)), 2)
        print_stat("Hit Rate", format_percent(mysql_info.innodb_buffer_pool_hit_rate), 2)
        print_stat('History List', mysql_info.ibstatus.history_list_length)
        print_stat("File Per Table", mysql_info.vars.innodb_file_per_table)
        if mysql_info.vars.innodb_file_per_table:
            print_stat("InnoDB Open Files", int(mysql_info.vars.innodb_open_files), 2)
        print_stat("Flush Log At Commit", int(mysql_info.vars.innodb_flush_log_at_trx_commit))
        print_stat("Flush Method", mysql_info.innodb_flush_method)
        print_stat("Thread Concurrency", int(mysql_info.vars.innodb_thread_concurrency))
        print_stat("Log File Size", "%s x %s logs (%s total)" % \
            (format_bytes(mysql_info.vars.innodb_log_file_size),
             int(mysql_info.vars.innodb_log_files_in_group),
             format_bytes(mysql_info.innodb_log_file_size_total)))
def display_slave_info(mysql_info):
    print_header('Replication', 2)
    if mysql_info.slave_status is None:
        print "Not Enabled"
        return
    print_stat('Master', mysql_info.slave_status.master_host)
    print_stat('Logs', '')
    print_stat('Spooled Master Log File', '%s (pos: %s)' % \
        (mysql_info.slave_status.master_log_file,
         mysql_info.slave_status.read_master_log_pos), 2)
    print_stat('Executed Master Log File', '%s (pos: %s)' % \
        (mysql_info.slave_status.relay_master_log_file,
         mysql_info.slave_status.exec_master_log_pos), 2)
    print_stat('Relay Log File', '%s (pos: %s)' % \
        (mysql_info.slave_status.relay_log_file,
         mysql_info.slave_status.relay_log_pos), 2)
    # Using a long-style if for Python 2.4 compatibility
    #print_stat('Relay Log Space Limit',
    #           format_bytes(mysql_info.vars.relay_log_space_limit)
    #           if mysql_info.vars.relay_log_space_limit != 0 else 'Unlimited')
    if mysql_info.vars.relay_log_space_limit != 0:
        print_stat('Relay Log Space Limit',
                   format_bytes(mysql_info.vars.relay_log_space_limit))
    else:
        print_stat('Relay Log Space Limit', 'Unlimited')
    print_stat('IO Thread Running', mysql_info.slave_status.slave_io_running)
    print_stat('SQL Thread Running', mysql_info.slave_status.slave_sql_running)
    print_stat('Seconds Behind Master', mysql_info.slave_status.seconds_behind_master)
    print_stat('Last Error', mysql_info.slave_status.last_error)
Uses Keras and a simple cost function based on the angle
"""

# Include files
import gym
import numpy as np
from random import uniform
from keras.models import Sequential
from keras.layers import Dense

# Include custom files
import functions as func
import Configuration as cfg
import StateModel as sm

func.print_header()

# Create the CartPole environment for the physics model
if cfg.useOpenAImodel:
    env = gym.make('CartPole-v0')

print("Defining Model")

# Create the model: one hidden ReLU layer, one sigmoid output unit
# (note: 'init' is the Keras 1.x name for what is now 'kernel_initializer')
model = Sequential()
model.add(Dense(100, input_dim=cfg.action_input_size,
                init='uniform', activation='relu'))
model.add(Dense(1, init='uniform', activation='sigmoid'))
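# A minimal, hypothetical sketch of compiling and training the model above.
# Assumptions: binary_crossentropy matches the sigmoid output, and the real
# training data comes from cfg/StateModel - random arrays stand in here so
# the sketch runs on its own. Neither the loss nor the optimizer is shown
# in the code above.
X_demo = np.random.uniform(-1.0, 1.0, (64, cfg.action_input_size))
y_demo = np.random.randint(0, 2, (64, 1))
model.compile(loss='binary_crossentropy', optimizer='adam',
              metrics=['accuracy'])
model.fit(X_demo, y_demo, nb_epoch=5, batch_size=16)  # nb_epoch: Keras 1.x spelling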
import csv
import glob
import os
from datetime import datetime
from tkinter import Tk
from tkinter.filedialog import askopenfilename

import pandas as pd
from mailmerge import MailMerge

from functions import close_app, print_header

print_header()

# Hide the root window; we only want the file-open dialogs
root = Tk()
root.withdraw()

contact_csv = askopenfilename(initialdir=os.getcwd(),
                              title="Select Contacts CSV",
                              filetypes=(("CSV Files", "*.csv"),
                                         ("All Files", "*")))
print("You have selected", contact_csv)

deal_csv = askopenfilename(initialdir=os.getcwd(),
                           title="Select Deals CSV",
                           filetypes=(("CSV Files", "*.csv"),
                                      ("All Files", "*")))
print("You have selected", deal_csv)

print("Reading files...")
a = pd.read_csv(contact_csv)
b = pd.read_csv(deal_csv)

save_dir = 'merge_output/'
# If merge_output exists, delete all files inside it; otherwise create it
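# A minimal sketch of the cleanup the comment above describes, using the
# already-imported os and glob modules (assumption: "files inside it"
# means plain files only, so any subdirectories are left alone):
if os.path.isdir(save_dir):
    for path in glob.glob(os.path.join(save_dir, '*')):
        if os.path.isfile(path):
            os.remove(path)
else:
    os.makedirs(save_dir)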
def main():
    # Initialization
    print("\n\n")
    print(". . . EMAIL BOT STARTED. PRESS CTRL-C TO QUIT . . .")
    print("\n\n")

    # Logging setup
    logger = functions.logging_setup("water_station_logger")
    logger.critical("Initiating Email Bot")

    # Water station instance initialization
    water_stations_list = []
    for station_info in global_variables.STATION_INFO:
        if station_info[1] == '1':
            # Phase One stations
            water_stations_list.append(classes.PhaseOneStation(station_info))
        elif station_info[1] == '2':
            # Phase Two stations
            water_stations_list.append(classes.PhaseTwoStation(station_info))

    # ------------------------------------------------------------------
    # Obtaining the latest email information
    imap_client = functions.imap_client_setup()
    functions.print_header("DETERMINING LATEST EMAIL")
    logger.info(
        "############################## DETERMINING LATEST EMAIL ##############################"
    )
    for water_station in water_stations_list:
        # Email fetching and parsing
        water_station.email_to_upload_data(imap_client)
        logger.info("Station {0} Email Data: {1}".format(
            water_station.number_label, water_station.data))
    functions.create_upload_and_save_files(water_stations_list, logger)
    imap_client.logout()

    # ------------------------------------------------------------------
    # Monitoring for new emails
    functions.print_header("MONITORING UPCOMING EMAILS")
    logger.info(
        "############################### MONITORING UPCOMING EMAILS ############################"
    )
    while True:
        print("CHECKING FOR NEW EMAIL AT " + str(datetime.datetime.now()))
        imap_client = functions.imap_client_setup()
        new_email_flag = False
        for water_station in water_stations_list:
            if water_station.new_email(imap_client):
                new_email_flag = True
                print("NEW EMAIL DETECTED")
                logger.info("New email detected: Station {}".format(
                    water_station.number_label))
                water_station.remove_new_email_from_inbox(imap_client)
                # Email fetching and parsing
                water_station.email_to_upload_data(imap_client)
                logger.info("Station {0} Email Data: {1}".format(
                    water_station.number_label, water_station.data))
        if new_email_flag:
            functions.create_upload_and_save_files(water_stations_list, logger)
        print("SLEEPING")
        imap_client.logout()
        time.sleep(global_variables.MONITORING_DELAY)
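# A hypothetical sketch of the STATION_INFO layout the loop above assumes:
# each entry is a sequence whose second element is the phase ('1' or '2').
# Only index 1 is actually used in main() - the other fields are guesses.
STATION_INFO_EXAMPLE = [
    ("Station-01", "1", "station01@example.com"),
    ("Station-02", "2", "station02@example.com"),
]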