def get5and20RevisionRanges(run_id, bzr_branch, test_data, dsn_string):
    """ Return a tuple with 2 ranges of run_id values for the last 5 and 20 runs
        Ported from drizzle-automation
        TODO: Further refactor / eliminate this
    """
    # NOTE(review): query is assembled via %-interpolation; acceptable only
    # because the inputs are internally generated (not user-supplied).
    query = """SELECT run_id
               FROM bench_config c
               NATURAL JOIN bench_runs r
               WHERE c.name = '%s'
               AND r.server = '%s'
               AND r.version LIKE '%s%%'
               AND r.run_id <= %d
               ORDER BY run_id DESC
               LIMIT 20
            """ % (test_data['config_name'],
                   test_data['test_machine'],
                   bzr_branch,
                   run_id)
    retcode, results = execute_query(query, dsn_string=dsn_string)
    # collect the run ids (newest first) as strings
    results_data = [str(int(row[0])) for row in results]
    return (results_data[:5], results_data[:20])
def getRegressionData(run_id, id_range, dsn_string):
    """Fetch per-concurrency tps stats for run_id, compared against the
    aggregate (min/max/avg/stddev) of the runs listed in id_range.
    """
    # id_range is a sequence of run_id strings joined straight into IN();
    # safe only because the ids come from our own database.
    query = """SELECT i.concurrency
        , ROUND(AVG(i.tps), 2) AS tps
        , IF (AVG(i.tps) >= agg.avg_tps
            , CONCAT('+', ROUND(((AVG(i.tps) - agg.avg_tps) / agg.avg_tps) * 100, 2), '%%')
            , CONCAT('-', ROUND(((agg.avg_tps - AVG(i.tps)) / agg.avg_tps) * 100, 2), '%%')
          ) as pct_diff_from_avg
        , ROUND((AVG(i.tps) - agg.avg_tps), 2) as diff_from_avg
        , ROUND(agg.min_tps, 2) AS min_tps
        , ROUND(agg.max_tps, 2) AS max_tps
        , ROUND(agg.avg_tps, 2) AS avg_tps
        , FORMAT(ROUND(agg.stddev_tps, 2),2) AS stddev_tps
        FROM bench_config c
        NATURAL JOIN bench_runs r
        NATURAL JOIN sysbench_run_iterations i
        INNER JOIN (
            SELECT concurrency
            , MIN(tps) as min_tps
            , MAX(tps) as max_tps
            , AVG(tps) as avg_tps
            , STDDEV(tps) as stddev_tps
            FROM sysbench_run_iterations iter
            WHERE run_id IN (%s)
            GROUP BY concurrency
        ) AS agg ON i.concurrency = agg.concurrency
        WHERE r.run_id = %d
        GROUP BY i.concurrency
        ORDER BY i.concurrency
    """ % (",".join(id_range), run_id)
    retcode, result = execute_query(query, dsn_string=dsn_string)
    return result
def getRegressionData(run_id, id_range, dsn_string):
    """Return regression data for one run: per-concurrency tps plus its
    deviation from the aggregate stats of the id_range comparison runs.
    """
    in_clause = ",".join(id_range)
    # NOTE(review): string-built SQL; only trusted internal values are used.
    query = """SELECT i.concurrency
        , ROUND(AVG(i.tps), 2) AS tps
        , IF (AVG(i.tps) >= agg.avg_tps
            , CONCAT('+', ROUND(((AVG(i.tps) - agg.avg_tps) / agg.avg_tps) * 100, 2), '%%')
            , CONCAT('-', ROUND(((agg.avg_tps - AVG(i.tps)) / agg.avg_tps) * 100, 2), '%%')
          ) as pct_diff_from_avg
        , ROUND((AVG(i.tps) - agg.avg_tps), 2) as diff_from_avg
        , ROUND(agg.min_tps, 2) AS min_tps
        , ROUND(agg.max_tps, 2) AS max_tps
        , ROUND(agg.avg_tps, 2) AS avg_tps
        , FORMAT(ROUND(agg.stddev_tps, 2),2) AS stddev_tps
        FROM bench_config c
        NATURAL JOIN bench_runs r
        NATURAL JOIN sysbench_run_iterations i
        INNER JOIN (
            SELECT concurrency
            , MIN(tps) as min_tps
            , MAX(tps) as max_tps
            , AVG(tps) as avg_tps
            , STDDEV(tps) as stddev_tps
            FROM sysbench_run_iterations iter
            WHERE run_id IN (%s)
            GROUP BY concurrency
        ) AS agg ON i.concurrency = agg.concurrency
        WHERE r.run_id = %d
        GROUP BY i.concurrency
        ORDER BY i.concurrency
    """ % (in_clause, run_id)
    retcode, rows = execute_query(query, dsn_string=dsn_string)
    return rows
def log_sysbench_iteration(run_id, concurrency, iteration_data, dsn_string):
    """Insert one sysbench iteration's metrics into sysbench_run_iterations.

    iteration_data is a dict carrying 'iteration', 'tps', 'rwreqps',
    'deadlocksps' and the four *_req_lat_ms latency values.
    Returns the result from execute_query.
    """
    # TODO: make sure we properly capture full commentary
    # (fix: removed the dead `full_commentary = None` placeholder local,
    # which was assigned but never used)
    query = """INSERT INTO sysbench_run_iterations (
          run_id
        , concurrency
        , iteration
        , tps
        , read_write_req_per_second
        , deadlocks_per_second
        , min_req_latency_ms
        , max_req_latency_ms
        , avg_req_latency_ms
        , 95p_req_latency_ms
        ) VALUES (%d, %d, %d, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f)
    """ % (int(run_id),
           int(concurrency),
           int(iteration_data['iteration']),
           iteration_data['tps'],
           iteration_data['rwreqps'],
           iteration_data['deadlocksps'],
           iteration_data['min_req_lat_ms'],
           iteration_data['max_req_lat_ms'],
           iteration_data['avg_req_lat_ms'],
           iteration_data['95p_req_lat_ms'])
    retcode, result = execute_query(query, dsn_string=dsn_string)
    return result
def get5and20RevisionRanges(run_id, bzr_branch, test_data, dsn_string):
    """ Return a tuple with 2 ranges of run_id values for the last 5 and 20 runs
        Ported from drizzle-automation
        TODO: Further refactor / eliminate this
    """
    config_name = test_data['config_name']
    machine = test_data['test_machine']
    query = """SELECT run_id
               FROM bench_config c
               NATURAL JOIN bench_runs r
               WHERE c.name = '%s'
               AND r.server = '%s'
               AND r.version LIKE '%s%%'
               AND r.run_id <= %d
               ORDER BY run_id DESC
               LIMIT 20
            """ % (config_name, machine, bzr_branch, run_id)
    retcode, results = execute_query(query, dsn_string=dsn_string)
    # normalize each fetched run_id to a string
    run_ids = []
    for row in results:
        run_ids.append(str(int(row[0])))
    return (run_ids[0:5], run_ids[0:20])
def slave_ready(self):
    """ Return True / False based on wsrep_ready status variable """
    retcode, result = execute_query("SHOW STATUS LIKE 'wsrep_ready'", self)
    # ready only when the server reports exactly wsrep_ready = ON
    return result == (('wsrep_ready', 'ON'),)
def slave_stop(self):
    """ We issue STOP SLAVE and wait for IO and SQL threads to stop """
    query = "STOP SLAVE"
    poll_interval = .5
    retcode, result = execute_query(query, self)
    slave_status = self.get_slave_status()
    # fix: sleep between polls instead of busy-waiting; this matches the
    # polling style of slave_start, which already sleeps between checks
    while (slave_status['slave_io_running'] == 'Yes' or
           slave_status['slave_sql_running'] == 'Yes'):
        time.sleep(poll_interval)
        slave_status = self.get_slave_status()
def slave_ready(self):
    """ Return True / False based on wsrep_ready status variable """
    query = "SHOW STATUS LIKE 'wsrep_ready'"
    retcode, result = execute_query(query, self)
    expected = (('wsrep_ready', 'ON'),)
    if result == expected:
        return True
    else:
        return False
def slave_stop(self):
    """ We issue STOP SLAVE and wait for IO and SQL threads to stop """
    query = "STOP SLAVE"
    poll_interval = .5
    retcode, result = execute_query(query, self)
    slave_status = self.get_slave_status()
    # fix: avoid a tight busy-wait loop - sleep between status polls,
    # consistent with how slave_start waits for the threads
    while (slave_status['slave_io_running'] == 'Yes' or
           slave_status['slave_sql_running'] == 'Yes'):
        time.sleep(poll_interval)
        slave_status = self.get_slave_status()
def getConfigId(dsn_string, test_data):
    """Returns the integer ID of the configuration name used in this run."""
    # Look the name up first; if no row exists yet, insert one (the DB
    # assigns the id via the NULL config_id) and recurse to fetch it.
    benchmark_name = test_data['config_name']
    query = "SELECT config_id FROM bench_config WHERE name = '%s'" % benchmark_name
    retcode, result = execute_query(query, dsn_string=dsn_string)
    if result:
        return int(result[0][0])
    query = "INSERT INTO bench_config (config_id, name) VALUES (NULL, '%s')" % benchmark_name
    retcode, result = execute_query(query, dsn_string=dsn_string)
    return getConfigId(dsn_string, test_data)
def slave_start(self):
    """ We issue START SLAVE and wait for IO and SQL threads to start """
    poll_interval = .5
    retcode, result = execute_query("START SLAVE", self)
    status = self.get_slave_status()
    # keep polling while either replication thread is still down
    while 'No' in (status['slave_io_running'], status['slave_sql_running']):
        time.sleep(poll_interval)
        status = self.get_slave_status()
def getConfigId(dsn_string, test_data):
    """Returns the integer ID of the configuration name used in this run."""
    # Query the local DB for the ID matching this config name; if none is
    # there, insert a new bench_config record and recurse to pick up the
    # newly generated identifier.
    name = test_data['config_name']
    select_sql = "SELECT config_id FROM bench_config WHERE name = '%s'" % name
    retcode, result = execute_query(select_sql, dsn_string=dsn_string)
    if len(result) == 0:
        insert_sql = "INSERT INTO bench_config (config_id, name) VALUES (NULL, '%s')" % name
        retcode, result = execute_query(insert_sql, dsn_string=dsn_string)
        return getConfigId(dsn_string, test_data)
    return int(result[0][0])
def get_innodb_version(self):
    """ SHOW VARIABLES LIKE innodb_version
        mostly used as a check to ensure if a test
        should/shouldn't be executed
    """
    # execute_query already yields the (retcode, result) pair we return
    return execute_query("SHOW VARIABLES LIKE 'innodb_version'", self)
def slave_start(self):
    """ We issue START SLAVE and wait for IO and SQL threads to start """
    wait_step = .5
    retcode, result = execute_query("START SLAVE", self)
    slave_status = self.get_slave_status()
    io_down = slave_status['slave_io_running'] == 'No'
    sql_down = slave_status['slave_sql_running'] == 'No'
    while io_down or sql_down:
        time.sleep(wait_step)
        slave_status = self.get_slave_status()
        io_down = slave_status['slave_io_running'] == 'No'
        sql_down = slave_status['slave_sql_running'] == 'No'
def get_binlog_info(self):
    """ We try to get binlog information for the server """
    retcode, result_set = execute_query("SHOW MASTER STATUS", self)
    if retcode:
        # on failure result_set carries the error payload; return it in
        # both the file and position slots, as callers expect
        return retcode, result_set, result_set
    first_row = result_set[0]
    return retcode, first_row[0], first_row[1]
def getNextRunId(dsn_string):
    """Returns a new run identifier from the database.

    The run ID is used in logging the results of the run iterations.
    """
    query = "SELECT MAX(run_id) as new_run_id FROM bench_runs"
    retcode, result = execute_query(query, dsn_string=dsn_string)
    max_run_id = result[0][0]
    # fix: MAX() returns NULL (None) when bench_runs is empty; the old
    # `result[0][0] >= 1` comparison raises TypeError on Python 3
    if max_run_id is not None and int(max_run_id) >= 1:
        return int(max_run_id) + 1
    return 1
def getNextRunId(dsn_string):
    """Returns a new run identifier from the database.

    The run ID is used in logging the results of the run iterations.
    """
    query = "SELECT MAX(run_id) as new_run_id FROM bench_runs"
    retcode, result = execute_query(query, dsn_string=dsn_string)
    current_max = result[0][0]
    # fix: guard against an empty bench_runs table - MAX() yields NULL,
    # and comparing None with an int raises TypeError on Python 3
    if current_max is None:
        return 1
    return int(current_max) + 1 if int(current_max) >= 1 else 1
def log_sysbench_run(run_id, config_id, server_name, server_version, run_date, dsn_string):
    """Creates a new run record in the database for this run"""
    # NOTE(review): values are %-interpolated into the SQL; acceptable for
    # trusted, internally generated metadata only.
    query = """INSERT INTO bench_runs (
          run_id
        , config_id
        , server
        , version
        , run_date
        ) VALUES (%d, %d, '%s', '%s', '%s')
    """ % (run_id, config_id, server_name, server_version, run_date)
    retcode, result = execute_query(query, dsn_string=dsn_string)
    return result
def get_slave_status(self):
    """Return SHOW SLAVE STATUS as a dict keyed by lower-cased field name,
    or None when the query fails."""
    query = "SHOW SLAVE STATUS"
    retcode, result_set = execute_query(query, self)
    result_set = result_set[0]
    if retcode:
        return None
    # Columns in SHOW SLAVE STATUS order; index 38
    # (replicate_ignore_server_ids) is deliberately left out, matching
    # the original field list.
    field_names = (
        "slave_io_state", "master_host", "master_user", "master_port",
        "connect_retry", "master_log_file", "read_master_log_pos",
        "relay_log_file", "relay_log_pos", "relay_master_log_file",
        "slave_io_running", "slave_sql_running", "replicate_do_db",
        "replicate_ignore_db", "replicate_do_table",
        "replicate_ignore_table", "replicate_wild_do_table",
        "replicate_wild_ignore_table", "last_errno", "last_error",
        "skip_counter", "exec_master_log_pos", "relay_log_space",
        "until_condition", "until_log_file", "until_log_pos",
        "master_ssl_allowed", "master_ssl_ca_file", "master_ssl_ca_path",
        "master_ssl_cert", "master_ssl_cipher", "master_ssl_key",
        "seconds_behind_master", "master_ssl_verify_server_cert",
        "last_io_errno", "last_io_error", "last_sql_errno",
        "last_sql_error",
    )
    return dict((name, result_set[i]) for i, name in enumerate(field_names))
def get_slave_status(self):
    """Run SHOW SLAVE STATUS and map the first result row into a dict;
    returns None if the query reports an error."""
    retcode, result_set = execute_query("SHOW SLAVE STATUS", self)
    result_set = result_set[0]
    if not retcode:
        # field names in SHOW SLAVE STATUS column order (0..37);
        # 'replicate_ignore_server_ids' (38) is intentionally omitted
        keys = [
            'slave_io_state', 'master_host', 'master_user', 'master_port',
            'connect_retry', 'master_log_file', 'read_master_log_pos',
            'relay_log_file', 'relay_log_pos', 'relay_master_log_file',
            'slave_io_running', 'slave_sql_running', 'replicate_do_db',
            'replicate_ignore_db', 'replicate_do_table',
            'replicate_ignore_table', 'replicate_wild_do_table',
            'replicate_wild_ignore_table', 'last_errno', 'last_error',
            'skip_counter', 'exec_master_log_pos', 'relay_log_space',
            'until_condition', 'until_log_file', 'until_log_pos',
            'master_ssl_allowed', 'master_ssl_ca_file',
            'master_ssl_ca_path', 'master_ssl_cert', 'master_ssl_cipher',
            'master_ssl_key', 'seconds_behind_master',
            'master_ssl_verify_server_cert', 'last_io_errno',
            'last_io_error', 'last_sql_errno', 'last_sql_error',
        ]
        slave_data = {}
        for idx, key in enumerate(keys):
            slave_data[key] = result_set[idx]
        return slave_data
    else:
        return None
def log_sysbench_run(run_id, config_id, server_name, server_version, run_date, dsn_string):
    """Creates a new run record in the database for this run"""
    values = (run_id, config_id, server_name, server_version, run_date)
    query = """INSERT INTO bench_runs (
          run_id
        , config_id
        , server
        , version
        , run_date
        ) VALUES (%d, %d, '%s', '%s', '%s')
    """ % values
    retcode, result = execute_query(query, dsn_string=dsn_string)
    return result
def get_engine_info(self):
    """ Check innodb / xtradb version """
    innodb_version = None
    xtradb_version = None
    retcode, result = execute_query("SHOW VARIABLES LIKE 'innodb_version'", self)
    # result format = (('innodb_version', '1.1.6-20.1'),)
    if result:
        innodb_version = result[0][1]
        # xtradb builds carry their own version after the final dash
        pieces = innodb_version.split('-')
        if len(pieces) > 1:
            xtradb_version = pieces[-1]
    # cache what we learned on the code tree so it isn't re-queried
    self.code_tree.version_checked = True
    self.code_tree.innodb_version = innodb_version
    self.code_tree.xtradb_version = xtradb_version
    return innodb_version, xtradb_version
def getAllRegressionData(server_name, bzr_branch, run_id, dsn_string, test_data):
    """Fetch per-concurrency tps for run_id compared against the aggregate
    stats of every run for the same config / server / branch.
    """
    # NOTE(review): string-built SQL; inputs are internally generated.
    query = """SELECT i.concurrency
        , ROUND(AVG(i.tps), 2) AS tps
        , IF (AVG(i.tps) >= agg.avg_tps
            , CONCAT('+', ROUND(((AVG(i.tps) - agg.avg_tps) / agg.avg_tps) * 100, 2), '%%')
            , CONCAT('-', ROUND(((agg.avg_tps - AVG(i.tps)) / agg.avg_tps) * 100, 2), '%%')
          ) as pct_diff_from_avg
        , ROUND((AVG(i.tps) - agg.avg_tps), 2) as diff_from_avg
        , ROUND(agg.min_tps, 2) AS min_tps
        , ROUND(agg.max_tps, 2) AS max_tps
        , ROUND(agg.avg_tps, 2) AS avg_tps
        , FORMAT(ROUND(agg.stddev_tps, 2),2) AS stddev_tps
        FROM bench_config c
        NATURAL JOIN bench_runs r
        NATURAL JOIN sysbench_run_iterations i
        INNER JOIN (
            SELECT iter.concurrency
            , MIN(tps) as min_tps
            , MAX(tps) as max_tps
            , AVG(tps) as avg_tps
            , STDDEV(tps) as stddev_tps
            FROM bench_config conf
            NATURAL JOIN bench_runs runs
            NATURAL JOIN sysbench_run_iterations iter
            WHERE conf.name = '%s'
            AND runs.server = '%s'
            AND runs.version LIKE '%s%%'
            GROUP BY iter.concurrency
        ) AS agg ON i.concurrency = agg.concurrency
        WHERE r.run_id = %d
        GROUP BY i.concurrency
        ORDER BY i.concurrency
    """ % (test_data['config_name'],
           server_name,
           bzr_branch,
           run_id)
    retcode, result = execute_query(query, dsn_string=dsn_string)
    return result
def set_master(self, master_server, get_cur_log_pos=True):
    """ We do what is needed to set the master_server
        as the replication master
    """
    if not self.status:
        # server isn't running - remember to configure replication later
        self.need_to_set_master = True
        self.master = master_server
        return 0, None
    # we are running and can do things!
    retcode, binlog_file, binlog_pos = master_server.get_binlog_info()
    if retcode:
        return retcode, binlog_file  # contains error msg on failure
    if not get_cur_log_pos:
        binlog_pos = 0
    # update our slave's master info
    query = ("CHANGE MASTER TO "
             "MASTER_HOST='127.0.0.1',"
             "MASTER_USER='******',"
             "MASTER_PASSWORD='',"
             "MASTER_PORT=%d,"
             "MASTER_LOG_FILE='%s',"
             "MASTER_LOG_POS=%d" % (master_server.master_port,
                                    binlog_file,
                                    int(binlog_pos)))
    retcode, result_set = execute_query(query, self)
    if retcode:
        msg = ("Could not set slave: %s.%s\n"
               "With query: %s\n."
               "Returned result: %s" % (self.owner,
                                        self.name,
                                        query,
                                        result_set))
        return 1, msg
    # start the slave
    self.slave_start()
    self.need_to_set_master = False
    self.master = master_server
    return 0, None
def getAllRegressionData(server_name, bzr_branch, run_id, dsn_string, test_data):
    """Return regression data for run_id measured against the aggregate
    (min/max/avg/stddev tps) of all runs sharing this config, server and
    branch prefix.
    """
    params = (test_data['config_name'], server_name, bzr_branch, run_id)
    query = """SELECT i.concurrency
        , ROUND(AVG(i.tps), 2) AS tps
        , IF (AVG(i.tps) >= agg.avg_tps
            , CONCAT('+', ROUND(((AVG(i.tps) - agg.avg_tps) / agg.avg_tps) * 100, 2), '%%')
            , CONCAT('-', ROUND(((agg.avg_tps - AVG(i.tps)) / agg.avg_tps) * 100, 2), '%%')
          ) as pct_diff_from_avg
        , ROUND((AVG(i.tps) - agg.avg_tps), 2) as diff_from_avg
        , ROUND(agg.min_tps, 2) AS min_tps
        , ROUND(agg.max_tps, 2) AS max_tps
        , ROUND(agg.avg_tps, 2) AS avg_tps
        , FORMAT(ROUND(agg.stddev_tps, 2),2) AS stddev_tps
        FROM bench_config c
        NATURAL JOIN bench_runs r
        NATURAL JOIN sysbench_run_iterations i
        INNER JOIN (
            SELECT iter.concurrency
            , MIN(tps) as min_tps
            , MAX(tps) as max_tps
            , AVG(tps) as avg_tps
            , STDDEV(tps) as stddev_tps
            FROM bench_config conf
            NATURAL JOIN bench_runs runs
            NATURAL JOIN sysbench_run_iterations iter
            WHERE conf.name = '%s'
            AND runs.server = '%s'
            AND runs.version LIKE '%s%%'
            GROUP BY iter.concurrency
        ) AS agg ON i.concurrency = agg.concurrency
        WHERE r.run_id = %d
        GROUP BY i.concurrency
        ORDER BY i.concurrency
    """ % params
    retcode, rows = execute_query(query, dsn_string=dsn_string)
    return rows
def set_master(self, master_server, get_cur_log_pos=True):
    """ We do what is needed to set the master_server
        as the replication master
    """
    if not self.status:
        # can't talk to the server yet; defer until it is running
        self.need_to_set_master = True
        self.master = master_server
        return 0, None
    # running - fetch the master's binlog coordinates
    retcode, master_binlog_file, master_binlog_pos = \
        master_server.get_binlog_info()
    if retcode:
        # master_binlog_file contains error msg on failure
        return retcode, master_binlog_file
    if not get_cur_log_pos:
        master_binlog_pos = 0
    # point this slave at the master
    change_master_sql = ("CHANGE MASTER TO "
                         "MASTER_HOST='127.0.0.1',"
                         "MASTER_USER='******',"
                         "MASTER_PASSWORD='',"
                         "MASTER_PORT=%d,"
                         "MASTER_LOG_FILE='%s',"
                         "MASTER_LOG_POS=%d" % (master_server.master_port,
                                                master_binlog_file,
                                                int(master_binlog_pos)))
    retcode, result_set = execute_query(change_master_sql, self)
    if retcode:
        failure_msg = ("Could not set slave: %s.%s\n"
                       "With query: %s\n."
                       "Returned result: %s" % (self.owner, self.name,
                                                change_master_sql,
                                                result_set))
        return 1, failure_msg
    self.slave_start()
    self.need_to_set_master = False
    self.master = master_server
    return 0, None
def log_sysbench_iteration(run_id, concurrency, iteration_data, dsn_string):
    """Write one sysbench iteration's metrics to sysbench_run_iterations.

    iteration_data supplies 'iteration', 'tps', 'rwreqps', 'deadlocksps'
    and the min/max/avg/95p request-latency values (ms).
    Returns the result from execute_query.
    """
    # TODO: make sure we properly capture full commentary
    # (fix: dropped the unused `full_commentary = None` placeholder)
    query = """INSERT INTO sysbench_run_iterations (
          run_id
        , concurrency
        , iteration
        , tps
        , read_write_req_per_second
        , deadlocks_per_second
        , min_req_latency_ms
        , max_req_latency_ms
        , avg_req_latency_ms
        , 95p_req_latency_ms
        ) VALUES (%d, %d, %d, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f, %0.2f)
    """ % (int(run_id),
           int(concurrency),
           int(iteration_data['iteration']),
           iteration_data['tps'],
           iteration_data['rwreqps'],
           iteration_data['deadlocksps'],
           iteration_data['min_req_lat_ms'],
           iteration_data['max_req_lat_ms'],
           iteration_data['avg_req_lat_ms'],
           iteration_data['95p_req_lat_ms'])
    retcode, result = execute_query(query, dsn_string=dsn_string)
    return result