def read_result(benchmark_run, kusto_client):
    """Assemble a Result row for one finished benchmark run.

    Pulls timing from CLperfDB, the tuned settings from the run's config
    and property overrides, the throughput/latency metrics from the
    benchmark results, and the failover/backup measurements from Kusto.

    :param benchmark_run: run object carrying run_id and override dicts
    :param kusto_client: authenticated KustoClient for the telemetry queries
    :return: a populated Result
    """
    run_id = benchmark_run.run_id
    start_time = CLperfDB.benchmark_start_time(benchmark_run)
    end_time = CLperfDB.benchmark_end_time(benchmark_run)
    max_outstanding_io = benchmark_run.config_overrides.config_dict[
        'SQL.Config_RgSettings_LocalVolumeMaxOutstandingIo']
    volume_io_request_size_bytes = benchmark_run.config_overrides.config_dict[
        'SQL.Config_RgSettings_LocalVolumeIORequestSizeBytes']
    max_log_rate = benchmark_run.property_overrides.instance_settings_dict[
        'MaxLogRate']

    results = CLperfDB.read_results_benchmark(benchmark_run)
    # The original used two bare `except:` blocks to default missing metrics
    # to None; dict.get expresses that directly. AttributeError covers the
    # case where no results were recorded at all (results is None).
    try:
        transactions_per_minute = results.get('Transactions per minute')
        response_time_90th_percentile = results.get('90th percentile')
    except AttributeError:
        transactions_per_minute = None
        response_time_90th_percentile = None

    failover_kusto_test = KustoFailover.failover_kusto_test(
        kusto_client, benchmark_run)
    backup_kusto_test = KustoBackup.backup_kusto_test(
        kusto_client, benchmark_run)
    return Result(run_id, max_outstanding_io, volume_io_request_size_bytes,
                  max_log_rate, start_time, end_time, transactions_per_minute,
                  response_time_90th_percentile, failover_kusto_test,
                  backup_kusto_test)
def failover_kusto_test(client, benchmark_run):
    """Query Kusto for failover statistics of one benchmark run.

    :param client: authenticated KustoClient
    :param benchmark_run: run whose time window and server scope the query
        is narrowed to
    :return: (redo_stats_avg, failover_time_stats_max) tuple; (-1, -1) when
        the run has no recorded time window, (-2, -2) when the query fails.
    """
    start_time = CLperfDB.benchmark_start_time(benchmark_run)
    end_time = CLperfDB.benchmark_end_time(benchmark_run)
    if start_time is None or end_time is None:
        return (-1, -1)
    # str.replace instead of re.sub: these are literal placeholder swaps, and
    # re.sub would misinterpret any backslash escapes in the substituted text.
    query_ = (hadr_replica_states_query
              .replace("start_time_variable", start_time)
              .replace("end_time_variable", end_time)
              .replace("server_name_variable",
                       benchmark_run.get_logical_server_name()))
    print("Querying kusto (failover info)")
    print("Query text \n " + query_)
    try:
        failover_response = client.execute("sqlazure1", query_)
        failover_df = dataframe_from_result_table(
            failover_response.primary_results[0])
        redo_stats_avg = failover_df['redo_stats_avg'].max()
        failover_time_stats_max = failover_df['failover_time_stats_max'].max()
        return (redo_stats_avg, failover_time_stats_max)
    except Exception as exc:  # narrowed from bare except; keep the sentinel
        print("Kusto failover query failed: " + repr(exc))
        return (-2, -2)
def backup_kusto_test(client, benchmark_run):
    """Query Kusto for the longest backup duration during one benchmark run.

    :param client: authenticated KustoClient
    :param benchmark_run: run whose time window and server scope the query
        is narrowed to
    :return: max backup_duration_min; -1 when the run has no recorded time
        window, -2 when the query fails.
    """
    # NOTE(review): the return values were discarded in the original too --
    # presumably these calls populate benchmark_run.start_time / end_time as
    # a side effect; confirm against CLperfDB before removing them.
    CLperfDB.benchmark_start_time(benchmark_run)
    CLperfDB.benchmark_end_time(benchmark_run)
    print(benchmark_run.start_time)
    print(benchmark_run.end_time)
    if benchmark_run.start_time is None or benchmark_run.end_time is None:
        return -1
    # str.replace instead of re.sub: literal placeholder swaps, no regex
    # replacement-escape surprises.
    query_ = (backup_query
              .replace("start_time_variable", benchmark_run.start_time)
              .replace("end_time_variable", benchmark_run.end_time)
              .replace("server_name_variable",
                       benchmark_run.get_logical_server_name()))
    print("Querying kusto (backup info)")
    print("Query text \n " + query_)
    try:
        backup_response = client.execute("sqlazure1", query_)
        backup_df = dataframe_from_result_table(
            backup_response.primary_results[0])
        return backup_df['backup_duration_min'].max()
    except Exception as exc:  # narrowed from bare except; keep the sentinel
        print("Kusto backup query failed: " + repr(exc))
        return -2
def read_results(self):
    """Fetch this run's benchmark results and build its Target.

    Bug fix: the original passed the undefined name `results_json_list`
    to Target (a guaranteed NameError); the fetched value is bound to
    `results_json`.
    """
    results_json = CLperfDB.read_results_benchmark(self)
    self.target = Target(results_json, self.run_id)
from Config_Overrides import Config_Overrides
from Property_Overrides import Property_Overrides
from Benchmark_Configs import Benchmark_Configs
from Benchmark_run import Benchmark_run
from Job import Job
import CLperfDB

# Build one randomly-configured benchmark run against a fixed test server
# and schedule it as a single-run job.
random_config = Config_Overrides.choose_random_config_overrides()
random_properties = Property_Overrides.choose_random_property_overrides()
server_configs = Benchmark_Configs.default_run_server_name_only(
    'clperftesting-gen5-bc8-loose24-neu-00.neu187d1a144a72d.sqltest-eg1.mscds.com'
)

db_connection = CLperfDB.connect()
scheduled_run = Benchmark_run(db_connection, random_config, random_properties,
                              server_configs)
print(scheduled_run)

scheduled_job = Job(db_connection, 'Dubravka_external_schedule',
                    [scheduled_run], 'scheduling from Python')
print(CLperfDB.schedule_job(scheduled_job))
import re
import CLperfDB
from datetime import datetime
from datetime import timedelta
import pandas as pd

# NOTE(review): module-level connect keeps the original import-time side
# effect; other modules may rely on this shared connection.
con = CLperfDB.connect()


def single_to_double_quotes(string_with_single_quotes):
    """Return the string with every single quote replaced by a double quote."""
    return re.sub("'", '"', string_with_single_quotes)


def current_timestamp():
    """Return the current timestamp as a compact MMDDHHMMSS string.

    Simplification: the original formatted the datetime, then re-parsed it
    with a regex and concatenated groups 1, 2, 4, 5, 6 -- which is exactly
    month, day, hour, minute, second. A single strftime produces the same
    value directly.
    """
    return datetime.now().strftime("%m%d%H%M%S")


def export_dataframe(df, file_name):
    """Write *df* as CSV to '<file_name><timestamp>.csv'.

    Bug fix: the original computed the CSV text and the target file name
    but never wrote anything to disk.
    """
    out_path = file_name + current_timestamp() + '.csv'
    df.to_csv(out_path)
from azure.kusto.data.request import KustoClient, KustoConnectionStringBuilder from azure.kusto.data.exceptions import KustoServiceError from azure.kusto.data.helpers import dataframe_from_result_table from Benchmark_run import Benchmark_run from Benchmark_Configs import Benchmark_Configs import KustoBackup import CLperfDB cluster_name = "https://sqlstage.kustomfa.windows.net" kcsb = KustoConnectionStringBuilder.with_aad_device_authentication( cluster_name) client = KustoClient(kcsb) connection = CLperfDB.connect() benchmark_configs_object = Benchmark_Configs( action_name="TPCC", processor_count=8, is_bc=1, hardware_generation="SVMLoose", environment="SVMStage", should_restore=0, priority=100, worker_number=100, benchmark_scaling_argument=10500, scaled_down=1, server_name= "clperftesting-gen5-bc8-loose24-neu-00.neu187d1a144a72d.sqltest-eg1.mscds.com", database_name="tpcc10500", warmup_timespan_minutes=1, run_timespan_minutes=5,
from jobs import Job, Benchmark_run
import brain
import CLperfDB
import time

# Seconds to sleep between polls of a running job's status.
SLEEP_FOR_RESULTS = 1
NUMBER_OF_JOBS = 1  #I suppose this won't be convergence condition

if __name__ == "__main__":
    connection = CLperfDB.connect()
    # Seed the first job from a JSON config file on disk.
    with open("first_job.json", "r+") as starting_job_file:
        starting_job_configs = starting_job_file.read()
    job = Job(connection, "Dummy job", starting_job_configs,
              "First job scheduled from Python")
    number_of_finished_jobs = 0
    job_results = []
    # NOTE(review): nothing in the visible loop body increments
    # number_of_finished_jobs or advances `job` to `next_job`, so as shown
    # this loop never terminates -- the rest of the body is presumably
    # outside this chunk; confirm before changing.
    while number_of_finished_jobs < NUMBER_OF_JOBS:  #I suppose this won't be convergence condition
        CLperfDB.schedule_job(job)
        next_job = brain.decide_for_next_job(
            connection, job, job_results
        )  #maybe this will decide for list of jobs, not one job...
        # Block until the scheduled job reports finished.
        while not CLperfDB.is_job_finished(job):
            time.sleep(SLEEP_FOR_RESULTS)
from Benchmark_run import Benchmark_run
from Result import Result
import CLperfDB
import time, json, plot, common_functions
from datetime import datetime
from azure.kusto.data.request import KustoClient, KustoConnectionStringBuilder
import pandas as pd

# Hard limits for one experiment session.
LIMIT_BENCHMARK_RUNS = 30
EXPECTED_DURATION_OF_SLEEPING_IN_MINUTES = 10

cluster_name = "https://sqlazureweu2.kustomfa.windows.net"
kustoStringBuilder = KustoConnectionStringBuilder.with_aad_device_authentication(
    cluster_name)
kusto_client = KustoClient(kustoStringBuilder)
connection = CLperfDB.connect()

if __name__ == "__main__":
    #make kusto client fakely
    # Throwaway query so the AAD device-auth prompt happens up front,
    # before any real work is scheduled.
    fake_response = kusto_client.execute("sqlazure1", "MonBackup|take 1")
    number_of_scheduled_benchmarks = 0
    # Bug fix: the original used open(...).read() and leaked the file
    # handle; a with-block closes it deterministically.
    with open('Configurations/instances.json', 'r+') as instances_file:
        instances_json = instances_file.read()
    instances = json.loads(instances_json)
    #for all jobs
    successful_runs = []
    results = []
    failed_runs = []
    instance_occupation = {}