def testing_onebyone():
    """Run every configured test case once, logging and exporting results.

    Creates a time-stamped result directory, registers the run in the
    admin table, configures file logging, then runs each test case listed
    in the local testing config and writes its results into one workbook.
    """
    # One timestamp shared by the result dir, the admin table and the workbook.
    datetime_now = datetime.datetime.now().strftime("DATE%Y-%m-%d_TIME%H-%M")
    create_admin_tbl(datetime_now)
    log_dir = os.path.join(local_config_testing()['cur_path'], 'testing_result', datetime_now)
    if not os.path.isdir(log_dir):
        # os.makedirs replaces the original shell `mkdir -p`: portable and
        # raises on failure instead of silently ignoring it.
        os.makedirs(log_dir)
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                        datefmt='%a, %d %b %Y %H:%M:%S',
                        filename='%s/renderman.log' % (log_dir),
                        filemode='w')
    ex_wb = excel_writer(log_dir, datetime_now + '.xlsx')
    for test_case in local_config_testing()['test_case_list']:
        if test_case not in testing.TestingClassDict:
            print('%s has not been completed...' % (test_case))
        else:
            print('%s has been completed...TIME:%s' % (test_case, datetime_now))
            test_obj = testing.TestingClassDict[test_case](logging, datetime_now)
            test_obj.run_testing()
            test_obj.create_tbl()
            test_obj.close_db()
            test_obj.get_fastest()
            test_obj.write_to_excel(ex_wb)
    ex_wb.save()
def __init__(self, logging, datetime_now):
    """Set up a test case: base-class init, result path, run metadata, DB.

    :param logging: logger used for progress messages during the run.
    :param datetime_now: timestamp string naming this test run.
    """
    testing.__init__(self)
    cfg = local_config_testing()
    # All artifacts for this run live under testing_result/<timestamp>.
    self.profile_path = os.path.join(cfg['cur_path'], 'testing_result', datetime_now)
    self.logging = logging
    self.datetime_now = datetime_now
    self.insert_queue = []
    self.init_db()
def write_to_excel(des_dic, work_book):
    """Dump timing results into a 'temp' sheet of *work_book*.

    Each query occupies one row; each CPU frequency contributes four
    columns (TIME_LINE, Remote_Start_Time, Query_Run_Time, Time_Stamp)
    per repetition.  Header cells are emitted only while writing the
    first data row.

    NOTE(review): the signature has no ``self`` even though callers invoke
    it as a method — confirm whether ``des_dic`` is meant to be the bound
    instance or a plain dict.
    """
    sheet = 'temp'
    work_book.create_sheet(sheet)
    work_book.set_cell(sheet, 1, 1, "rendermanlog")
    start_row = 3
    r = start_row
    for q in des_dic.keys():
        c = 2
        work_book.set_cell(sheet, c, r, q)
        c += 1
        for freq in des_dic[q].keys():
            # sort ascending so the fastest repetition comes first
            des_dic[q][freq].sort()
            if r == start_row:
                work_book.set_cell(sheet, c, r - 2, freq)
            for idx in range(local_config_testing()["every_query_times"]):
                if r == start_row:
                    # column headers sit two rows above the first data row
                    work_book.set_cell(sheet, c, r - 1, 'TIME_LINE')
                    work_book.set_cell(sheet, c + 1, r - 1, 'Remote_Start_Time')
                    work_book.set_cell(sheet, c + 2, r - 1, 'Query_Run_Time')
                    work_book.set_cell(sheet, c + 3, r - 1, 'Time_Stamp')
                if idx >= len(des_dic[q][freq]):
                    print("query:%s with cpu_freq(%s) has not been done,please retest it!" % (q, freq))
                    break
                run_time, line_time, remote_start, stamp = des_dic[q][freq][idx]
                for value in (line_time, remote_start, run_time, stamp):
                    work_book.set_cell(sheet, c, r, value)
                    c += 1
        r += 1
def main():
    """Run each configured query once against Impala, timing execute vs fetch.

    For every query file: drop the OS page caches on all cluster nodes so
    the query starts cold, execute the SQL, then print elapsed time after
    ``execute()`` and after fetching every row.
    """
    from impala.dbapi import connect
    for query in local_config_testing()['query_list']:
        with open(os.path.join(local_config_testing()['query_dir'], query), 'r') as fp:
            conn = connect(host='172.168.0.24', port=21050,
                           database='%s' % (local_config_testing()['DATABASE_NAME']))
            cursor = conn.cursor()
            os.system("""
            echo "clear OS cache on tracing017"
            free && sync && echo 3 >/proc/sys/vm/drop_caches && free
            echo "clear OS cache on tracing024"
            ssh tracing024 "free && sync && echo 3 >/proc/sys/vm/drop_caches && free"
            echo "clear OS cache on tracing025"
            ssh tracing025 "free && sync && echo 3 >/proc/sys/vm/drop_caches && free"
            echo "clear OS cache on tracing026"
            ssh tracing026 "free && sync && echo 3 >/proc/sys/vm/drop_caches && free"
            echo "clear OS cache on tracing027"
            ssh tracing027 "free && sync && echo 3 >/proc/sys/vm/drop_caches && free"
            """)
            time1 = time.time()
            # NOTE(review): str.strip removes a *character set*, not the literal
            # prefix "profile;" — kept as-is to preserve behavior, but verify
            # removeprefix-style handling is not what was intended.
            sql = fp.read().strip('profile;\n')
            sql = sql.strip('; ')
            try:
                cursor.execute('%s' % (sql))
            except Exception:  # was a bare except: narrow it so ^C still works
                cursor.close()
                conn.close()
                print('%s wrong' % (query))
                continue
            time2 = time.time()
            while True:
                row = cursor.fetchone()
                if row:
                    print(row)
                else:
                    break
            time3 = time.time()
            print('res: %s %s %s' % (query, time2 - time1, time3 - time1))
            cursor.close()
            conn.close()
def __init__(self):
    """Load the testing configuration and helper objects for a benchmark run."""
    # Read the config once instead of re-invoking local_config_testing()
    # for every single field (the original called it ten times).
    cfg = local_config_testing()
    self.impalad_nodes = cfg['impalad_nodes']
    self.cpufreq_range = cfg['CPUFreq_range']
    self.query_list = cfg['query_list']
    self.query_dir = cfg['query_dir']
    self.every_query_times = cfg['every_query_times']
    self.impala_server = cfg['IMPALA_SERVER']
    self.database_name = cfg['DATABASE_NAME']
    self.sh_path = os.path.join(cfg['cur_path'], 'testing_onebyone', 'runsql.sh')
    self.network = cfg['net_work']
    # results accumulate here as {query: {cpufreq: [[run_time, ...], ...]}}
    self.des_dic = {}
    self.CPUFreqSet_OBJ = CPUFreqSet()
    self.check_sys_state_obj = check_sys_state()
def create_admin_tbl(datetime_now):
    """Record this test run in the admin_control table.

    Stores the run's table name, scale factor and full config dumps so a
    run can later be traced back to the exact settings that produced it.

    :param datetime_now: timestamp string identifying the run; dashes are
        replaced with underscores to form a legal table name.
    """
    def quote(value):
        # Double single-quotes so the str() dump survives SQL quoting.
        # The statement is still built by interpolation — values here come
        # from internal config, not user input.
        return str(value).replace("'", "''")

    cfg = local_config_testing()
    sql_insert = """
    insert into admin_control(tbl_name,scale_factor,db_format,database_name,test_case,global_config,local_config_gen,local_config_testing)
    values('%s',%d,'%s','%s','%s','%s','%s','%s')
    """ % (datetime_now.replace('-', '_'),
           cfg['tpcds_scale_factor'],
           quote(cfg['db_format']),
           cfg['DATABASE_NAME'],
           '|'.join(cfg['test_case_list']),
           quote(global_config),
           quote(local_config_gen()),
           quote(cfg))
    db = mydb(postgres_db_connector)
    db.runsql(sql_insert)
    db.commit()
    db.close()
def run_testing(self):
    """Execute every query at every CPU frequency, repeating each run.

    Results accumulate in ``self.des_dic[query][cpufreq]`` as
    ``[query_run_time, time_line, remote_start_time, time_stamp]`` lists
    and are queued as INSERT statements on ``self.insert_queue``.  A
    PROCESSING file in the profile dir tracks completion percentage.

    :returns: ``self.des_dic`` with each per-frequency list sorted ascending.
    """
    sum_task = len(self.query_list) * len(self.cpufreq_range) * self.every_query_times
    cur_task = 0

    def write_progress(done):
        # Overwrite the PROCESSING marker with the completed percentage
        # (extracted: the original duplicated this open/write twice).
        with open(os.path.join(self.profile_path, 'PROCESSING'), 'w') as pf:
            pf.write('%s%%' % (str(float(done) / float(sum_task) * 100)))

    write_progress(cur_task)
    for query in self.query_list:
        if query not in self.des_dic:
            self.des_dic[query] = {}
        for cpufreq in self.cpufreq_range:
            if cpufreq not in self.des_dic[query]:
                self.CPUFreqSet_OBJ.set(cpufreq)
                self.des_dic[query][cpufreq] = []
                for i in range(self.every_query_times):
                    self.clear_cache()
                    if self.check_sys_state_obj.check_sys_io():
                        self.logging.info('"cpu_freq:%s---%s %s %s %s %s" has been started...'
                                          % (str(cpufreq), self.sh_path, self.impala_server,
                                             self.query_dir, self.database_name, query))
                        cmd = '%s %s %s %s %s %s %s' % (self.sh_path, self.impala_server,
                                                        self.query_dir, self.database_name, query,
                                                        local_config_testing()['sys_log'],
                                                        self.profile_path)
                        proc = os.popen(cmd, 'r')
                        lines_list = proc.readlines()
                        proc.close()  # close as soon as output is captured
                        try:
                            # runsql.sh output: time_stamp on line -3,
                            # local finish time on -2, remote start time on -1
                            time_stamp = lines_list[-3].split(':')[-1].strip('\n')
                            time_line = get_time_by_second(lines_list[-2].split(' ')[-1])
                            remote_start_time = get_time_by_second((lines_list[-1].split(' ')[-2]).strip('\n'))
                            query_run_time = time_line - remote_start_time
                            self.des_dic[query][cpufreq].append(
                                [query_run_time, time_line, remote_start_time, time_stamp])
                            sql = """insert into %s(query_name,time_stamp,query_run_time,time_line,remote_start_time) values('%s',%d,'%s','%s','%s')""" % (
                                self.datetime_now.replace('-', '_'), query, int(time_stamp),
                                query_run_time, time_line, remote_start_time)
                            self.insert_queue.append(sql)
                        except Exception:
                            import traceback
                            traceback.print_exc()
                            print(''.join(str_i for str_i in lines_list))
                        else:
                            # BUG FIX: the original logged these names *after*
                            # the except block, raising NameError whenever
                            # parsing failed; log only on success.
                            self.logging.info('"cpu_freq:%s---%s %s %s %s %s" has been finished in %ss!time_stamp is %s'
                                              % (str(cpufreq), self.sh_path, self.impala_server,
                                                 self.query_dir, self.database_name, query,
                                                 str([query_run_time, time_line, remote_start_time]),
                                                 time_stamp))
                        cur_task += 1
                        write_progress(cur_task)
            self.des_dic[query][cpufreq].sort()
    return self.des_dic
def __init__(self):
    """Cache the impalad node list from the local testing config."""
    config = local_config_testing()
    self.impalad_nodes = config['impalad_nodes']
def __init__(self):
    """Cache impalad nodes and the per-node CPU frequency map from config."""
    config = local_config_testing()
    self.impalad_nodes = config['impalad_nodes']
    self.cpu_freq_dict = config['cpu_freq_dict']