def Sync_data(online_host, online_path, local_tmp_data_path):
    """Rsync the online data tree ``online_host::online_path`` into ``local_tmp_data_path``.

    Stdout lines are echoed and collected; stderr lines are collected and
    logged on failure. Returns 0 on success, 1 on rsync failure (NOTE: the
    sibling helpers in this file return -1 on failure; kept as-is for callers).
    """
    Log.log("[%s] Update data\n" % get_now_time())
    # Normalize the remote path for rsync module syntax:
    # no leading '/', exactly one trailing '/'.
    rsync_path = online_path
    if rsync_path[0:1] == "/":
        rsync_path = rsync_path[1:]
    if rsync_path[len(rsync_path) - 1:] != "/":
        rsync_path = rsync_path + "/"
    # Local destination also needs a trailing '/'.
    arg2 = local_tmp_data_path
    if local_tmp_data_path[len(local_tmp_data_path) - 1:] != "/":
        arg2 = local_tmp_data_path + "/"
    arg = "%s::%s" % (online_host, rsync_path)
    stdlog = ""
    errlog = ""
    asycmd = asycommands.TrAsyCommands(timeout=30 * 60)
    asycmd_list.append(asycmd)
    for iotype, line in asycmd.execute_with_data(['rsync', '-ravl', arg, arg2],
                                                 shell=False):
        # BUG FIX: was `iotype is 1` / `iotype is 2` — identity comparison with
        # int literals; only worked via CPython small-int caching. Use `==`.
        if iotype == 1:    # stdout
            stdlog += line + '\n'
            print(line)
        elif iotype == 2:  # stderr
            errlog += line + '\n'
    if asycmd.return_code() != 0:
        Log.log("[%s] Update data Error\n" % get_now_time())
        Log.log(errlog)
        return 1
    Log.log("[%s] Update data success\n" % get_now_time())
    return 0
def deploy_once(path, start_script, port):
    """Launch ``start_script`` inside ``path`` and wait until ``port`` is listened on.

    Returns 0 on success (or immediately when ``path`` is empty), -1 when the
    launch fails; failure diagnostics collected by ``lanch`` are written to the log.
    """
    launch_log = [path, start_script, port]
    if path == "":
        return 0
    #log_file = path + "/client_application/" + err_name
    asycmd = asycommands.TrAsyCommands(timeout=120)
    asycmd_list.append(asycmd)
    interval_time = 10 * 60
    # Start Query: lanch appends its own diagnostics into launch_log.
    ret, pid = lanch(path, start_script, port, launch_log)
    if ret >= 0:
        # Start OK
        Log.log("[%s] query start ok, use %d s\n" % (get_now_time(), ret))
        return 0
    # Launch failed: timestamp and flush everything collected so far.
    time.sleep(0.5)
    stamped = "".join("[%s] %s\n" % (get_now_time(), entry) for entry in launch_log)
    Log.log("%s\n" % stamped)
    return -1
def lanch(path, start_script, port, log):
    """Run ``start_script`` in ``path`` and wait for the resulting process.

    Contract: start_script must write its pid to a `PID` file (echo $! > PID).
    If ``port`` == -1 the port check is skipped. Diagnostics are appended to
    ``log`` (mutated in place).

    Returns a tuple (retcode, pid): retcode >= 0 is the seconds waited until
    the port was listening; -1 script failed, -2 no pid, -3 process died.
    """
    pid = -1
    asycmd = asycommands.TrAsyCommands(timeout=30)
    asycmd_list.append(asycmd)
    child = subprocess.Popen(['/bin/sh', start_script], shell=False, cwd=path,
                             stderr=subprocess.PIPE)
    child.wait()
    if child.returncode != 0:
        log.append(child.stderr.read())
        child.stderr.close()  # BUG FIX: pipe was leaked
        return (-1, pid)
    child.stderr.close()  # BUG FIX: pipe was leaked on the success path too
    # Read the pid the script wrote to `PID`.
    for iotype, line in asycmd.execute_with_data(['/bin/cat', path + "/PID"],
                                                 shell=False):
        if iotype == 1 and line != "":
            try:
                pid = int(line)
            except ValueError:  # was a bare except; only int() can fail here
                continue
    if pid == -1:
        return (-2, pid)
    proc = None
    try:
        proc = psutil.Process(pid)
    except Exception:  # psutil.NoSuchProcess et al.; bare except hid Ctrl-C
        log.append("process %d is not alive" % pid)
        return (-3, pid)
    # BUG FIX: was `port is -1` — identity comparison with an int literal.
    if port == -1:
        return (0, pid)
    is_alive = True
    start_time = 0
    proc_list.append(pid)
    # Poll once per second until the process listens on `port` or dies.
    # NOTE(review): no upper bound on the wait — relies on the caller/process.
    while is_alive:
        try:
            conn_list = proc.connections()
        except Exception:  # process went away
            is_alive = False
            break
        listened = False
        for conn in conn_list:
            if (conn.status == "LISTEN" or conn.status == "NONE") and conn.laddr[1] == port:
                listened = True
                break
        if listened:
            break
        time.sleep(1)
        start_time += 1
    if not is_alive:
        log.append("process start failed")
        proc_list.remove(pid)
        return (-3, pid)
    return (start_time, pid)
def get_perf_res(log_file, result):
    """Run the perf awk tool over ``log_file``, appending output lines to ``result``.

    ``result`` is mutated in place. Returns -1 when ``log_file`` does not
    exist, otherwise the awk process return code. NOTE: both stdout and
    stderr lines are appended (iotype is not filtered).
    """
    # Idiom fix: was `os.path.exists(log_file) == False`.
    if not os.path.exists(log_file):
        result.append(log_file + " is not exists")
        return -1
    asycmd = asycommands.TrAsyCommands(timeout=180)
    #asycmd_list.append(asycmd)
    for iotype, line in asycmd.execute_with_data(
            ['/bin/awk', '-f', perf_tool, log_file], shell=False):
        result.append(line)
    return asycmd.return_code()
def make_env(path):
    """Build the tree at ``path`` with ``make -j``, retrying once on failure.

    The original duplicated the whole make/collect sequence for the retry;
    this folds both attempts into one loop with identical behavior.
    Returns 0 on success, -1 when both attempts fail (collected stderr is
    written via update_errorlog).
    """
    asycmd = asycommands.TrAsyCommands(timeout=600)
    make_log = ""
    for _attempt in range(2):  # first try + one retry on timeout/error
        make_log = ""
        for iotype, line in asycmd.execute_with_data(['make', '-j'],
                                                     shell=False, cwd=path):
            if iotype == 2:  # stderr only
                make_log += line + "\n"
        if asycmd.return_code() == 0:
            update_errorlog("Make Success\n")
            return 0
    update_errorlog(make_log)
    return -1
def check_lanch(path, start_script, port, err_name):
    """Launch ``start_script`` via lanch() and dump diagnostics on failure.

    On failure (ret < 0) the collected log plus the last 50 lines of
    ``path + err_name`` are written via update_errorlog.
    Returns a tuple (ret, pid) — see lanch() for ret semantics.
    """
    log = []
    if path == "":
        # BUG FIX: returned a bare 0 while every other path returns a tuple;
        # callers unpack `ret, pid = check_lanch(...)` and would crash here.
        return 0, -1
    log_file = path + err_name
    asycmd = asycommands.TrAsyCommands(timeout=30)
    #asycmd_list.append(asycmd)
    (ret, pid) = lanch(path, start_script, port, log)
    if ret < 0:
        time.sleep(0.5)
        up_log = ""
        for line in log:
            up_log += "%s\n" % line
        update_errorlog("%s\n" % up_log)
        # Also surface the tail of the service's own error log.
        up_log = ""
        for iotype, line in asycmd.execute_with_data(
                ['/bin/tail', '-50', log_file], shell=False):
            up_log += line + '\n'
        update_errorlog("%s\n" % up_log)
    return ret, pid
def gcov_check(gcov_path, press_path, basesvn, testsvn, err_name="err"):
    """End-to-end gcov coverage run: check out code, build with gcov, press it,
    collect gcda, generate HTML, diff against base svn, and record the result URL.

    basesvn/testsvn are newline-separated "key=url" lists; only keys whose
    urls differ are diffed. Returns 0 on success, -1 on any step failure
    (the step is reported via update_errorlog).
    """
    # Kill any leftover services from a previous run before starting over.
    os.popen('killall -9 lt-websummaryd lt-summarytest CAPTURE_RESOURCE')
    time.sleep(5)
    # A fresh checkout is needed every time; remove the directory if present.
    if os.path.exists(gcov_path):
        shutil.rmtree(gcov_path)
    asycmd = asycommands.TrAsyCommands(timeout=60 * 30)
    #asycmd_list.append(asycmd)
    # Check out the code under test.
    ret = checkcode_env(gcov_path, testsvn)
    if ret != 0:
        update_errorlog("check gcov code Error, pls check\n")
        set_status(3)
        return -1
    update_errorlog("check gcov code OK\n")
    # Copy the helper scripts from gcov_tool into the checkout.
    try:
        os.popen('cp -r %s %s' % (gcov_tool + "*", gcov_path))
    except Exception as err:
        print("[gcov_check:%s]" % err)
        return -1
    # os.popen is asynchronous: sleep so the copy finishes before the next
    # shell step, otherwise it can fail on missing files.
    time.sleep(3)
    # Build with make-gcov.sh (instrumented build).
    make_gcov_sh = gcov_path + "make-gcov.sh"
    for iotype, line in asycmd.execute_with_data(['/bin/sh', make_gcov_sh],
                                                 shell=False, cwd=gcov_path):
        pass
    if asycmd.return_code() != 0:
        update_errorlog("sh make-gcov.sh Error\n")
        return -1
    # Verify the .gcno files exist (proof the instrumented build worked).
    try:
        web_sum = check_file_suffix(gcov_path + "WebSummary/", 'gcno')
        sum_kernel = check_file_suffix(
            gcov_path + "summary_kernel/Kernel/.libs", 'gcno')
        if not web_sum:
            update_errorlog("no gcno file in WebSummary, Error\n")
            return -1
        if not sum_kernel:
            update_errorlog("no gcno file in summary_kernel, Error\n")
            return -1
    except Exception as err:
        print("[gcov_check:%s]" % err)
        return -1
    update_errorlog("make gcov Success\n")
    # Symlink data/conf/start.sh from the test environment into the build.
    gcov_env = gcov_path + 'WebSummary/'
    ret = prepare_symbolic_link(gcov_env)
    if ret != 0:
        update_errorlog("prepare symbolic link for %s Error\n" % gcov_env)
        set_status(3)
        return -1
    update_errorlog("prepare symbolic link for %s Success\n" % gcov_env)
    # Adjust the config: listen port, standby DB, cache size, etc.
    gcov_cf = root_path + test_conf + "norm_onsum01.cfg"
    ret = modify_sum_conf(gcov_cf, db_standby, test_sum_port, sum_cache_size)
    if ret == -1:
        update_errorlog("modify config:%s error\n" % gcov_cf)
        return -1
    # Start the gcov-instrumented summary service on port 19018.
    ret, sum_pid = check_lanch(gcov_env, "start.sh",
                               19018, err_name)
    if ret < 0:
        update_errorlog("Gcov Summary start failed")
        return -1
    update_errorlog("Gcov Summary start ok, use %d s\n" % ret)
    # Start the pressure (load) tool; port -1 means no listen-port check.
    press_err_name2 = err_name + "2"  #err2
    ret, tool2_pid = check_lanch(press_path, "start2.sh", -1, press_err_name2)
    if ret < 0:
        update_errorlog("Press Tool start failed")
        return -1
    update_errorlog("Press Tool start ok, use %d s\n" % ret)
    update_errorlog("gcov press start, about 20min\n")
    # Wait for the pressure run to finish.
    wait_to_die(tool2_pid, 5 * 60)
    update_errorlog("Press Tool stoped\n")
    # Run makegcda-gcov.sh to flush the .gcda files from the live process.
    #sum_pid = 17791
    make_gcda_sh = gcov_path + "makegcda-gcov.sh"
    for iotype, line in asycmd.execute_with_data(
            ['/bin/sh', make_gcda_sh, str(sum_pid)], shell=False, cwd=gcov_path):
        pass
    if asycmd.return_code() != 0:
        update_errorlog("sh makegcda-gcov.sh Error\n")
        return -1
    # Verify the .gcda files exist.
    try:
        web_sum = check_file_suffix(gcov_path + "WebSummary/", 'gcda')
        sum_kernel = check_file_suffix(
            gcov_path + "summary_kernel/Kernel/.libs", 'gcda')
        if not web_sum:
            update_errorlog("no gcda file in WebSummary, Error\n")
            return -1
        if not sum_kernel:
            update_errorlog("no gcda file in summary_kernel, Error\n")
            return -1
    except Exception as err:
        print("[gcov_check:%s]" % err)
        return -1
    update_errorlog("generate gcda Success\n")
    # Run collect-gcov.sh: gather the gcda reports into .info files.
    collect_gcda_sh = gcov_path + "collect-gcov.sh"
    for iotype, line in asycmd.execute_with_data(['/bin/sh', collect_gcda_sh],
                                                 shell=False, cwd=gcov_path):
        pass
    if asycmd.return_code() != 0:
        update_errorlog("sh collect-gcov.sh Error\n")
        return -1
    update_errorlog("collect gcda Success\n")
    # Run genhtml-gcov.sh into a unique directory (MD5 of the current time)
    # so each run's report does not overwrite the previous one.
    gen_html_sh = gcov_path + "genhtml-gcov.sh"
    gcov_dir_suffix = "gcov_" + md5_convert(get_now_time())
    gcov_dir = gcov_repo_path + gcov_dir_suffix
    for iotype, line in asycmd.execute_with_data(
            ['/bin/sh', gen_html_sh, gcov_dir], shell=False, cwd=gcov_path):
        pass
    if asycmd.return_code() != 0:
        update_errorlog("sh genhtml-gcov.sh Error\n")
        return -1
    update_errorlog("generate html Success\n")
    # svn-diff each changed module against base, then render the diff
    # coverage view with `php diffviewer.php`.
    dict_basesvn = {}
    dict_testsvn = {}
    username = "******"
    password = "******"
    diff_view_php = gcov_path + "diffviewer.php"
    # Parse "key=url" lines into dicts. NOTE(review): assumes every
    # non-blank line contains '=' — an empty trailing line would raise.
    for line in basesvn.split("\n"):
        line = line.strip()
        key = line.split('=')[0]
        value = line.split('=')[1]
        dict_basesvn[key] = value
    for line in testsvn.split("\n"):
        line = line.strip()
        key = line.split('=')[0]
        value = line.split('=')[1]
        dict_testsvn[key] = value
    for key in dict_basesvn:
        if dict_basesvn[key] != dict_testsvn[key]:
            # Write the unified (-U0) svn diff for this module to a file.
            file_name = gcov_path + 'svndiff-gcov-out_' + key
            f = open(file_name, 'w')
            child = subprocess.Popen(['svn', 'diff', '--diff-cmd=diff', '-x', '-U0', \
                                      dict_basesvn[key], dict_testsvn[key],\
                                      '--username', username, '--password', password],\
                                     shell=False, cwd = gcov_path, stdout = f.fileno())
            child.wait()
            f.close()
            # Render the per-module diff coverage page.
            for iotype, line in asycmd.execute_with_data([
                    'php', diff_view_php, file_name, gcov_repo_path,
                    gcov_dir_suffix, key
            ], shell=False, cwd=gcov_path):
                pass
            if asycmd.return_code() != 0:
                update_errorlog("php diffviewer.php Error\n")
                return -1
    update_errorlog("get svndiff and diff_view_php Success\n")
    gcov_result = os.path.join(gcov_dir, 'append.html')
    if not os.path.exists(gcov_result):
        update_errorlog("gcov result is not exists\n")
        return -1
    # Publish the result URL into the mission row in MySQL.
    ip = get_host_ip()
    http_result = 'http://' + ip + "/" + gcov_dir_suffix + "/" + 'append.html'
    print(http_result)
    sql = "UPDATE %s set code_gcov_result='%s' where id=%d" % (
        database_table, http_result, mission_id)
    cursor.execute(sql)
    db.commit()
    update_errorlog("gcov result write mysql Success\n")
    return 0
def sync_ol_to_local(rsync_type):
    """Rsync the online 'data' or 'conf' tree to the local base dir, then copy it to the test dir.

    rsync_type: 'data' or 'conf'. Returns 0 on success, -1 on failure.
    """
    if rsync_type == 'data':
        base_path = root_path + base_data
        test_path = root_path + test_data
    elif rsync_type == 'conf':
        base_path = root_path + base_conf
        test_path = root_path + test_conf
    else:
        # BUG FIX: any other value previously fell through and crashed with
        # NameError on base_path; fail explicitly instead.
        update_errorlog("unknown rsync_type:%s\n" % rsync_type)
        return -1
    os.popen('rm -rf %s' % test_path)
    # Idiom fix: was `os.path.exists(base_path) == False`.
    if not os.path.exists(base_path):
        print("save ol_%s's path not exists, mkdir -p" % rsync_type)
        update_errorlog("ol_%s path not exists, mkdir -p\n" % rsync_type)
        os.popen("mkdir -p " + base_path)
    update_errorlog("start rsync ol_%s to local\n" % rsync_type)
    # Normalize the remote path: strip leading '/', ensure trailing '/'.
    rsync_path = online_path
    if rsync_path[0] == "/":
        rsync_path = rsync_path[1:]
    if rsync_path[-1] != "/":
        rsync_path = rsync_path + "/"
    rsync_path = rsync_path + rsync_type + "/"
    arg = "%s::odin/%s" % (online_host, rsync_path)
    arg2 = base_path
    if base_path[-1] != "/":
        arg2 = base_path + "/"
    stdlog = ""
    errlog = ""
    asycmd = asycommands.TrAsyCommands(timeout=30 * 60)
    for iotype, line in asycmd.execute_with_data(['rsync', '-ravl', arg, arg2],
                                                 shell=False):
        if iotype == 1:
            stdlog += line + '\n'
            print("[sync_ol_to_local] stdlog:%s" % line)
        elif iotype == 2:
            errlog += line + '\n'
            print("[sync_ol_to_local] errlog:%s" % line)
    if asycmd.return_code() != 0:
        update_errorlog("rsync ol_%s to local Error\n" % rsync_type)
        update_errorlog(errlog)
        return -1
    update_errorlog("rsync ol_%s to local Success\n" % rsync_type)
    # For 'data', also copy the symlink target files under
    # norm_onsum01/data/base from the online query host.
    if rsync_type == 'data':
        if not os.path.exists(symbolic_link_path):
            print("save symbolic_link path not exists, mkdir -p")
            update_errorlog("symbolic_link path not exists, mkdir -p\n")
            os.popen("mkdir -p " + symbolic_link_path)
        for iotype, line in asycmd.execute_with_data([
                'rsync', '-ravlu',
                'rsync.query001.web.djt.ted::odin/search/odin/daemon/data_agent/data/base/',
                '/search/odin/daemon/data_agent/data/base/'
        ], shell=False):
            if iotype == 1:
                stdlog += line + '\n'
            elif iotype == 2:
                errlog += line + '\n'
        if asycmd.return_code() != 0:
            update_errorlog("rsync symbolic link file to local Error\n")
            update_errorlog(errlog)
            return -1
        update_errorlog("rsync symbolic link file to local Success\n")
    # For 'conf', strip the extra per-region files.
    if rsync_type == 'conf':
        os.popen("rm -rf " + base_path + "/{1.djt,1.gd,gd,js,1.tc}")
    # Copy the base tree to test_data / test_conf.
    try:
        os.popen('cp -r %s %s' % (base_path, test_path))
        update_errorlog("copy test_%s Success\n" % rsync_type)
    except Exception as err:
        update_errorlog("copy test_%s Error:%s\n" % (rsync_type, err))
        return -1
    return 0