def exec_shell(self, ssh, params):
    """Run this task's shell on the remote host via the inner caller.

    self.in_back selects async vs sync dispatch; self.in_root decides
    whether the command runs as root.
    """
    mode = "async_call_shell" if self.in_back else "sync_call_shell"
    cmd = "{0} {1} {2} {3} {4}".format(
        self.inner_caller, mode, self.task, self.shell, params)
    SSHUtil.exec_ret(ssh, cmd, self.in_root)
def ssh_login():
    """Log in over the SSH session as the working user, then as root.

    Raises Exception with the retry count and detail text on either
    failure (closure vars: ssh, user, ip, times).
    """
    ok, err = SSHUtil.user_login(ssh, user)
    if not ok:
        raise Exception("{} {}登录失败\n重试次数:{}\n详细信息:{}".format(
            ip, user, times, err))
    ok, err = SSHUtil.user_login(ssh, 'root')
    if not ok:
        raise Exception("{} root登录失败\n重试次数:{}\n详细信息:{}".format(
            ip, times, err))
def ssh_check():
    """Probe the SSH session end to end, retrying up to retry_times.

    One round = user login, root login, a trivial exec, and a settings
    upload; returns True as soon as one round fully succeeds.
    """
    for _ in range(retry_times):
        round_ok = (
            SSHUtil.user_login(ssh, user)[0],
            SSHUtil.user_login(ssh, 'root')[0],
            SSHUtil.exec_ret(ssh, 'echo')[0],
            SSHUtil.upload_file(ssh, Global.G_SETTINGS_FILE, remote_file)[0],
        )
        if all(round_ok):
            return True
    return False
def post_handle():
    # Strip CR characters from every uploaded *.sh on the server so the
    # remote shell can execute them (the host may not have dos2unix).
    # Each file is copied to <file>_tmp, rewritten without "\r", and the
    # temp copy removed.
    # NOTE(review): the trailing '&' backgrounds each `rm` — looks
    # unintentional (a plain `rm` would do); confirm before changing.
    # dos2unix
    dos2unix_cmd = '''
    for file in {0}/*.sh
    do
        cp $file ${{file}}_tmp
        cat ${{file}}_tmp | tr -d "\\r" >$file
        rm ${{file}}_tmp &
    done
    '''.format(server_dir)
    SSHUtil.exec_ret(ssh, dos2unix_cmd)
def upload_package():
    """Upload the package zip to the server dir, unzip it, open perms.

    Both failure paths raise Exception carrying the retry count
    (closure vars: ssh, server_dir, pack_path, ip, times).
    """
    remote_path = "{0}/{1}".format(server_dir, Global.G_PACK_ZIP)
    unzip_cmd = "cd {0} && unzip -o {1} && chmod 777 {0}/*".format(
        server_dir, Global.G_PACK_ZIP)
    ok, err = SSHUtil.upload_file(ssh, pack_path, remote_path)
    if not ok:
        raise Exception(
            "{} 登录失败\n重试次数:{}\nUpload package failed:{}".format(
                ip, times, err))
    ok, err = SSHUtil.exec_ret(ssh, unzip_cmd, root=True)
    if not ok:
        raise Exception(
            "{} 登录失败\n重试次数:{}\nDecompression failed:{}".format(
                ip, times, err))
def refresh_cache_data(self, args=None):
    """Timer job: run the configured cache scripts on each logged-in host
    and merge the key/value lines they print into server_cache_data.

    Args:
        args: unused; kept for the timer-callback signature.
    """
    logger.debug('[timer] refresh cache data start...')
    server_dir = model_gate.settings_data.server_dir
    scripts = model_gate.settings_data.refresh_cache['scripts']
    server_cache_data = model_gate.server_cache_data.get_data()
    try:
        for ip, data in model_gate.login_state_data.get_data().items():
            ssh = data['SSH']
            for script in scripts:
                cmd = "{}/{}".format(server_dir, script)
                # Script output: <header>G_INNER_SPLIT<payload>; each
                # payload line is "key G_CACHE_SPLIT v1 v2 ...".
                result = SSHUtil.exec_info(ssh, cmd, True)[0]
                result = result.split(Global.G_INNER_SPLIT)[1].strip()
                for line in result.split('\n'):
                    key, value = line.split(Global.G_CACHE_SPLIT)
                    key = key.strip()
                    value = value.strip().split()
                    logger.debug("modify cache: {}: {}".format(
                        ip, {key: value}))
                    # setdefault collapses the previous if/else
                    # "create-then-assign" branches into one step.
                    server_cache_data.setdefault(ip, {})[key] = value
            model_gate.server_cache_data.set_data(server_cache_data)
    except Exception:
        # Unused `as e` binding removed; the full traceback is logged.
        logger.error("refresh_cache_data {}".format(
            traceback.format_exc()))
    logger.debug(
        '[timer] refresh cache data end, server_cache_data: {}'.format(
            server_cache_data))
def upload_file(self, ssh, ip, uploads, offset):
    """Upload each local file to the server upload dir with live progress.

    Progress is scaled into [0, offset]. Returns False on the first
    failed transfer, True when every file uploads.
    """
    # Don't call the state-updating Caller from the transfer callback —
    # it would slow the upload. The callback only records numbers in
    # memory; a helper thread pushes the progress periodically.
    def upload_back(current, total):
        size[0] = current
        size[1] = total

    def get_progress():
        return int(size[0] / size[1] * offset)

    def update_thread():
        while not is_done:
            self.return_exec_start_result(ip, get_progress(), '', True)
            Common.sleep(0.2)

    size, is_done = [0, 1], False
    server_upload = Global.G_SERVER_UPLOAD % model_gate.settings_data.server_dir
    for local in uploads:
        # BUGFIX: re-arm the flag for every file. Previously is_done was
        # left True after the first upload, so the progress thread
        # started for the 2nd+ file exited immediately and no progress
        # was reported for those transfers.
        is_done = False
        remote = "{0}/{1}".format(server_upload, Common.basename(local))
        Common.create_thread(func=update_thread, args=())
        self.insert_text_info(ip, get_progress(),
                              'Uploading {}'.format(local))
        ret, err = SSHUtil.upload_file(ssh, local, remote, upload_back)
        is_done = True  # stop this file's progress thread
        if not ret:
            self.insert_text_info(
                ip, get_progress(),
                'Upload {} failed: {}'.format(local, err), 'ERROR')
            self.return_exec_start_result(ip, get_progress(), '', False)
            return False
    self.return_exec_start_result(ip, get_progress(), '', True)
    return True
def download_file(self, ssh, ip, file):
    """Fetch a remote file into the per-ip download directory.

    A file of "" or "NULL" means there is nothing to download and is
    treated as success. Returns False when the transfer itself fails.
    """
    if file in ("", "NULL"):
        self.insert_text_info(ip, 100, 'Success')
        return True
    target_dir = "{0}\\{1}".format(Global.G_DOWNLOAD_DIR, ip)
    target = "{0}\\{1}".format(target_dir, Common.basename(file))
    Common.mkdir(target_dir)
    self.insert_text_info(ip, 100, 'Downloading to {}'.format(target))
    if not SSHUtil.download_file(ssh, remote=file, local=target):
        return False
    self.insert_text_info(ip, 100, "Download success")
    return True
def refresh_file_data(self, args=None):
    """Timer job: run the refresh-file scripts on each host, zip the
    server download dir, pull the archive back and unpack it locally.

    Args:
        args: unused; kept for the timer-callback signature.
    """
    logger.debug('[timer] refresh file data start...')
    server_dir = model_gate.settings_data.server_dir
    scripts = model_gate.settings_data.refresh_file['scripts']
    server_download = Global.G_SERVER_DOWNLOAD % server_dir
    try:
        for ip, data in model_gate.login_state_data.get_data().items():
            ssh = data['SSH']
            local_download = "{}\\{}".format(Global.G_DOWNLOAD_DIR, ip)
            data_dir = "{}\\__FILE_DATA__".format(local_download)
            Common.mkdir(local_download)
            Common.mkdir(data_dir)
            # First run for this ip: kill any script processes left over
            # from a previous session so the latest code is what runs.
            if ip not in self.is_first_run:
                # Plain for-loop instead of the old side-effect-only
                # list comprehension (which built a throwaway list).
                for script in scripts:
                    SSHUtil.exec_ret(
                        ssh, "killall {}".format(script), True)
                self.is_first_run[ip] = True
            # Launch every script in one remote invocation.
            cmd = ''
            for script in scripts:
                cmd = "{0}\n{1}/{2}".format(cmd, server_dir, script)
            SSHUtil.exec_ret(ssh, cmd, True)
            # Then compress the server-side DOWNLOAD directory.
            cmd = 'cd {0} && zip refresh_file.zip *;chmod 777 *.zip'.format(
                server_download)
            SSHUtil.exec_ret(ssh, cmd, True)
            # Pull the archive down...
            SSHUtil.download_file(
                ssh, "{}/refresh_file.zip".format(server_download),
                '{}\\refresh_file.zip'.format(local_download))
            # ...and finally unpack it locally.
            Common.unzip_file(
                '{}\\refresh_file.zip'.format(local_download), data_dir)
    except Exception as e:
        logger.error("RefreshTimer refresh_file_impl {}".format(str(e)))
    logger.debug('[timer] refresh file data end')
def init_server():
    """Reset the remote work directory for a fresh session."""
    # A previous login by a different user can leave files that make the
    # later unzip fail, so wipe the directory contents on every login.
    # FIX: "mkdir -p" instead of plain "mkdir" — the directory usually
    # already exists (we just emptied it with rm -rf dir/*), and plain
    # mkdir errors in that case; -p is idempotent and also creates any
    # missing parent directories.
    cmd = "rm -rf {0}/*; mkdir -p {0}; chmod 777 {0}".format(server_dir)
    SSHUtil.exec_ret(ssh, cmd, root=True)
def kill_shell(self, ssh):
    """Ask the remote inner caller to kill this task's shell (as root)."""
    cmd = "%s kill_shell %s %s" % (self.inner_caller, self.task, self.shell)
    SSHUtil.exec_ret(ssh, cmd, True)
def execute_out(self, ssh, cmd):
    """Run cmd as root and return the stripped payload that follows
    the G_INNER_SPLIT marker in its output."""
    output = SSHUtil.exec_info(ssh, cmd, True)[0]
    payload = output.split(Global.G_INNER_SPLIT)[1]
    return payload.strip()