def outputlog(self, logfile, wait_time):
    finish = False

    def timerstop(spread=30):
        # Flag the wait loop to stop once the timeout expires.
        nonlocal finish
        threading.Event().wait(spread)
        finish = True

    threading.Thread(target=timerstop, args=(wait_time, )).start()
    while not self.fileExists(logfile) and not finish:
        threading.Event().wait(1)
    if self.fileExists(logfile):
        cmd = 'tail -f %s \r\n' % logfile
        log.debug(cmd)
        channel, stdin, stdout = self.newChannel()
        channel.invoke_shell()
        stdin.write(to_bytes(cmd))
        stdin.flush()

        def timerstopchannel(spread=30):
            # Close the channel after the timeout so the tail -f loop below can exit.
            threading.Event().wait(spread)
            channel.close()

        threading.Thread(target=timerstopchannel, args=(wait_time, )).start()
        data = stdout.readline()
        while data or not channel.closed:
            log.info(to_text(data))
            data = stdout.readline()
def findMysqldVersion(self, mysqldpath):
    cmd = '%s --version' % mysqldpath
    _, data = self._sshobject.execute_cmd(cmd, False)
    data = to_text(data)
    # e.g.: mysqld  Ver 8.0.17
    r = re.search(r'mysqld\s+Ver\s+([0-9]+\.[0-9]+\.[0-9]+)', data)
    if r:
        return r.group(1)
    return None
def isPortBusy(port, connectobj):
    port = str(port)
    cmd = "netstat -apn|grep -w -E '(\\S+):%s'|grep -w 'LISTEN'|wc -l" % port
    if not re.match(r'[1-9][0-9]*', port):
        return False
    stat, data = connectobj.execute_cmd(cmd, False)
    if stat == ConnectionBase.SHELL_SUCCESS and int(to_text(data)) < 1:
        return False
    return True
def whichPath(software, sshconnect):
    cmd = 'which ' + software
    nostr = 'no ' + software + ' in'
    stat, data = sshconnect.execute_cmd(cmd, False)
    if stat == 0:
        data = to_text(data)
        for s in data.splitlines():
            if s.find(nostr) >= 0:
                return None
        data = data.replace('\r', '').replace('\n', '')
        if sshconnect.isLink(data):
            # Resolve the symlink to the real binary path.
            cmd = 'readlink %s' % data
            stat, tdata = sshconnect.execute_cmd(cmd, False)
            if stat == 0 and tdata:
                return to_text(tdata).replace('\r', '').replace('\n', '')
        return data
    return None
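# Usage sketch (illustrative, not part of the module): `conn` is assumed to be
# the same SSH connection object used above, i.e. one exposing execute_cmd()
# and isLink(); the port and binary name are placeholders.
def _example_locate_mysqld(conn):
    mysqld_path = whichPath('mysqld', conn)  # symlinks are resolved via readlink
    if mysqld_path is None:
        log.info('mysqld not found in PATH')
    elif isPortBusy(3306, conn):
        log.info('mysqld found at %s, but port 3306 is already listening' % mysqld_path)
    return mysqld_path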
def _checkBackupOk(self, logdir):
    logpath = path_join(logdir, self.backup_log_filename)
    if self._sshobject.fileExists(logpath):
        cmd = 'tail -1 %s ' % logpath
        stat, data = self._sshobject.execute_cmd(cmd, False)
        if data:
            data = to_text(data)
            if data.find(self.backup_success_flag) > -1:
                return True
    return False
def wait_process_end(processid):
    # Poll ps until the background process is gone, then close the channel.
    while True:
        threading.Event().wait(2)
        cmd = 'ps --no-header -p %s' % processid
        st, pinfo = self.execute_cmd(cmd, False)
        if st == 0 and pinfo:
            tpid = to_text(pinfo).strip().split()[0]
            if tpid == processid:
                continue
        break
    safe_doing(channel.close)
def outputlog():
    # Wait for the log file to appear, then stream it with tail -f until the
    # channel is closed.
    while not self.fileExists(logfile) and not channel.closed:
        threading.Event().wait(1)
    if self.fileExists(logfile) and not channel.closed:
        cmd = 'tail -f %s \r\n' % logfile
        stdin.write(to_bytes(cmd))
        stdin.flush()
        data = stdout.readline()
        while data or not channel.closed:
            log.info(to_text(data))
            data = stdout.readline()
def execute_cmd(self, cmd, consumeoutput=True, logfile=None, mode='w', wait_time=None):
    try:
        log.debug(cmd)
        channel, _, _ = self.inner_execute_cmd(to_bytes(cmd))
        result = bytes()
        data = channel.recv(self.DEFAULT_BUFFER_SIZE)
        l = None
        if logfile:
            l = open(logfile, mode)

        def timerstop(spread=2):
            # Force the channel closed once the caller-supplied timeout expires.
            threading.Event().wait(spread)
            channel.close()

        if wait_time:
            threading.Thread(target=timerstop, args=(wait_time, )).start()
        while data or not channel.closed:
            if consumeoutput:
                log.info(to_text(data))
            else:
                result += data
            if l:
                l.write(to_text(data))
            data = channel.recv(self.DEFAULT_BUFFER_SIZE)
        stat = channel.recv_exit_status()
        if channel:
            del channel
        if l:
            safe_doing(l.close)
        return stat, result
    except BaseException:
        log.error(traceback.format_exc())
        return None, None
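# Usage sketch (illustrative): assumes execute_cmd() above is a method of
# ParamikoConnection, as suggested by install() further down; host and
# credentials are placeholders.
def _example_execute_cmd():
    with ParamikoConnection('10.0.0.1', 'backup', 'secret', 22) as conn:
        # consumeoutput=False collects the raw bytes in `result` instead of
        # logging them; wait_time=10 lets the timer thread close the channel
        # after ten seconds as a safety timeout.
        stat, out = conn.execute_cmd('df -h', consumeoutput=False, wait_time=10)
        if stat == 0:
            log.info(to_text(out))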
def setBackupCnfFile(self, cnffile=None, force=False):
    if not none_null_stringNone(self.defaults_file[1]) and not force:
        return
    data = to_text(self.getMysqldCommand())
    if data:
        self.setSocket(self.getSocketFromCommand(data))
        # r = re.search('(defaults-file)=([\S]+)', data)
        # if r:
        #     self.defaults_file[1] = r.group(2)
        # else:
        #     self.defaults_file[1] = self.makeCnf()
        self.defaults_file[1] = self.makeCnf()
    else:
        raise MysqldNotRunningException(
            'mysqld process is not running, but it should be')
def getRestoredMysqlBase(self, mysqldpath):
    if not isinstance(mysqldpath, (list, tuple)):
        mysqldpath = (mysqldpath, )
    for mpath in mysqldpath:
        if self._sshobject.isFile(mpath):
            mpath = mpath.rpartition('/')[0]
        cmd = 'cd %s;find . -name mysqld ' % mpath
        stat, mysqlds = self._sshobject.execute_cmd(cmd, False)
        mysqlds = to_text(mysqlds)
        for p in mysqlds.splitlines():
            sv = self.findMysqldVersion(path_join(mpath, p))
            if sv == self.mysql_version[1]:
                # Strip the trailing "/bin/mysqld" to get the installation base.
                return path_join(mpath, p).rpartition('/')[0].rpartition('/')[0]
    raise MysqlVersionNotMatchException(
        'Cannot find a MySQL installation whose version matches the backup files')
def install(config):
    with ParamikoConnection(config.ssh_host, config.ssh_user,
                            config.ssh_passwd, config.ssh_port) as pk:
        execute_cmd = functools.partial(ssh_connect.exec, pk.execute_cmd)
        mkdir = functools.partial(ssh_connect.exec, pk.mkdir)
        mkdir((config.es_base_path, ))
        mkdir((get_yml_value(config.yml_config, *('path', 'data')), ))
        mkdir((get_yml_value(config.yml_config, *('path', 'logs')), ))
        mkdir((path_join(config.es_base_path, 'config'), ))
        mkdir((path_join(config.es_base_path, 'scripts'), ))
        mkdir((path_join(config.es_base_path, 'var'), ))
        mkdir((path_join(config.es_base_path, 'software'), ))
        cmd = 'cp %s %s' % (config.es_tgz_path,
                            path_join(config.es_base_path, 'software'))
        execute_cmd((cmd, ))
        cmd = 'cd %s ; tar -xzpvf %s ' % (
            path_join(config.es_base_path, 'software'),
            os.path.split(config.es_tgz_path)[1])
        execute_cmd((cmd, ))
        software_name = os.path.split(config.es_tgz_path)[1][:-7]
        cmd = 'cp %s %s' % (
            path_join(config.es_base_path,
                      ('software', software_name, 'config', 'log4j2.properties')),
            path_join(config.es_base_path, 'config'))
        execute_cmd((cmd, ))
        cmd = 'cp %s %s' % (
            path_join(config.es_base_path,
                      ('software', software_name, 'config', 'jvm.options')),
            path_join(config.es_base_path, 'config'))
        execute_cmd((cmd, ))
        transferFileToRemote = functools.partial(ssh_connect.exec,
                                                 pk.transferFileToRemote)
        result = transferFileToRemote(
            (os.path.join(os.path.dirname(os.path.abspath(__file__)),
                          'root_execute.sh'),
             path_join(config.es_base_path, ('scripts', 'root_execute.sh'))))
        if config.ssh_user == 'root':
            cmd = 'chmod u+x %s' % path_join(config.es_base_path,
                                             ('scripts', 'root_execute.sh'))
            execute_cmd((cmd, ))
            cmd = 'sh %s' % path_join(config.es_base_path,
                                      ('scripts', 'root_execute.sh'))
            execute_cmd((cmd, ))
        tmp_file = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            'elasticsearch.yml' + formatDateTime() +
            get_yml_value(config.yml_config, *('http', 'host')) +
            get_yml_value(config.yml_config, *('http', 'port')))
        with open(tmp_file, 'w', encoding='utf-8') as f:
            yaml.dump(config.yml_config, f)
        result = transferFileToRemote(
            (tmp_file,
             path_join(config.es_base_path, ('config', 'elasticsearch.yml'))))
        s = 'sed -r -i -e \"s=\"\'^\\s*-Xms[0-9]+g\\s*$\'\"=-Xms%s=\" -e \"s=\"\'^\\s*-Xmx[0-9]+g\\s*$\'\"=-Xmx%s=\" %s'
        for jvm_path in (path_join(config.es_base_path,
                                   ('config', 'jvm.options')),
                         path_join(config.es_base_path,
                                   ('software', software_name, 'config',
                                    'jvm.options'))):
            cmd = s % (config.jvm_heap, config.jvm_heap, jvm_path)
            execute_cmd((cmd, ))
        transferFileToRemote(
            (os.path.join(os.path.dirname(os.path.abspath(__file__)),
                          'start.sh'),
             path_join(config.es_base_path, ('scripts', 'start.sh'))))
        transferFileToRemote(
            (os.path.join(os.path.dirname(os.path.abspath(__file__)),
                          'stop.sh'),
             path_join(config.es_base_path, ('scripts', 'stop.sh'))))
        user_es = 'elasticsearch'
        cmd = 'id -u %s' % user_es
        _, data = execute_cmd((cmd, False))
        data = to_text(data)
        if not re.match(r'^[0-9]+', data):
            cmd = 'useradd -m -s /sbin/nologin -U %s' % user_es
            execute_cmd((cmd, ))
        cmd = 'chown -R %s:%s %s %s %s' % (
            user_es, user_es, config.es_base_path,
            get_yml_value(config.yml_config, *('path', 'data')),
            get_yml_value(config.yml_config, *('path', 'logs')))
        execute_cmd((cmd, ))
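# Usage sketch (illustrative): install() only reads plain attributes from its
# config argument, so a simple namespace class is enough. Every value below is
# a placeholder, and yml_config mirrors the keys read via get_yml_value() above.
class _ExampleEsConfig:
    ssh_host = '10.0.0.2'
    ssh_user = 'root'
    ssh_passwd = 'secret'
    ssh_port = 22
    es_base_path = '/opt/elasticsearch'
    es_tgz_path = '/opt/packages/elasticsearch-7.10.0-linux-x86_64.tar.gz'
    jvm_heap = '4g'
    yml_config = {
        'path': {'data': '/data/es/data', 'logs': '/data/es/logs'},
        'http': {'host': '10.0.0.2', 'port': '9200'},
    }

# install(_ExampleEsConfig)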
def execute_backupground(self,
                         cmd,
                         consumeoutput=True,
                         logfile=None,
                         logmode='r',
                         mode='w',
                         wait=False,
                         wait_time=None,
                         wait_join=False):
    cmd = cmd.strip()
    if cmd[-1] != '&':
        cmd += ' &'
    log.debug(cmd)
    channel, stdin, stdout = self.newChannel()
    channel.invoke_shell()
    stdin.write(to_bytes(cmd + '\r\n'))
    stdin.flush()

    def timerstop(spread=2):
        # Close the channel once the timeout expires.
        threading.Event().wait(spread)
        channel.close()

    def wait_process_end(processid):
        # Poll ps until the background process is gone, then close the channel.
        while True:
            threading.Event().wait(2)
            cmd = 'ps --no-header -p %s' % processid
            st, pinfo = self.execute_cmd(cmd, False)
            if st == 0 and pinfo:
                tpid = to_text(pinfo).strip().split()[0]
                if tpid == processid:
                    continue
            break
        safe_doing(channel.close)

    def outputlog():
        # Wait for the log file to appear, then stream it with tail -f.
        while not self.fileExists(logfile) and not channel.closed:
            threading.Event().wait(1)
        if self.fileExists(logfile) and not channel.closed:
            cmd = 'tail -f %s \r\n' % logfile
            stdin.write(to_bytes(cmd))
            stdin.flush()
            data = stdout.readline()
            while data or not channel.closed:
                log.info(to_text(data))
                data = stdout.readline()

    # The shell echoes something like "[1] 12345"; extract the background pid.
    pid = ''
    data = stdout.readline()
    while data:
        pid = to_text(data)
        if len(pid.split()) > 1:
            pid = pid.split()[1]
        if re.match('[0-9]+', pid):
            break
        data = stdout.readline()
    if wait:
        if wait_time:
            threading.Thread(target=timerstop, args=(wait_time, )).start()
        if re.match('[0-9]+', pid):
            if wait_join or not wait_time:
                threading.Thread(target=wait_process_end, args=(pid, )).start()
            outputlog()
        else:
            safe_doing(channel.close)
    else:
        threading.Thread(target=timerstop, args=(2, )).start()
        while data or not channel.closed:
            if consumeoutput:
                log.info(to_text(data))
            if logfile:
                pass
            data = stdout.readline()
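# Usage sketch (illustrative): start a long-running backup in the background
# and stream its log until the process exits; the command and paths are
# placeholders, and `conn` is the same SSH connection object as above.
def _example_backup_in_background(conn):
    conn.execute_backupground(
        'xtrabackup --backup --target-dir=/data/backup > /tmp/backup.log 2>&1',
        logfile='/tmp/backup.log',  # tailed by outputlog() once it appears
        wait=True,                  # stream output until the channel is closed
        wait_join=True)             # wait_process_end() closes it when the pid exits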
def preInit(self):
    cmd = 'cd ~;pwd'
    _, tmp_dir = self._sshobject.execute_cmd(cmd, False)
    self.tmp_dir = path_join(to_text(tmp_dir), formatDateTime())
    self._sshobject.mkdir(self.tmp_dir)
def setRestoreCnfFile(self):
    cnf = path_join(self.full_dir[1], self.backup_mysql_cnfname)
    if self._sshobject.fileExists(cnf):
        self.defaults_file[1] = cnf
    else:
        raise MysqlCnfFileNotExistsException(
            'can not find mysql config file in backup directory : {}'.format(
                self.full_dir[1]))
    cmd = 'cat %s' % cnf
    stat, cnfContent = self._sshobject.execute_cmd(cmd, False)
    checkStatAndRaise(stat, ReadBackupConfigFileException, cnf)
    self.mysql_cnf_config.read_string(to_text(cnfContent))
    self.old_mysql_port = self.mysql_cnf_config.get('mysqld', 'port',
                                                    fallback=None)
    new_softwarebase = self.mysql_base[1]
    old_softwarebase = self.getOldBackupConfig(
        self.mysql_base[0].replace('-', ''))
    new_data_base = rpartition(self.datadir[1], '/')[0]
    old_data_base = rpartition(
        self.getOldBackupConfig(self.datadir[0].replace('-', '')), '/')[0]

    def replace_cnfconfig(type, sec, option):
        # Swap the old software/data base path for the new one in a path-valued option.
        if type == constant.SOFTWARE_PATH:
            o = old_softwarebase
            n = new_softwarebase
        else:
            o = old_data_base
            n = new_data_base
        ov = self.mysql_cnf_config.get(sec, option, fallback=None)
        if ov:
            self.mysql_cnf_config.set(sec, option, ov.replace(o, n))

    for sec in MYSQL57_CNF_VAR_PREFERENCE.keys():
        for item in MYSQL57_CNF_VAR_PREFERENCE[sec]:
            if isinstance(item, (list, tuple)):
                if item[2]:
                    replace_cnfconfig(item[2], sec, item[0])
    self.mysql_cnf_config.set('mysqld', 'port', str(self._config.port))
    if self.server_id[1]:
        self.mysql_cnf_config.set('mysqld', 'server-id', self.server_id[1])
    for op in ('pid_file', 'socket'):
        o_v = self.mysql_cnf_config.get('mysqld', op, fallback=None)
        self.mysql_cnf_config.set(
            'mysqld', op,
            o_v.replace(str(self.old_mysql_port), str(self._config.port)))
    tmplogpath = self.mysql_cnf_config.get('mysqld', 'log-error', fallback=None)
    if not tmplogpath:
        tmplogpath = path_join(self.log_err_dir, 'log.err')
        self.mysql_cnf_config.set('mysqld', 'log-error', tmplogpath)
    if not self._sshobject.fileExists(tmplogpath):
        cmd = 'touch %s' % tmplogpath
        self._sshobject.execute_cmd(cmd)
    tmplocalpath = path.join(self.getLocalTmpDir(), self.backup_mysql_cnfname)
    with open(tmplocalpath, 'w') as wf:
        self.mysql_cnf_config.write(wf)
    self._sshobject.transferFileToRemote(tmplocalpath, self.defaults_file[1])
def getBackupParam(self, path, parser):
    cmd = 'cat %s' % path
    stat, paramContent = self._sshobject.execute_cmd(cmd, False)
    checkStatAndRaise(stat, ReadBackupParamException, *(path, ))
    parser.read_string(to_text(paramContent))
def getRemoteFileContent(self, filepath):
    cmd = 'cat %s' % filepath
    stat, paramContent = self._sshobject.execute_cmd(cmd, False)
    checkStatAndRaise(stat, ReadRemoteFileContentException, *(filepath, ))
    return to_text(paramContent)