def isBackupOK(self, backupLog):
    """ Check the backup log file to see whether the backup finished successfully """
    isSuccess = False
    masterFile = errInfo = ""
    masterPos = -1
    if not Util.isExists(backupLog):
        errInfo = "backup log [{0}] not exist".format(backupLog)
        return isSuccess, masterFile, masterPos, errInfo
    rePosition = re.compile("position[\s]+([0-9]+)")
    reFileName = re.compile("filename[\s]+\'([0-9a-z-.]+)\'", re.IGNORECASE)
    reErrInfo = re.compile("Error:([\s\S]+)", re.IGNORECASE)
    lines = Util.popen("tail -10 {0}".format(backupLog)).readlines()
    for line in lines:
        info = reErrInfo.search(line)
        if info != None:
            errInfo = info.group(1)
            break
        if line.find("innobackupex: completed OK!") != -1:
            isSuccess = True
            break
        info = reFileName.search(line)
        if info != None:
            masterFile = info.group(1)
        info = rePosition.search(line)
        if info != None:
            try:
                masterPos = long(info.group(1))
            except:
                masterPos = -1
    if not isSuccess:
        errInfo = ' '.join(lines)
    return isSuccess, masterFile, masterPos, errInfo
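# A quick, self-contained check of the three patterns used by isBackupOK above.
# The sample lines are shaped the way the regexes expect; they are illustrative,
# not verbatim innobackupex output.
import re

rePosition = re.compile(r"position[\s]+([0-9]+)")
reFileName = re.compile(r"filename[\s]+'([0-9a-z-.]+)'", re.IGNORECASE)
reErrInfo = re.compile(r"Error:([\s\S]+)", re.IGNORECASE)

sample = "MySQL binlog position: filename 'mysql-bin.000007', position 1234"
print(reFileName.search(sample).group(1))    # mysql-bin.000007
print(rePosition.search(sample).group(1))    # 1234
print(reErrInfo.search("innobackupex: Error: connect failed").group(1).strip())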
def getSlowLog(self, count, endRow):
    """ Get slow log content: startRow is the start row, endRow the end row;
        an endRow of -1 means the last row """
    slowLog = self.getValue(self.configFile, "slow_query_log_file")
    if slowLog == "":
        slowLog = self.getValue(self.configFile, "slow-query-log-file")
    if slowLog == "":
        return False, "slow query log not configured in mysql config file"
    path, name = Util.pathSplit(slowLog)
    if path == "":
        dataDir = self.getValue(self.configFile, "datadir")
        if dataDir == "":
            return False, "can not find data dir for mysql"
        else:
            slowLog = "{0}/{1}".format(dataDir, slowLog)
    if not Util.isExists(slowLog):
        return False, "slow log file not exist"
    startRow = endRow - count
    if startRow < 0:
        startRow = 0
    cmd = "sed -n '%d,%d'p %s" % (startRow + 1, endRow, slowLog)
    lines = Util.popen(cmd).readlines()
    return True, "".join(lines)
def getErrorLog(self, count, endRow):
    """ Get error log content: startRow is the start row, endRow the end row;
        an endRow of -1 means the last row """
    errLog = self.getValue(self.configFile, "log_error")
    if errLog == "":
        errLog = self.getValue(self.configFile, "log-error")
    if errLog == "":
        return False, "error log not configured in mysql config file"
    path, name = Util.pathSplit(errLog)
    if path == "":
        dataDir = self.getValue(self.configFile, "datadir")
        if dataDir == "":
            return False, "can not find data dir for mysql"
        else:
            errLog = "{0}/{1}".format(dataDir, errLog)
    if not Util.isExists(errLog):
        return False, "error log file not exist"
    # count the total rows, then turn the offset-from-the-end into an absolute end row
    cmd = "sed -n '$=' %s" % errLog
    rows = long(Util.popen(cmd).readlines()[0])
    endRow = rows - endRow
    if endRow < 1:
        endRow = rows % count
    startRow = endRow - count
    if startRow < 0:
        startRow = 0
    cmd = "sed -n '%d,%d'p %s" % (startRow + 1, endRow, errLog)
    lines = Util.popen(cmd).readlines()
    return True, "".join(lines)
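# A minimal sketch of the end-relative windowing used by getErrorLog/getFile,
# assuming a plain text file; the path, count and offset below are examples only.
import subprocess

def tail_window(path, count, end_offset):
    out = subprocess.check_output(["sed", "-n", "$=", path])   # total line count
    rows = int(out.strip() or 0)
    end_row = rows - end_offset          # absolute end row, counted from the end
    if end_row < 1:
        end_row = rows % count           # wrap the same way getErrorLog does
    start_row = max(end_row - count, 0)
    cmd = "sed -n '%d,%dp' %s" % (start_row + 1, end_row, path)
    return subprocess.check_output(cmd, shell=True)

# e.g. tail_window("/var/log/mysqld.log", 100, 0) returns the last 100 lines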
def getInfos2(self):
    self.lock.acquire()
    msgInfo = Util.deepCopy(self.msgInfo)
    dfMsg = Util.deepCopy(self.dfMsg)
    self.lock.release()
    keys = msgInfo.keys()
    ret = {}
    for key in keys:
        infoList = msgInfo[key]
        head = infoList[0].split()
        infoList = infoList[1:]
        tempDir = {}
        for info in infoList:
            temp = info.split()
            if key == 'cpu':
                for i in xrange(1, len(temp)):
                    tempDir[head[i]] = temp[i]
            elif key == 'memory':
                for i in xrange(len(temp)):
                    tempDir[head[i]] = temp[i]
            # DEV or IFACE
            else:
                d = {}
                for i in xrange(1, len(temp)):
                    d[head[i]] = temp[i]
                if key == 'disk':
                    tps = float(d['tps'])
                    wsec = float(d['wr_sec/s'])
                    rsec = float(d['rd_sec/s'])
                    if tps == 0:
                        d['wtps'] = '0.00'
                        d['rtps'] = '0.00'
                    else:
                        # guard against division by zero
                        if wsec + rsec == 0:
                            wtps = 0
                            rtps = 0
                        else:
                            wtps = tps * wsec / (wsec + rsec)
                            rtps = tps - wtps
                        d['wtps'] = "%.02f" % wtps
                        d['rtps'] = "%.02f" % rtps
                tempDir[temp[0]] = d
        ret[key] = tempDir
    if ret.has_key('disk'):
        partition_used = {}
        for line in dfMsg:
            key, value = line.split()
            partition_used[key] = value[:-1]
        ret['disk']['partition'] = partition_used
    return ret
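# Worked example of the wtps/rtps split in getInfos2: the per-device disk block
# (as in older sysstat/sar output) reports a combined tps plus rd_sec/s and
# wr_sec/s, so tps is apportioned by the sector ratio. The numbers are made up.
tps, rd_sec, wr_sec = 10.0, 300.0, 100.0
wtps = tps * wr_sec / (wr_sec + rd_sec) if (wr_sec + rd_sec) else 0.0
rtps = tps - wtps
print("%.02f %.02f" % (wtps, rtps))   # 2.50 7.50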
def backupByMysqldump(self, backupDir):
    # mysqldump -A -R -E -uuser -ppassword --socket=...
    # -A: dump all databases
    # -R: dump stored procedures and functions
    # -E: dump events
    # --master-data takes 1 or 2: with 1 the CHANGE MASTER statement runs
    #   automatically when the sql script is loaded, with 2 it must be run by hand.
    #   In effect it controls whether CHANGE MASTER is commented out in the
    #   sql script: 1 leaves it active, 2 comments it out.
    # --dump-slave, when run on a slave, records the position on the master,
    #   similar to innobackupex's --slave-info; running it on a master fails.
    isSuccess = False
    masterFile = ''
    masterPos = -1
    errMsg = ''
    fileSize = -1
    # Start modified by hzluqianjie for IM-25 at 2016-1-12: added error redirection
    cmd = "{0}/bin/mysqldump --user={1} --password={2} --socket={3} -A -E -R --master-data=2 1> {4}/backup.sql 2>{5}/error".format(
        self.installPath, self.user, self.passwd, self.socket, backupDir, backupDir)
    lines = Util.popen(cmd).readlines()
    if len(lines) != 0:
        return isSuccess, masterFile, '{0}'.format(masterPos), ' '.join(
            lines), '{0}'.format(fileSize)
    lines = Util.popen('cat {0}/error'.format(backupDir)).readlines()
    if len(lines) != 0:
        # scan the redirected stderr for Error lines; return failure if one is found
        for line in lines:
            if re.match('^mysqldump: Error:(.*?)', line, re.I):
                return isSuccess, masterFile, '{0}'.format(
                    masterPos), ' '.join(lines), '{0}'.format(fileSize)
    # End modified by hzluqianjie for IM-25 at 2016-1-12: added error redirection
    cmd = "head -n 50 {0}/backup.sql".format(backupDir)
    lines = Util.popen(cmd).readlines()
    if len(lines) == 0:
        errMsg = "backup file not exist"
        return isSuccess, masterFile, '{0}'.format(
            masterPos), errMsg, '{0}'.format(fileSize)
    for line in lines:
        if line.find("CHANGE MASTER TO") != -1:
            line = line.split(';')[0]
            temp = line.split(',')
            masterFile = temp[0].split('=')[1]
            masterFile = masterFile.strip("'")
            masterPos = temp[1].split('=')[1]
            break
    if masterFile != '' and masterPos != -1:
        fileSize = Util.getFileSize("{0}/backup.sql".format(backupDir))
        isSuccess = True
        errMsg = backupDir
    else:
        errMsg = "can't get master info from backup file"
    return isSuccess, masterFile, '{0}'.format(
        masterPos), errMsg, '{0}'.format(fileSize)
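# A minimal sketch of parsing the CHANGE MASTER line that --master-data=2 puts
# near the top of the dump; the sample line and the helper name are illustrative,
# not part of the agent's API.
import re

def parse_change_master(line):
    m = re.search(r"MASTER_LOG_FILE='([^']+)',\s*MASTER_LOG_POS=(\d+)", line)
    if m is None:
        return '', -1
    return m.group(1), int(m.group(2))

sample = "-- CHANGE MASTER TO MASTER_LOG_FILE='mysql-bin.000042', MASTER_LOG_POS=107;"
print(parse_change_master(sample))   # ('mysql-bin.000042', 107)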
def backup(self, timestamp, backupType, binlogFile, backupTool):
    """ Back up the data with innobackupex (or mysqldump / binlog archive,
        depending on backupType and backupTool) """
    backupDir = "{0}/{1}".format(self.backupDir, timestamp)
    Util.popen("mkdir -p {0}".format(backupDir))
    Util.popen("chmod 0755 {0}".format(backupDir))
    if backupType == "incremental":
        return self.backupBinlogInfo(backupDir, binlogFile)
    elif backupTool == "innobackupex" or backupTool == "xtrabackup":
        return self.backupByInnobackupex(backupDir)
    elif backupTool == "mysqldump":
        return self.backupByMysqldump(backupDir)
def backupBinlogInfo(self, backupDir, startBinlog):
    binlogList = self.getLatestBinlogList(startBinlog)
    if len(binlogList) == 0:
        return False, '', '-1', 'binlog later than %s not found' % startBinlog, '-1'
    filestr = ' '.join(binlogList)
    lastBinlog = binlogList[-1]
    cmd = "tar czf {0}/{1}.tar.gz -C {2} {3}".format(
        backupDir, lastBinlog, self.dataDir, filestr)
    lines = Util.popen(cmd).readlines()
    if len(lines) != 0:
        return False, '', '-1', ' '.join(lines), '-1'
    fileSize = Util.getFileSize("{0}/{1}.tar.gz".format(
        backupDir, lastBinlog))
    return True, lastBinlog, '0', backupDir, '{0}'.format(fileSize)
def getRequest(self):
    # sys.getsizeof(msg)
    return Util.obj2Json({
        'head': self.head,
        'content': self.content,
        'timestamp': self.timestamp
    })
def __init__(self, head=None, content=None):
    # avoid mutable default arguments: a shared default dict would otherwise be
    # mutated across every AgentRequest built without arguments
    self.head = head if head is not None else {}
    self.content = content if content is not None else []
    self.timestamp = Util.getTimeStr()
    for h, v in AgentRequest.headList.iteritems():
        if h not in self.head:
            self.head[h] = v
def getInfos(self):
    self.lock.acquire()
    msgInfo = Util.deepCopy(self.msgInfo)
    self.lock.release()
    keys = msgInfo.keys()
    ret = {}
    for key in keys:
        infoList = msgInfo[key]
        head = infoList[0].split()
        infoList = infoList[1:]
        tempList = []
        for info in infoList:
            temp = info.split()
            if key == 'cpu':
                for i in xrange(1, len(temp)):
                    tempList.append({head[i]: temp[i]})
            elif key == 'memory':
                for i in xrange(len(temp)):
                    tempList.append({head[i]: temp[i]})
            # DEV or IFACE
            else:
                l = []
                for i in xrange(1, len(temp)):
                    l.append({head[i]: temp[i]})
                d = {temp[0]: l}
                tempList.append(d)
        ret[key] = tempList
    return ret
def createInstance(self, agentConfig):
    try:
        pidFile = '{0}/agent.pid'.format(agentConfig.get_agent_tmpdir())
        fileName = Util.pathSplit(__file__)[1]
        return AgentInstance(pidFile, fileName)
    except KeyError, e:
        raise AgentAtrrException(
            'AgentManager.createInstance KeyError: %s' % e)
def backupByInnobackupex(self, backupDir):
    isSuccess = False
    masterFile = ''
    masterPos = -1
    errMsg = ''
    fileSize = -1
    backupLog = "{0}/backup.log".format(backupDir)
    cmd = "innobackupex --lock-wait-timeout=3600 --lock-wait-threshold=5 --lock-wait-query-type=all " \
          "--defaults-file={0} --socket={1} --slave-info --stream=tar --user={2} --password={3} --tmpdir={4} {4} 2>{5} | gzip > {4}/backup.tar.gz".format(
              self.configFile, self.socket, self.user, self.passwd, backupDir, backupLog)
    lines = Util.popen(cmd).readlines()
    if len(lines) != 0:
        return isSuccess, masterFile, '{0}'.format(masterPos), ' '.join(
            lines), '{0}'.format(fileSize)
    fileSize = Util.getFileSize("{0}/backup.tar.gz".format(backupDir))
    isSuccess, masterFile, masterPos, errMsg = self.isBackupOK(backupLog)
    if isSuccess:
        errMsg = backupDir
    return isSuccess, masterFile, '{0}'.format(
        masterPos), errMsg, '{0}'.format(fileSize)
def getFile(self, count, endRow):
    """ Get the content of filePath: startRow is the start row, endRow the end row;
        an endRow of -1 means the last row """
    path, name = Util.pathSplit(self.filePath)
    if path == "":
        return False, "can not find dir"
    if not Util.isExists(self.filePath):
        return False, "file not exist"
    cmd = "sed -n '$=' %s" % self.filePath
    rows = long(Util.popen(cmd).readlines()[0])
    endRow = rows - endRow
    if endRow < 1:
        endRow = rows % count
    startRow = endRow - count
    if startRow < 0:
        startRow = 0
    cmd = "sed -n '%d,%d'p %s" % (startRow + 1, endRow, self.filePath)
    lines = Util.popen(cmd).readlines()
    return True, "".join(lines)
def getLatestBinlogList(self, startBinlog):
    cmd = "ls %s | grep %s | grep -v %s.index" % (
        self.dataDir, self.binlogName, self.binlogName)
    binlogNo = 0
    if startBinlog.find(self.binlogName) != -1:
        binlogNo = long(startBinlog.split('.')[1])
    binlogList = Util.popen(cmd).readlines()
    retList = []
    for item in binlogList:
        item = item.split('/')[-1]
        index = long(item.split('.')[1])
        if not index < binlogNo:
            retList.append("%s.%06d" % (self.binlogName, index))
    return retList
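# A small in-memory illustration of the index filtering done by
# getLatestBinlogList; the file names are made up.
def binlogs_not_older_than(names, base, start):
    start_no = int(start.split('.')[1]) if start.startswith(base) else 0
    return ["%s.%06d" % (base, int(n.split('.')[1]))
            for n in names if int(n.split('.')[1]) >= start_no]

names = ["mysql-bin.000040", "mysql-bin.000041", "mysql-bin.000042"]
print(binlogs_not_older_than(names, "mysql-bin", "mysql-bin.000041"))
# ['mysql-bin.000041', 'mysql-bin.000042']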
def main():
    agentManager = None
    try:
        # parse the command line for the config file path and the console flag
        optParser = AgentOptionParser()
        cfgFile = Util.getRealPath(optParser.configFile)
        console = optParser.console
        # load the configuration files
        userConfig, sysConfig = AgentConfigManager.initAllConfig(cfgFile)
        # in console mode run in the foreground, otherwise daemonize
        if not console:
            Daemon()
        else:
            userConfig.log['logSection'] = "debug"
        # initialize the log handle; every log-related call must come after this
        AgentLog.init(userConfig.log['logConfig'], userConfig.log['logSection'])
        agentManager = AgentManager(userConfig, sysConfig)
        agentManager.start()
        while True:
            Util.sleep(5)
        agentManager.stop()
    except KeyboardInterrupt:
        if agentManager is not None:
            agentManager.stop()
    finally:
        if agentManager is not None:
            agentManager.stop()
def getValue(self, configFile, key):
    """ Get the value of the given key from the config file """
    if not Util.isExists(configFile):
        raise AgentFileException(
            'Mysql can not find config File from path :%s' % configFile)
    try:
        with open(configFile, 'r') as f:
            for line in f:
                if line.find(key) != -1:
                    key = (line.split('=')[0]).strip()
                    if key[0] != '#':
                        value = (line.split('=')[1]).strip()
                        print value
                        return value
    except IOError, e:
        raise AgentFileException(
            'Mysql can not find config File from path :%s' % configFile)
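# A minimal illustration of the key = value lookup that getValue performs,
# against an in-memory my.cnf snippet; option names and values are examples only.
sample_cnf = """[mysqld]
datadir = /var/lib/mysql
log_error = mysqld.err
# slow_query_log_file = disabled.log
"""

def lookup(text, key):
    for line in text.splitlines():
        if key in line and '=' in line:
            name, _, value = line.partition('=')
            if name.strip() and not name.strip().startswith('#'):
                return value.strip()
    return ""

print(lookup(sample_cnf, "log_error"))   # mysqld.err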
def upload_scp(self, params, srcDir):
    remoteHost = params.get('remoteHost', None)
    remotePort = params.get('remotePort', None)
    remoteUser = params.get('remoteUser', None)
    remotePasswd = params.get('remotePassword', None)
    backupPath = params.get('backupPath', None)
    uploadLimit = params.get('uploadLimit', None)
    if remoteHost == None or remotePort == None or remoteUser == None or \
            remotePasswd == None or backupPath == None or uploadLimit == None:
        return False, 'remote host information errors'
    uploadLimit = long(uploadLimit)
    AgentLog.info("start upload backup data to remote server")
    cmd = "scp -r -P %s %s %s@%s:%s" % (remotePort, srcDir, remoteUser,
                                        remoteHost, backupPath)
    if uploadLimit > 0:
        # scp -l takes the limit in Kbit/s, hence the *8 (the limit is presumably given in KB/s)
        cmd = "scp -r -P %s -l %d %s %s@%s:%s" % (remotePort, uploadLimit * 8,
                                                  srcDir, remoteUser,
                                                  remoteHost, backupPath)
    lines = Util.popen(cmd).readlines()
    if len(lines) != 0:
        return False, ' '.join(lines)
    return True, ''
    uploadLimit = params.get('uploadLimit', None)
    if remoteHost == None or remotePort == None or remoteUser == None or \
            remotePasswd == None or backupPath == None or uploadLimit == None:
        return False, 'remote host information errors'
    AgentLog.info("start upload backup data to ftp server")
    try:
        ftp = FTP()
        ftp.connect(remoteHost, int(remotePort), 60)
    except Exception, e:
        return False, 'can not connect to remote host: %s with port: %s, error: %s' % (
            remoteHost, remotePort, e)
    try:
        ftp.login(remoteUser, remotePasswd)
        ftp.cwd(backupPath)
        srcDir = srcDir.rstrip('/')
        localName = Util.pathSplit(srcDir)[1]
        ftp.mkd(localName)
        self.uploadDir(ftp, srcDir, localName)
        AgentLog.info("upload completed")
    except Exception, e:
        return False, 'upload error, error:%s' % e
    finally:
        ftp.quit()
    return True, ''

def uploadDir(self, ftp, localDir, remoteDir):
    ftp.cwd(remoteDir)
    for file in os.listdir(localDir):
        src = os.path.join(localDir, file)
        if os.path.isfile(src):
            ftp.storbinary('STOR ' + file, open(src, 'rb'))
def getData(self):
    self.lock.acquire()
    ret = Util.deepCopy(self.dataDict)
    self.lock.release()
    return {self.alias: ret}