def testGetDirEntries(self):
    """Verify that getDirEntries() reports a change date for every file
    state present in the master test data, and no spurious ones."""
    tests = self.getBackupTests()
    for testDir in tests:
        # Get a list of backup entries for the root folder
        rdiffDestDir = joinPaths(self.destRoot, testDir)
        entries = getDirEntries(rdiffDestDir, "/")
        # Go back through all backup states and make sure that the backup
        # entries match the files that exist
        origStateDir = joinPaths(self.masterDirPath, testDir)
        backupStates = self.getBackupStates(origStateDir)
        backupStates.sort()  # plain ascending sort; the cmp wrapper was redundant
        for backupState in backupStates:
            backupTime = rdw_helpers.rdwTime()
            backupTime.initFromString(backupState)
            # Go through each file, and make sure we have a backup entry for
            # this file and date
            origStateDir = joinPaths(self.masterDirPath, testDir, backupState)
            files = self.getBackupStates(origStateDir)
            for file in files:
                origFilePath = joinPaths(origStateDir, file)
                entry = getMatchingDirEntry(entries, file)
                assertionErrorMessage = "backupTime "+backupTime.getDisplayString()+" not found in backup entries for backup test \""+testDir+"\" for file \""+file+"\". Returned changeDates:"
                for changeDate in entry.changeDates:
                    assertionErrorMessage = assertionErrorMessage + "\n"+changeDate.getDisplayString()
                assertionErrorMessage = assertionErrorMessage + "\nIncrements dir: "+str(os.listdir(joinPaths(rdiffDestDir, "rdiff-backup-data", "increments")))
                for entryDate in entry.changeDates:
                    if backupTime.getSeconds() == entryDate.getSeconds():
                        # NOTE: fileChangedBetweenBackups() returns True when the
                        # file contents are *unchanged* (see its implementation)
                        if self.fileChangedBetweenBackups(testDir, entry.name, backupState, backupStates):
                            assert False, assertionErrorMessage
                        break
                else:
                    # No changeDate matched this backup -> the file must not
                    # have changed between the previous state and this one.
                    # (was "assert False or False" -- the "or False" was a no-op)
                    if not self.fileChangedBetweenBackups(testDir, entry.name, backupState, backupStates):
                        assert False, assertionErrorMessage
                assert os.path.isdir(origFilePath) == entry.isDir
def getDirEntries(self):
    """ returns dictionary of dir entries, keyed by dir name """
    entriesDict = {}
    # First, we grab a dir listing of the target, setting entry attributes
    for entryName in self.entries:
        if entryName == rdiffDataDirName:
            continue
        entryPath = joinPaths(self.repo, self.dirPath, entryName)
        # os.lstat(...)[6] is st_size
        newEntry = dirEntry(entryName, os.path.isdir(entryPath), os.lstat(entryPath)[6], True,
                            [self._getLastChangedBackupTime(entryName)])
        entriesDict[entryName] = newEntry
    # Go through the increments dir. If we find any files that didn't exist
    # in dirPath (i.e. have been deleted), add them
    for entryFile in self.incrementEntries:
        entry = incrementEntry(entryFile)
        entryName = entry.getFilename()
        if entry.shouldShowIncrement() or entry.isMissingIncrement():
            entryDate = entry.getDate()
            if not entry.isSnapshotIncrement():
                if entry.isMissingIncrement():
                    # A .missing increment's change date is the next backup
                    entryDate = self._getFirstBackupAfterDate(entry.getDate())
                else:
                    entryDate = entry.getDate()
            if entryName not in entriesDict:  # direct membership test, no keys() list
                entryPath = joinPaths(self.repo, rdiffIncrementsDirName, self.dirPath, entryName)
                newEntry = dirEntry(entryName, os.path.isdir(entryPath), 0, False, [entryDate])
                entriesDict[entryName] = newEntry
            else:
                if entryDate not in entriesDict[entryName].changeDates:
                    bisect.insort_left(entriesDict[entryName].changeDates, entryDate)
    return entriesDict
def restoreFileOrDir(repoRoot, dirPath, filename, restoreDate, useZip):
    """Restore a file or directory at restoreDate; directories are archived
    as .zip (useZip) or .tar.gz.  Returns the restored path; the caller must
    delete the file and its containing temp dir after use."""
    filePath = joinPaths(dirPath, filename)
    filePath = rdiffQuotedPath(repoRoot).getQuotedPath(filePath)
    checkRepoPath(repoRoot, filePath)
    restoredFilename = filename
    if restoredFilename == "/":
        restoredFilename = "(root)"
    fileToRestore = joinPaths(repoRoot, dirPath, filename)
    dateString = str(restoreDate.getSeconds())
    rdiffOutputFile = joinPaths(tempfile.mkdtemp(), restoredFilename)  # TODO: make so this includes the username
    results = rdw_helpers.execute("rdiff-backup", "--restore-as-of="+dateString, fileToRestore, rdiffOutputFile)
    if results['exitCode'] != 0 or not os.access(rdiffOutputFile, os.F_OK):
        error = results['stderr']
        if not error:
            error = 'rdiff-backup claimed success, but did not restore anything. This indicates a bug in rdiffWeb. Please report this to a developer.'
        raise UnknownError('Unable to restore! rdiff-backup output:\n'+error)
    if os.path.isdir(rdiffOutputFile):
        # Archive the restored tree, then drop the unarchived copy
        restoredDir = rdiffOutputFile
        if useZip:
            rdiffOutputFile = restoredDir + ".zip"
            rdw_helpers.recursiveZipDir(restoredDir, rdiffOutputFile)
        else:
            rdiffOutputFile = restoredDir + ".tar.gz"
            rdw_helpers.recursiveTarDir(restoredDir, rdiffOutputFile)
        rdw_helpers.removeDir(restoredDir)
    return rdiffOutputFile
def checkRepoPath(repoRoot, filePath):
    """Validate repoRoot as an rdiff-backup repository and check that
    filePath exists there, either live or as a (possibly deleted) increment.
    Raises DoesNotExistError or AccessDeniedError on failure."""
    # repoRoot must contain the rdiff-backup data directory
    dataPath = joinPaths(repoRoot, rdiffDataDirName)
    if not (os.access(dataPath, os.F_OK) and os.path.isdir(dataPath)):
        raise DoesNotExistError()
    # Walk the requested path upwards, refusing any symlinked component
    current = joinPaths(repoRoot, filePath)
    while True:
        current = current.rstrip("/")
        if os.path.islink(current):
            raise AccessDeniedError()
        (current, component) = os.path.split(current)
        if not component:
            break
    # The target must exist either in the live tree...
    if os.access(joinPaths(repoRoot, filePath), os.F_OK):
        return
    # ...or as an increment file (prefix match on the increments listing)
    (parentFolder, filename) = os.path.split(joinPaths(repoRoot, rdiffIncrementsDirName, filePath))
    try:
        increments = os.listdir(parentFolder)
    except OSError:
        increments = []
    if not [x for x in increments if x.startswith(filename)]:
        raise DoesNotExistError()
def fileChangedBetweenBackups(self, backupTest, filename, lastBackup, allBackups):
    # NOTE(review): despite its name, this returns True when the file
    # contents are IDENTICAL between the previous backup state and
    # lastBackup (i.e. the file did NOT change).  Callers (e.g.
    # testGetDirEntries) rely on this inverted meaning -- do not "fix"
    # the comparison without updating them.
    prevRevisions = filter(lambda x: x < lastBackup, allBackups)
    if not prevRevisions:
        # No earlier backup state to compare against
        return False
    oldVersion = prevRevisions[-1]
    oldFilePath = joinPaths(self.masterDirPath, backupTest, oldVersion, filename)
    newFilePath = joinPaths(self.masterDirPath, backupTest, lastBackup, filename)
    if not os.access(oldFilePath, os.F_OK):
        # File did not exist in the previous state -> counts as changed
        return False
    # True when both versions have byte-identical contents
    return open(oldFilePath, "r").read() == open(newFilePath, "r").read()
def restoreFile(repoRoot, dirPath, filename, restoreDate):
    """ returns a file path to the file.  User is responsible for deleting
    file, as well as containing dir, after use. """
    checkRepoPath(repoRoot, joinPaths(dirPath, filename))
    fileToRestore = joinPaths(repoRoot, dirPath, filename)
    dateString = str(restoreDate.getSeconds())
    rdiffOutputFile = joinPaths(tempfile.mkdtemp(), filename)  # TODO: make so this includes the username
    args = ["rdiff-backup", "--restore-as-of="+dateString, fileToRestore, rdiffOutputFile]
    # With P_WAIT, spawnvp returns the child's exit code -- check it instead
    # of relying solely on the output file existing
    status = os.spawnvp(os.P_WAIT, args[0], args)
    if status != 0 or not os.access(rdiffOutputFile, os.F_OK):
        raise UnknownError()
    return rdiffOutputFile
def getBackupHistory(self, numLatestEntries=-1, earliestDate=None, latestDate=None, includeInProgress=True):
    """Returns a list of backupHistoryEntry's
    earliestDate and latestDate are inclusive."""
    # Get a listing of error log files, and use that to build backup history
    curEntries = filter(lambda x: x.startswith("error_log."), self.dirEntries)
    curEntries.reverse()  # newest first, so numLatestEntries can stop early
    entries = []
    for entryFile in curEntries:
        entry = incrementEntry(self.pathQuoter, entryFile)
        # compare local times because of discrepency between client/server time zones
        if earliestDate and entry.getDate().getLocalSeconds() < earliestDate.getLocalSeconds():
            continue
        if latestDate and entry.getDate().getLocalSeconds() > latestDate.getLocalSeconds():
            continue
        try:
            if entry.isCompressed():
                errors = gzip.open(joinPaths(self.rdiffDir, entryFile), "r").read()
            else:
                errors = open(joinPaths(self.rdiffDir, entryFile), "r").read()
        except IOError:
            errors = "[Unable to read errors file.]"
        try:
            sessionStatsFile = self._getSessionStatsFile(entry)
            session_stats = open(joinPaths(self.rdiffDir, sessionStatsFile), "r").read()
            fileSize = re.compile("SourceFileSize ([0-9]+) ").findall(session_stats)[0]
            incrementSize = re.compile("IncrementFileSize ([0-9]+) ").findall(session_stats)[0]
        except (IOError, IndexError):
            # IndexError: stats file exists but lacks the expected fields;
            # previously this escaped the IOError-only handler
            fileSize = 0
            incrementSize = 0
        newEntry = backupHistoryEntry()
        newEntry.date = entry.getDate()
        newEntry.inProgress = self._backupIsInProgress(entry.getDate())
        if not includeInProgress and newEntry.inProgress:
            continue
        if newEntry.inProgress:
            newEntry.errors = ""
        else:
            newEntry.errors = errors
        newEntry.size = int(fileSize)
        newEntry.incrementSize = int(incrementSize)
        # insert at 0 to return entries in ascending date order
        entries.insert(0, newEntry)
        if numLatestEntries != -1 and len(entries) == numLatestEntries:
            return entries
    return entries
def _getBackupHistory(repoRoot, numLatestEntries=-1, earliestDate=None, latestDate=None, includeInProgress=True):
    """Returns a list of backupHistoryEntry's"""
    checkRepoPath(repoRoot, "")
    # Get a listing of error log files, and use that to build backup history
    rdiffDir = joinPaths(repoRoot, rdiffDataDirName)
    curEntries = os.listdir(rdiffDir)
    curEntries = filter(lambda x: x.startswith("error_log."), curEntries)
    curEntries.sort()
    entries = []
    for entryFile in curEntries:
        entry = incrementEntry(entryFile)
        # compare local times because of discrepency between client/server time zones
        if earliestDate and entry.getDate().getLocalSeconds() < earliestDate.getLocalSeconds():
            continue
        if latestDate and entry.getDate().getLocalSeconds() > latestDate.getLocalSeconds():
            continue
        try:
            if entry.isCompressed():
                errors = gzip.open(joinPaths(rdiffDir, entryFile), "r").read()
            else:
                errors = open(joinPaths(rdiffDir, entryFile), "r").read()
        except IOError:
            errors = "[Unable to read errors file.]"
        try:
            sessionStatsPath = getSessionStatsFile(rdiffDir, entry)
            session_stats = open(sessionStatsPath, "r").read()
            fileSize = re.compile("SourceFileSize ([0-9]+) ").findall(session_stats)[0]
        except (IOError, IndexError):
            # IndexError: stats file present but missing the SourceFileSize
            # field; previously this escaped the IOError-only handler
            fileSize = 0
        newEntry = backupHistoryEntry()
        newEntry.date = entry.getDate()
        newEntry.inProgress = backupIsInProgress(repoRoot, entry.getDate())
        if newEntry.inProgress:
            newEntry.errors = ""
        else:
            newEntry.errors = errors
        newEntry.size = int(fileSize)
        entries.append(newEntry)
    # Drop the trailing (newest) entry if it is an in-progress backup
    if len(entries) > 0 and not includeInProgress and backupIsInProgressForRepo(repoRoot):
        entries.pop()
    if numLatestEntries != -1:
        entries = entries[-numLatestEntries:]
    return entries
def testGetBackupHistory(self):
    # Verify getBackupHistory() returns one entry per backup state, that the
    # last-entry helper matches the newest state, and that date-range queries
    # are inclusive at the boundary and empty one second afterwards.
    tests = self.getBackupTests()
    for testDir in tests:
        # Get a list of backup entries for the root folder
        origBackupDir = joinPaths(self.masterDirPath, testDir)
        backupStates = self.getBackupStates(origBackupDir)
        backupStates.sort(lambda x, y: cmp(x, y))
        rdiffDestDir = joinPaths(self.destRoot, testDir)
        entries = getBackupHistory(rdiffDestDir)
        assert len(entries) == len(backupStates)
        backupNum = 0
        for backup in backupStates:
            origBackupStateDir = joinPaths(origBackupDir, backup)
            totalBackupSize = 0
            # os.lstat(...)[6] is st_size; sums only the top-level files
            for file in os.listdir(origBackupStateDir):
                totalBackupSize = totalBackupSize + os.lstat(joinPaths(origBackupStateDir, file))[6]
            #TODO: fix this to handle subdirs
            #assert totalBackupSize == entries[backupNum].size, "Calculated: "+str(totalBackupSize)+" Reported: "+str(entries[backupNum].size)+" State: "+str(backupNum)
            backupNum = backupNum + 1
        # Test that the last backup entry works correctly
        lastEntry = getLastBackupHistoryEntry(rdiffDestDir)
        lastBackupTime = rdw_helpers.rdwTime()
        lastBackupTime.initFromString(backupStates[-1])
        assert lastEntry.date == lastBackupTime
        # Test that timezone differences are ignored
        historyAsOf = lastEntry.date.getUrlString()
        # if "+" in historyAsOf:
        #     historyAsOf = historyAsOf.replace("+", "-")
        # else:
        #     historyAsOf = historyAsOf[:19] + "+" + historyAsOf[20:]
        lastBackupTime = rdw_helpers.rdwTime()
        lastBackupTime.initFromString(historyAsOf)
        entries = getBackupHistorySinceDate(rdiffDestDir, lastBackupTime)
        assert len(entries) == 1
        # Test that no backups are returned one second after the last backup
        # (bumps the seconds digit inside the URL-format date string;
        # assumes position 18 is the ones digit of seconds -- the assert below
        # verifies the arithmetic held)
        historyAsOf = historyAsOf[:18] + "1" + historyAsOf[19:]
        postBackupTime = rdw_helpers.rdwTime()
        postBackupTime.initFromString(historyAsOf)
        assert lastBackupTime.getLocalSeconds() + 1 == postBackupTime.getLocalSeconds()
        entries = getBackupHistorySinceDate(rdiffDestDir, postBackupTime)
        assert len(entries) == 0
def getSessionStatsFile(rdiffDataDir, entry):
    """Attempts to get the sessions statistics file for a given backup.
    Tries the following to find a match:
    1. The date with no timezone information
    2. The date, 1 hour in the past, with no timezone information
    3. The date with timezone information
    Returns "" when no candidate exists."""
    candidateDates = [entry.getDateStringNoTZ(),
                      entry.getDateStringNoTZ(-60*60),
                      entry.getDateString()]
    for dateString in candidateDates:
        sessionStatsPath = joinPaths(rdiffDataDir, getSessionStatsFileName(dateString))
        if os.access(sessionStatsPath, os.F_OK):
            return sessionStatsPath
    return ""
def getParmsForPage(self, root, repos):
    """Build browse-page parameters: good repos in 'repos', repos whose
    history cannot be read in 'badrepos'."""
    repoList = []
    repoErrors = []
    for userRepo in repos:
        row = {
            "repoName": userRepo,
            "repoBrowseUrl": self.buildBrowseUrl(userRepo, "/", False),
            "repoHistoryUrl": self.buildHistoryUrl(userRepo),
        }
        try:
            repoHistory = librdiff.getLastBackupHistoryEntry(rdw_helpers.joinPaths(root, userRepo))
        except librdiff.FileError:
            # Unreadable history -> report the repo as bad
            row["repoSize"] = "0"
            row["repoDate"] = "Error"
            repoErrors.append(row)
        else:
            size = rdw_helpers.formatFileSizeStr(repoHistory.size)
            if repoHistory.inProgress:
                size = "In Progress"
            row["repoSize"] = size
            row["repoDate"] = repoHistory.date.getDisplayString()
            repoList.append(row)
    return {"title": "browse", "repos": repoList, "badrepos": repoErrors}
def getBackupHistory(repoRoot, numLatestEntries=-1):
    """Returns a list of backupHistoryEntry's"""
    checkRepoPath(repoRoot, "")
    # Get a listing of error log files, and use that to build backup history
    rdiffDir = joinPaths(repoRoot, rdiffDataDirName)
    curEntries = os.listdir(rdiffDir)
    curEntries = filter(lambda x: x.startswith("error_log."), curEntries)
    curEntries.sort()
    if numLatestEntries != -1:
        assert numLatestEntries > 0
        curEntries = curEntries[-numLatestEntries:]
    curEntries.reverse()
    entries = []
    for entryFile in curEntries:
        entry = incrementEntry(entryFile)
        try:
            # NOTE(review): assumes the error log is always gzipped; an
            # uncompressed log raises IOError and is reported unreadable --
            # other variants of this function check entry.isCompressed()
            errors = gzip.open(os.path.join(rdiffDir, entryFile), "r").read()
        except IOError:
            errors = "[Unable to read errors file.]"
        try:
            sessionStatsPath = getSessionStatsFile(rdiffDir, entry)
            session_stats = open(sessionStatsPath, "r").read()
            fileSize = re.compile("SourceFileSize ([0-9]+) ").findall(session_stats)[0]
        except (IOError, IndexError):
            # IndexError: stats file present but missing the SourceFileSize
            # field; previously this escaped the IOError-only handler
            fileSize = 0
        newEntry = backupHistoryEntry()
        newEntry.date = entry.getDate()
        newEntry.errors = errors
        newEntry.size = int(fileSize)
        entries.append(newEntry)
    return entries
def getParmsForPage(self, root, repos):
    # Build the browse-page parameter dict: one row per repository with its
    # latest backup size/date; rows for unreadable repos are kept but flagged
    # as failed.  The list is ordered via _sortLocations before returning.
    repoList = []
    for reponame in repos:
        try:
            repoHistory = librdiff.getLastBackupHistoryEntry(rdw_helpers.joinPaths(root, reponame))
            reposize = rdw_helpers.formatFileSizeStr(repoHistory.size)
            reposizeinbytes = repoHistory.size
            if repoHistory.inProgress:
                reposize = "En Progreso"
            repoDate = repoHistory.date.getDisplayString()
            repodateinseconds = repoHistory.date.getLocalSeconds()
            failed = False
        except librdiff.FileError:
            # History could not be read; log it and emit a placeholder row
            logging.exception("No se puede obtener informacion previa de %s" % reponame)
            reposize = "0"
            reposizeinbytes = 0
            repoDate = "Error"
            repodateinseconds = 0
            failed = True
        repoList.append({
            "reponame" : reponame,
            "reposize" : reposize,
            "reposizeinbytes" : reposizeinbytes,
            "repodate" : repoDate,
            "repodateinseconds" : repodateinseconds,
            "repoBrowseUrl" : self.buildBrowseUrl(reponame, "/", False),
            "repoHistoryUrl" : self.buildHistoryUrl(reponame),
            'failed': failed})
    self._sortLocations(repoList)
    return { "title" : "browse", "repos" : repoList }
def getParmsForPage(self, root, repos):
    """Build browse-page parameters; unreadable repos stay in the list but
    are flagged as failed.  Rows are sorted and striped for display."""
    repoList = []
    for userRepo in repos:
        row = {
            "repoName": userRepo,
            "repoBrowseUrl": self.buildBrowseUrl(userRepo, "/", False),
            "repoHistoryUrl": self.buildHistoryUrl(userRepo),
        }
        try:
            repoHistory = librdiff.getLastBackupHistoryEntry(rdw_helpers.joinPaths(root, userRepo))
        except librdiff.FileError:
            row["repoSize"] = "0"
            row["repoDate"] = "Error"
            row['failed'] = True
        else:
            size = rdw_helpers.formatFileSizeStr(repoHistory.size)
            if repoHistory.inProgress:
                size = "In Progress"
            row["repoSize"] = size
            row["repoDate"] = repoHistory.date.getDisplayString()
            row['failed'] = False
        repoList.append(row)
    self._sortLocations(repoList)
    # Make second pass through list, setting the 'altRow' attribute
    for i in range(0, len(repoList)):
        repoList[i]['altRow'] = (i % 2 == 0)
    return { "title" : "browse", "repos" : repoList }
def init(self, repo, dirPath):
    """Cache directory listings for repo/dirPath, its data dir, and its
    increments dir."""
    self.repo = repo
    self.dirPath = dirPath
    completePath = joinPaths(repo, dirPath)
    dataPath = joinPaths(repo, rdiffDataDirName)
    # The directory may have been deleted; treat a missing dir as empty
    if os.access(completePath, os.F_OK):
        self.entries = os.listdir(completePath)
    else:
        self.entries = []
    self.dataDirEntries = os.listdir(dataPath)
    # Increments may be absent if the folder has never changed
    incrementsDir = joinPaths(repo, rdiffIncrementsDirName, dirPath)
    if os.access(incrementsDir, os.F_OK):
        self.incrementEntries = os.listdir(incrementsDir)
    else:
        self.incrementEntries = []
def sendEmails(self):
    # For every user, find repositories whose most recent successful backup
    # is older than the per-repo maximum age, then send one notification
    # email per user listing all stale repositories.
    for user in self.userDB.getUserList():
        userRepos = self.userDB.getUserRepoPaths(user)
        oldRepos = []
        for repo in userRepos:
            maxDaysOld = self.userDB.getRepoMaxAge(user, repo)
            if maxDaysOld != 0:  # 0 disables notifications for this repo
                # get the last backup date
                try:
                    lastBackup = librdiff.getLastBackupHistoryEntry(rdw_helpers.joinPaths(self.userDB.getUserRoot(user), repo), False)
                except librdiff.FileError:
                    pass  # Skip repos that have never been successfully backed up
                else:
                    if lastBackup:
                        # Midnight UTC maxDaysOld days ago is the oldest
                        # acceptable backup time
                        oldestGoodBackupTime = rdw_helpers.rdwTime()
                        oldestGoodBackupTime.initFromMidnightUTC(-maxDaysOld)
                        if lastBackup.date < oldestGoodBackupTime:
                            oldRepos.append({"repo" : repo, "lastBackupDate" : lastBackup.date.getDisplayString(), "maxAge" : maxDaysOld })
        if oldRepos:
            userEmailAddress = self.userDB.getUserEmail(user)
            emailText = rdw_helpers.compileTemplate("email_notification.txt", repos=oldRepos, sender=self._getEmailSender(), user=user)
            # NOTE(review): assumes the SMTP host requires authentication;
            # login() may fail on servers that do not -- confirm deployment.
            # Multiple recipients are supported via ";"-separated addresses.
            session = smtplib.SMTP(self._getEmailHost())
            session.login(self._getEmailUsername(), self._getEmailPassword())
            smtpresult = session.sendmail(self._getEmailSender(), userEmailAddress.split(";"), emailText)
            session.quit()
def setUp(self):
    # Create a clean working copy of the test data under the temp dir.
    # realpath() because the temp dir may itself be a symlink (e.g. macOS)
    # and later path validation rejects symlinked components.
    self.destRoot = rdw_helpers.joinPaths(os.path.realpath(tempfile.gettempdir()), "rdiffWeb")
    self.masterDirPath = os.path.realpath("tests")
    self.tearDown()  # clear out any leftovers from a previous run
    # Copy and set up each test
    self._copyDirWithoutSvn(self.masterDirPath, self.destRoot)
def setUp(self):
    # Create a clean working copy of the test data under the temp dir.
    # realpath() because the temp dir may itself be a symlink (e.g. macOS)
    # and later path validation rejects symlinked components.
    self.destRoot = rdw_helpers.joinPaths(os.path.realpath(tempfile.gettempdir()), "rdiffWeb")
    self.masterDirPath = os.path.realpath("tests")
    # TODO: do this right, including tying tests into "python setup.py test"
    self.tearDown()  # clear out any leftovers from a previous run
    # Copy and set up each test
    self._copyDirWithoutSvn(self.masterDirPath, self.destRoot)
def _getLastChangedBackupTime(self, filename):
    """Return the backup time at which filename last changed, derived from
    its newest increment (or the first backup when no increments exist)."""
    increments = self.groupedIncrementEntries.get(filename, [])
    if os.path.isdir(joinPaths(self.completePath, filename)):
        # For directories, only .dir/.missing increments mark a change
        increments = [x for x in increments if x.endswith(".dir") or x.endswith(".missing")]
    increments.sort()
    if not increments:
        return self._getFirstBackupAfterDate(None)
    return self._getFirstBackupAfterDate(incrementEntry(increments[-1]).getDate())
def __init__(self, repoRoot):
    """Open an rdiff-backup repository rooted at repoRoot and cache its
    data-dir listing plus the current_mirror markers."""
    checkRepoPath(repoRoot, "")
    self.repoRoot = repoRoot
    self.pathQuoter = rdiffQuotedPath(repoRoot)
    self.rdiffDir = joinPaths(repoRoot, rdiffDataDirName)
    # Keep a sorted listing of rdiff-backup-data for later scans
    self.dirEntries = sorted(os.listdir(self.rdiffDir))
    self.mirrorMarkers = [x for x in self.dirEntries if x.startswith("current_mirror.")]
def __init__(self, repoRoot):
    """Prepare quote/unquote regexes from the repo's chars_to_quote file."""
    # Quoted characters appear in paths as ";NNN" (three decimal digits)
    self.unquoteRegex = re.compile(";[0-9]{3}", re.S)
    charsToQuotePath = joinPaths(repoRoot, RDIFF_BACKUP_DATA, "chars_to_quote")
    if not os.path.exists(charsToQuotePath):
        return
    charsToQuoteStr = open(charsToQuotePath).read()
    if charsToQuoteStr:
        # Anything outside the allowed set (plus ";") must be quoted
        self.quoteRegex = re.compile("[^/%s]|;" % charsToQuoteStr, re.S)
def restoreFileOrDir(repoRoot, dirPath, filename, restoreDate, useZip): """This function is used to restore a directory tree or a file from the given respository. Users may specified the restore date and the archive format.""" # Format the specified file name / repository path for validation dirPath = dirPath.encode('utf-8') filename = filename.encode('utf-8') filePath = joinPaths(dirPath, filename) filePath = RdiffQuotedPath(repoRoot).getQuotedPath(filePath) checkRepoPath(repoRoot, filePath) restoredFilename = filename if restoredFilename == "/": restoredFilename = "(root)" fileToRestore = joinPaths(repoRoot, dirPath, filename) dateString = str(restoreDate.getSeconds()) rdiffOutputFile = joinPaths(tempfile.mkdtemp(), restoredFilename) # TODO: make so this includes the username # Use rdiff-backup executable to restore the data into a specified location results = rdw_helpers.execute("rdiff-backup", "--restore-as-of=" + dateString, fileToRestore, rdiffOutputFile) # Check the result if results['exitCode'] != 0 or not os.access(rdiffOutputFile, os.F_OK): error = results['stderr'] if not error: error = 'rdiff-backup claimed success, but did not restore anything. This indicates a bug in rdiffWeb. Please report this to a developer.' raise UnknownError('Unable to restore! rdiff-backup output:\n' + error) # The path restored is a directory and need to be archived using zip or tar if os.path.isdir(rdiffOutputFile): rdiffOutputDirectory = rdiffOutputFile try: if useZip: rdiffOutputFile = rdiffOutputFile + ZIP_SUFFIX _recursiveZipDir(rdiffOutputDirectory, rdiffOutputFile) else: rdiffOutputFile = rdiffOutputFile + TARGZ_SUFFIX _recursiveTarDir(rdiffOutputDirectory, rdiffOutputFile) finally: rdw_helpers.removeDir(rdiffOutputDirectory) return rdiffOutputFile
def backupIsInProgress(repo, date):
    # Determine whether the backup stamped `date` is still in progress by
    # inspecting current_mirror.* markers in the data dir.
    rdiffDir = joinPaths(repo, rdiffDataDirName)
    mirrorMarkers = os.listdir(rdiffDir)
    mirrorMarkers.sort()
    mirrorMarkers = filter(lambda x: x.startswith("current_mirror."), mirrorMarkers)
    if not mirrorMarkers:
        # No markers at all -- repo state unknown; treated as in progress
        return True
    # Drop the oldest marker; a remaining (newer) marker matching `date`
    # means that backup has not completed yet.
    # NOTE(review): presumably rdiff-backup keeps two markers while running
    # and one when done -- confirm against rdiff-backup internals.
    mirrorMarkers = mirrorMarkers[1:]
    return len(filter(lambda x: x.startswith("current_mirror."+date.getUrlString()), mirrorMarkers)) > 0
def testCompileTemplate(self):
    """Render each backup test's template and compare with expected output."""
    for test in self._getBackupTests():
        parms = self.getParmsForTemplate(rdw_helpers.joinPaths(self.destRoot, test), "repo")
        template = self._getFileText("", self.getTemplateName())
        encounteredText = rdw_templating.templateParser().parseTemplate(template, **parms)
        expectedText = self._getFileText(test, self.getExpectedResultsName())
        self.assertEquals(encounteredText, expectedText)
        # Bare assert duplicates the check but yields a full-text diff message
        assert encounteredText == expectedText, "Got:\n" + encounteredText + "\nExpected:\n" + expectedText + "\nEnd"
def _findRdiffRepos(dirToSearch, outRepoPaths):
    """Recursively collect rdiff-backup repository paths under dirToSearch
    into outRepoPaths."""
    children = os.listdir(dirToSearch)
    if librdiff.rdiffDataDirName in children:
        # This directory is itself a repo root; do not descend further
        outRepoPaths.append(dirToSearch)
        return
    for child in children:
        childPath = rdw_helpers.joinPaths(dirToSearch, child)
        # Recurse into real directories only; skipping symlinks avoids cycles
        if os.path.isdir(childPath) and not os.path.islink(childPath):
            _findRdiffRepos(childPath, outRepoPaths)
def findRdiffRepos(dirToSearch, outRepoPaths): dirEntries = os.listdir(dirToSearch) if librdiff.rdiffDataDirName in dirEntries: print " Found repo at " + dirToSearch outRepoPaths.append(dirToSearch) return for entry in dirEntries: entryPath = rdw_helpers.joinPaths(dirToSearch, entry) if os.path.isdir(entryPath) and not os.path.islink(entryPath): findRdiffRepos(entryPath, outRepoPaths)
def init(self, repo, dirPath):
    """Cache the directory listings needed to enumerate repo/dirPath."""
    self.repo = repo
    self.dirPath = dirPath
    completePath = joinPaths(repo, dirPath)
    dataPath = joinPaths(repo, rdiffDataDirName)
    # A deleted directory simply yields no live entries
    self.entries = []
    if os.access(completePath, os.F_OK):
        self.entries = os.listdir(completePath)
    self.dataDirEntries = os.listdir(dataPath)
    # A missing increments dir means the folder never changed
    incrementsDir = joinPaths(repo, rdiffIncrementsDirName, dirPath)
    self.incrementEntries = []
    if os.access(incrementsDir, os.F_OK):
        self.incrementEntries = os.listdir(incrementsDir)
def restoreFileOrDir(repoRoot, dirPath, filename, restoreDate):
    """ returns a file path to the file.  User is responsible for deleting
    file, as well as containing dir, after use. """
    checkRepoPath(repoRoot, joinPaths(dirPath, filename))
    restoredFilename = filename
    if restoredFilename == "/":
        restoredFilename = "(root)"
    fileToRestore = joinPaths(repoRoot, dirPath, filename)
    dateString = str(restoreDate.getSeconds())
    rdiffOutputFile = joinPaths(tempfile.mkdtemp(), restoredFilename)  # TODO: make so this includes the username
    args = ["rdiff-backup", "--restore-as-of="+dateString, fileToRestore, rdiffOutputFile]
    # With P_WAIT, spawnvp returns the child's exit code -- check it instead
    # of relying solely on the output file existing
    status = os.spawnvp(os.P_WAIT, args[0], args)
    if status != 0 or not os.access(rdiffOutputFile, os.F_OK):
        raise UnknownError()
    if os.path.isdir(rdiffOutputFile):
        # Directories are delivered as a zip archive
        rdw_helpers.recursiveZipDir(rdiffOutputFile, rdiffOutputFile+".zip")
        rdw_helpers.removeDir(rdiffOutputFile)
        rdiffOutputFile = rdiffOutputFile+".zip"
    return rdiffOutputFile
def init(self, repo, dirPath):
    """Cache listings, grouped increments and sorted backup times for
    repo/dirPath."""
    self.repo = repo
    self.dirPath = dirPath
    self.completePath = joinPaths(repo, dirPath)
    dataPath = joinPaths(repo, rdiffDataDirName)
    # The target dir may have been deleted; fall back to an empty listing
    if os.access(self.completePath, os.F_OK):
        self.entries = os.listdir(self.completePath)
    else:
        self.entries = []
    self.dataDirEntries = os.listdir(dataPath)
    # Increments may not exist if the folder never changed
    incrementsDir = joinPaths(repo, rdiffIncrementsDirName, dirPath)
    if os.access(incrementsDir, os.F_OK):
        self.incrementEntries = os.listdir(incrementsDir)
    else:
        self.incrementEntries = []
    # Group increment files by the file they describe
    self.groupedIncrementEntries = rdw_helpers.groupby(self.incrementEntries, lambda x: incrementEntry(x).getFilename())
    # Each mirror_metadata file marks one completed backup time
    self.backupTimes = [incrementEntry(x).getDate() for x in self.dataDirEntries if x.startswith("mirror_metadata")]
    self.backupTimes.sort()
def validateUserPath(self, path):
    '''Takes a path relative to the user's root dir and validates that it is
    valid and within the user's root'''
    path = rdw_helpers.joinPaths(self.getUserDB().getUserRoot(self.getUsername()), rdw_helpers.encodePath(path))
    path = path.rstrip("/")
    realPath = os.path.realpath(path)
    # Reject any path containing symlinks or "..": its realpath differs
    if realPath != path:
        raise rdw_helpers.accessDeniedError
    # Make sure that the path is the user root or strictly inside it.
    # A bare prefix test (find(...) != 0) wrongly accepted e.g.
    # "/home/user2" for root "/home/user"; require a separator boundary.
    userRoot = rdw_helpers.encodePath(self.getUserDB().getUserRoot(self.getUsername())).rstrip("/")
    if realPath != userRoot and not realPath.startswith(userRoot + "/"):
        raise rdw_helpers.accessDeniedError
def backupIsInProgress(repo, date):
    """True when the backup stamped `date` has not completed (a newer
    current_mirror marker still matches it), or no markers exist at all."""
    rdiffDir = joinPaths(repo, rdiffDataDirName)
    markers = sorted(os.listdir(rdiffDir))
    markers = [m for m in markers if m.startswith("current_mirror.")]
    if not markers:
        # Repo state unknown -- treated as in progress
        return True
    # Ignore the oldest marker; any remaining marker for this date means
    # the backup is still running
    prefix = "current_mirror." + date.getUrlString()
    return len([m for m in markers[1:] if m.startswith(prefix)]) > 0
def _getUserMessages(self, repos, includeSuccess, includeFailure, earliestDate, latestDate):
    # Collect backup history entries across all of the user's repositories
    # for the given (inclusive) date range; unreadable repos are recorded
    # in repoErrors instead of aborting.
    # NOTE(review): includeSuccess/includeFailure are unused in the code
    # visible here -- presumably consumed further down; confirm.
    userRoot = self.userDB.getUserRoot(self.getUsername())
    repoErrors = []
    allBackups = []
    for repo in repos:
        try:
            backups = librdiff.getBackupHistoryForDateRange(rdw_helpers.joinPaths(userRoot, repo), earliestDate, latestDate);
            allBackups += [{"repo": repo, "date": backup.date, "displayDate": backup.date.getDisplayString(), "size": rdw_helpers.formatFileSizeStr(backup.size), "errors": backup.errors} for backup in backups]
        except librdiff.FileError, error:
            repoErrors.append({"repo": repo, "error": error.getErrorString()})
def testCompileTemplate(self):
    """Render each backup test's template and compare with expected output."""
    for test in self._getBackupTests():
        destPath = rdw_helpers.joinPaths(self.destRoot, test)
        parms = self.getParmsForTemplate(destPath, "repo")
        templateText = self._getFileText("", self.getTemplateName())
        encounteredText = rdw_templating.templateParser().parseTemplate(templateText, **parms)
        expectedText = self._getFileText(test, self.getExpectedResultsName())
        self.assertEquals(encounteredText, expectedText)
        # Bare assert duplicates the check but yields a full-text diff message
        assert encounteredText == expectedText, "Got:\n" + encounteredText + "\nExpected:\n" + expectedText + "\nEnd"
def getDirEntries(self):
    """ returns dictionary of dir entries, keyed by dir name """
    entriesDict = {}
    # First, we grab a dir listing of the target, setting entry attributes
    for entryName in self.entries:
        if entryName == RDIFF_BACKUP_DATA:
            continue
        entryPath = joinPaths(self.repo, self.dirPath, entryName)
        # os.lstat(...)[6] is st_size
        newEntry = DirEntry(entryName, self.pathQuoter, os.path.isdir(entryPath),
                            os.lstat(entryPath)[6], True,
                            [self._getLastChangedBackupTime(entryName)])
        entriesDict[newEntry.name] = newEntry
    # Go through the increments dir. If we find any files that didn't exist
    # in dirPath (i.e. have been deleted), add them
    for entryFile in self.incrementEntries:
        entry = IncrementEntry(self.repo, entryFile)
        entryName = entry.getFilename()
        if entry.shouldShowIncrement() or entry.isMissingIncrement():
            entryDate = entry.getDate()
            if not entry.isSnapshotIncrement():
                if entry.isMissingIncrement():
                    # A .missing increment's change date is the next backup
                    entryDate = self._getFirstBackupAfterDate(entry.getDate())
                else:
                    entryDate = entry.getDate()
            if entryName not in entriesDict:  # direct membership test, no keys() list
                entryPath = joinPaths(self.repo, INCREMENTS, self.dirPath, entryName)
                newEntry = DirEntry(entryName, self.pathQuoter, os.path.isdir(entryPath), 0, False, [entryDate])
                entriesDict[entryName] = newEntry
            else:
                if entryDate not in entriesDict[entryName].changeDates:
                    bisect.insort_left(entriesDict[entryName].changeDates, entryDate)
    return entriesDict
def validateUserPath(self, path):
    '''Takes a path relative to the user's root dir and validates that it is
    valid and within the user's root'''
    path = rdw_helpers.joinPaths(self.getUserDB().getUserRoot(self.getUsername()), path)
    path = path.rstrip("/")
    realPath = os.path.realpath(path)
    # Reject any path containing symlinks or "..": its realpath differs
    if realPath != path:
        raise rdw_helpers.accessDeniedError
    # Require the resolved path to be the user root itself or inside it.
    # The old bare prefix test (find(...) != 0) wrongly accepted e.g.
    # "/home/user2" for root "/home/user"; it also recomputed realpath
    # redundantly.  Require a separator boundary instead.
    userRoot = self.getUserDB().getUserRoot(self.getUsername()).rstrip("/")
    if realPath != userRoot and not realPath.startswith(userRoot + "/"):
        raise rdw_helpers.accessDeniedError
def testGetDirRestoreDates(self):
    """Check getDirRestoreDates() for /testdir2 against expected output."""
    for testDir in self.getBackupTests():
        if not self.hasDirRestoreDates(testDir):
            continue
        rdiffDestDir = joinPaths(self.destRoot, testDir)
        dates = getDirRestoreDates(rdiffDestDir, "/testdir2")
        # One URL-format date per line
        statusText = "".join([date.getUrlString() + "\n" for date in dates])
        expected = self.getExpectedDirRestoreDates(testDir)
        # Compare ignoring newlines so formatting differences don't matter
        assert statusText.replace("\n", "") == expected.replace("\n", ""), "Got: " + statusText + "\nExpected:" + expected
def _getUserMessages(self, repos, includeSuccess, includeFailure, earliestDate, latestDate):
    # Collect backup history entries across all of the user's repositories
    # for the given (inclusive) date range, attaching a browse link to each
    # row; unreadable repos are recorded in repoErrors instead of aborting.
    # NOTE(review): includeSuccess/includeFailure are unused in the code
    # visible here -- presumably consumed further down; confirm.
    userRoot = self.getUserDB().getUserRoot(self.getUsername())
    repoErrors = []
    allBackups = []
    for repo in repos:
        try:
            backups = librdiff.getBackupHistoryForDateRange(rdw_helpers.joinPaths(userRoot, repo), earliestDate, latestDate);
            allBackups += [{"repo": repo, "date": backup.date, "displayDate": backup.date.getDisplayString(), "size": rdw_helpers.formatFileSizeStr(backup.size), "errors": backup.errors, "repoLink" : self.buildBrowseUrl(repo, "/", False)} for backup in backups]
        except librdiff.FileError, error:
            repoErrors.append({"repo": repo, "error": error.getErrorString(), "repoLink" : self.buildBrowseUrl(repo, "/", False)})
def setUp(self):
    # Build rdiff-backup repositories from the master test data: each test
    # dir contains one subfolder per backup state, named as a parseable date.
    # The temp dir on Mac OS X is a symlink; expand it because of validation against symlinks in paths
    self.destRoot = joinPaths(os.path.realpath(tempfile.gettempdir()), "rdiffWeb")
    self.masterDirPath = joinPaths("..", "tests")
    # TODO: do this right, including tying tests into "python setup.py test"
    self.tearDown()  # clear out leftovers from a previous run
    os.makedirs(self.destRoot)
    # Set up each scenario
    tests = self.getBackupTests()
    for testDir in tests:
        # Iterate through the backup states
        origStateDir = joinPaths(self.masterDirPath, testDir)
        backupStates = self.getBackupStates(origStateDir)
        backupStates.sort(lambda x, y: cmp(x, y))  # chronological order
        for backupState in backupStates:
            # Try to parse the folder name as a date. If we can't, raise
            backupTime = rdw_helpers.rdwTime()
            backupTime.initFromString(backupState)
            # Backup the data as it should be at that state
            #print "   State", backupState
            runRdiff(joinPaths(origStateDir, backupState), joinPaths(self.destRoot, testDir), backupTime)
class rdiffRestorePage(page_main.rdiffPage): def index(self, repo, path, date): try: rdw_helpers.ensurePathValid(repo) rdw_helpers.ensurePathValid(path) except rdw_helpers.accessDeniedError, error: return self.writeErrorPage(str(error)) if not repo: return self.writeErrorPage("Backup location not specified.") if not repo in self.userDB.getUserRepoPaths(self.getUsername()): return self.writeErrorPage("Access is denied.") if librdiff.backupIsInProgress(rdw_helpers.joinPaths(self.userDB.getUserRoot(self.getUsername()), repo)): return self.writeErrorPage("A backup is currently in progress to this location. Restores are disabled until this backup is complete.") try: restoreTime = rdw_helpers.rdwTime() restoreTime.initFromString(date) (path, file) = os.path.split(path) if not file: file = path path = "/" filePath = librdiff.restoreFileOrDir(rdw_helpers.joinPaths(self.userDB.getUserRoot(self.getUsername()), repo), path, file, restoreTime) except librdiff.FileError, error: return self.writeErrorPage(error.getErrorString())
def testRestoreFile(self):
    """For every backup scenario and every backup state, restore each
    regular file and verify its contents match the original.

    Fixes over the previous version: file handles are closed via context
    managers before os.remove() runs (required on platforms that lock open
    files), the restored copy is removed even when the comparison fails,
    and the unused getDirEntries() call was dropped.
    """
    tests = self.getBackupTests()
    for testDir in tests:
        rdiffDestDir = joinPaths(self.destRoot, testDir)
        # Go back through all backup states and make sure that the
        # restored files match the files that existed at each state.
        origStateDir = joinPaths(self.masterDirPath, testDir)
        backupStates = self.getBackupStates(origStateDir)
        backupStates.sort(lambda x, y: cmp(x, y))
        for backupState in backupStates:
            backupTime = rdw_helpers.rdwTime()
            backupTime.initFromString(backupState)
            # Go through each file, and make sure that the restored file
            # looks the same as the orig file.
            origStateDir = joinPaths(self.masterDirPath, testDir, backupState)
            files = self.getBackupStates(origStateDir)
            for file in files:
                origFilePath = joinPaths(origStateDir, file)
                if not os.path.isdir(origFilePath):
                    restoredFilePath = restoreFileOrDir(rdiffDestDir, "/", file, backupTime)
                    try:
                        with open(restoredFilePath, "r") as restoredFile:
                            with open(origFilePath, "r") as origFile:
                                assert restoredFile.read() == origFile.read()
                    finally:
                        # Always clean up the restored copy, even if the
                        # content comparison above fails.
                        os.remove(restoredFilePath)
def getParmsForPage(self, root, repos):
    """Build the template parameters for the repository-listing page.

    root  -- filesystem root under which all of the user's repos live
    repos -- iterable of repository names relative to root
    Returns a dict with the page title and per-repo display entries; repos
    whose history cannot be read are kept in the list flagged 'failed'.

    Improvement: the altRow pass now uses enumerate() instead of the
    unidiomatic range(0, len(...)) index loop.
    """
    repoList = []
    for userRepo in repos:
        try:
            repoHistory = librdiff.getLastBackupHistoryEntry(
                rdw_helpers.joinPaths(root, userRepo))
        except librdiff.FileError:
            # Keep the repo visible on the page, but mark it as failed.
            repoList.append({
                "repoName": userRepo,
                "repoSize": "0",
                "repoDate": "Error",
                "repoBrowseUrl": self.buildBrowseUrl(userRepo, "/", False),
                "repoHistoryUrl": self.buildHistoryUrl(userRepo),
                'failed': True
            })
        else:
            repoSize = rdw_helpers.formatFileSizeStr(repoHistory.size)
            if repoHistory.inProgress:
                repoSize = "In Progress"
            repoList.append({
                "repoName": userRepo,
                "repoSize": repoSize,
                "repoDate": repoHistory.date.getDisplayString(),
                "repoBrowseUrl": self.buildBrowseUrl(userRepo, "/", False),
                "repoHistoryUrl": self.buildHistoryUrl(userRepo),
                'failed': False
            })
    self._sortLocations(repoList)
    # Second pass: alternate row shading (even indices get the highlight).
    for rowIndex, repoEntry in enumerate(repoList):
        repoEntry['altRow'] = (rowIndex % 2 == 0)
    return {"title": "browse", "repos": repoList}
class rdiffHistoryPage(page_main.rdiffPage): def index(self, repo): try: self.validateUserPath(repo) except rdw_helpers.accessDeniedError, error: return self.writeErrorPage(str(error)) if not repo: return self.writeErrorPage("Backup location not specified.") if not repo in self.userDB.getUserRepoPaths(self.getUsername()): return self.writeErrorPage("Access is denied.") parms = {} try: parms = self.getParmsForPage(joinPaths(self.userDB.getUserRoot(self.getUsername()), repo), repo) except librdiff.FileError, error: return self.writeErrorPage(error.getErrorString())
def _getUserMessages(self): userRoot = self.userDB.getUserRoot(self.getUsername()) userRepos = self.userDB.getUserRepoPaths(self.getUsername()) asOfDate = rdw_helpers.rdwTime() asOfDate.initFromMidnightUTC(-5) # build list of all backups allBackups = [] repoErrors = [] for repo in userRepos: try: backups = librdiff.getBackupHistorySinceDate(rdw_helpers.joinPaths(userRoot, repo), asOfDate) allBackups += [{"repo": repo, "date": backup.date, "displayDate": backup.date.getDisplayString(), "size": rdw_helpers.formatFileSizeStr(backup.size), "errors": backup.errors} for backup in backups] except librdiff.FileError, error: repoErrors.append({"repo": repo, "error": error.getErrorString()})
def testGetDirRestoreDates(self):
    """Verify getDirRestoreDates() output for /testdir2 against each
    scenario's recorded expectations, when a fixture exists."""
    for testName in self.getBackupTests():
        if self.hasDirRestoreDates(testName):
            destDir = joinPaths(self.destRoot, testName)
            actual = ""
            for restoreDate in getDirRestoreDates(destDir, "/testdir2"):
                actual = actual + restoreDate.getUrlString() + "\n"
            # Strip newlines on both sides so only the dates are compared.
            assert actual.replace("\n", "") == \
                self.getExpectedDirRestoreDates(testName).replace("\n", ""), \
                "Got: " + actual + "\nExpected:" + \
                self.getExpectedDirRestoreDates(testName)
def sendEmails(self):
    """Email each user a notification listing repositories whose most
    recent backup is older than that repo's configured maximum age."""
    for user in self.userDB.getUserList():
        staleRepos = []
        for repo in self.userDB.getUserRepoPaths(user):
            maxDaysOld = self.userDB.getRepoMaxAge(user, repo)
            if maxDaysOld == 0:
                continue  # age checking disabled for this repo
            # get the last backup date
            try:
                lastBackup = librdiff.getLastBackupHistoryEntry(
                    rdw_helpers.joinPaths(self.userDB.getUserRoot(user), repo),
                    False)
            except librdiff.FileError:
                continue  # Skip repos that have never been successfully backed up
            if not lastBackup:
                continue
            oldestGoodBackupTime = rdw_helpers.rdwTime()
            oldestGoodBackupTime.initFromMidnightUTC(-maxDaysOld)
            if lastBackup.date < oldestGoodBackupTime:
                staleRepos.append({
                    "repo": repo,
                    "lastBackupDate": lastBackup.date.getDisplayString(),
                    "maxAge": maxDaysOld
                })
        if staleRepos:
            userEmailAddress = self.userDB.getUserEmail(user)
            emailText = rdw_helpers.compileTemplate(
                "email_notification.txt",
                repos=staleRepos,
                sender=self._getEmailSender(),
                user=user)
            # NOTE(review): the sendmail() result is ignored here, so
            # per-recipient delivery failures are not reported.
            session = smtplib.SMTP(self._getEmailHost())
            session.login(self._getEmailUsername(), self._getEmailPassword())
            session.sendmail(self._getEmailSender(),
                             userEmailAddress.split(";"), emailText)
            session.quit()
class rdiffHistoryPage(page_main.rdiffPage): def index(self, repo): try: rdw_helpers.ensurePathValid(repo) except rdw_helpers.accessDeniedError, error: return self.writeErrorPage(str(error)) if not repo: return self.writeErrorPage("Backup location not specified.") if not repo in self.userDB.getUserRepoPaths(self.getUsername()): return self.writeErrorPage("Access is denied.") page = self.startPage("Backup History") page = page + self.writeTopLinks() try: rdiffHistory = librdiff.getBackupHistory( joinPaths(self.userDB.getUserRoot(self.getUsername()), repo)) except librdiff.FileError, error: return self.writeErrorPage(error.getErrorString())
def emailNotifications(): emailHost = getEmailHost() emailSender = getEmailSender() emailUsername = getEmailUsername() emailPassword = getEmailPassword() dbBackend = db.userDB().getUserDBModule() for user in dbBackend.getUserList(): userRepos = dbBackend.getUserRepoPaths(user) oldRepos = [] for repo in userRepos: maxDaysOld = dbBackend.getRepoMaxAge(user, repo) if maxDaysOld != 0: # get the last backup date try: lastBackup = librdiff.getLastBackupHistoryEntry(rdw_helpers.joinPaths(dbBackend.getUserRoot(user), repo), False) except librdiff.FileError: pass # Skip repos that have never been successfully backed up else: if lastBackup: oldestGoodBackupTime = rdw_helpers.rdwTime() oldestGoodBackupTime.initFromMidnightUTC(-maxDaysOld) if lastBackup.date < oldestGoodBackupTime: oldRepos.append({"repo" : repo, "lastBackupDate" : lastBackup.date.getDisplayString(), "maxAge" : maxDaysOld }) if oldRepos: userEmailAddress = dbBackend.getUserEmail(user) emailText = rdw_helpers.compileTemplate("email_notification.txt", repos=oldRepos, sender=emailSender, user=user) session = smtplib.SMTP(emailHost) session.login(emailUsername, emailPassword) smtpresult = session.sendmail(emailSender, userEmailAddress.split(";"), emailText) if smtpresult: error = "" for recipient in smtpresult.keys(): error = """Could not delivery mail to: %s Server said: %s %s %s""" % (recipient, smtpresult[recipient][0], smtpresult[recipient][1], error) raise smtplib.SMTPException, error session.quit()
def _recursiveZipDir(dirPath, zipFilename):
    """This function is used to archive a restored directory. It creates a
    zip archive containing the specified directory.

    dirPath     -- directory to archive (unicode; encoded to utf-8 bytes here)
    zipFilename -- destination path of the zip archive
    Entries are stored under their dirPath-relative names.

    Fix: the archive is now closed in a finally block so the handle is not
    leaked (and the zip is flushed) when a write raises.
    """
    dirPath = dirPath.encode('utf-8')
    assert os.path.isdir(dirPath)
    import zipfile
    dirPath = os.path.normpath(dirPath)
    # Create the archive
    zipObj = zipfile.ZipFile(zipFilename, "w", zipfile.ZIP_DEFLATED)
    try:
        # Walk the tree and add every file under its relative name.
        for root, dirs, files in os.walk(dirPath, topdown=True):
            for name in files:
                fullPath = joinPaths(root, name)
                assert fullPath.startswith(dirPath)
                relPath = fullPath[len(dirPath) + 1:]
                zipObj.write(fullPath, relPath)
    finally:
        zipObj.close()
def getParmsForPage(self, root, repos):
    """Build the template parameters for the repo-listing page; repos whose
    history cannot be read are returned separately under 'badrepos'."""
    repoList = []
    repoErrors = []
    for userRepo in repos:
        browseUrl = self.buildBrowseUrl(userRepo, "/", False)
        historyUrl = self.buildHistoryUrl(userRepo)
        try:
            repoHistory = librdiff.getLastBackupHistoryEntry(
                rdw_helpers.joinPaths(root, userRepo))
        except librdiff.FileError:
            repoErrors.append({"repoName": userRepo,
                               "repoSize": "0",
                               "repoDate": "Error",
                               "repoBrowseUrl": browseUrl,
                               "repoHistoryUrl": historyUrl})
        else:
            repoSize = rdw_helpers.formatFileSizeStr(repoHistory.size)
            if repoHistory.inProgress:
                # An in-progress backup has no meaningful size yet.
                repoSize = "In Progress"
            repoList.append({"repoName": userRepo,
                             "repoSize": repoSize,
                             "repoDate": repoHistory.date.getDisplayString(),
                             "repoBrowseUrl": browseUrl,
                             "repoHistoryUrl": historyUrl})
    return {"title": "browse", "repos": repoList, "badrepos": repoErrors}
def index(self):
    """Render the "Backup Locations" page listing each of the user's
    repositories with its latest backup size and date."""
    page = self.startPage("Backup Locations")
    page += self.writeTopLinks()
    repoList = []
    for userRepo in self.userDB.getUserRepoPaths(self.getUsername()):
        try:
            # Only the single most recent history entry is needed.
            repoHistory = librdiff.getBackupHistory(
                rdw_helpers.joinPaths(self.userDB.getUserRoot(self.getUsername()), userRepo),
                1)
        except librdiff.FileError:
            # Unreadable repo: show a placeholder row instead of failing.
            repoSize = "0"
            repoDate = "Error"
        else:
            repoSize = rdw_helpers.formatFileSizeStr(repoHistory[0].size)
            repoDate = repoHistory[0].date.getDisplayString()
        repoList.append({"repoName": userRepo,
                         "repoSize": repoSize,
                         "repoDate": repoDate,
                         "repoBrowseUrl": self.buildBrowseUrl(userRepo, "/"),
                         "repoHistoryUrl": self.buildHistoryUrl(userRepo)})
    page += self.compileTemplate("repo_listing.html", title="browse", repos=repoList)
    page += self.endPage()
    return page
def _recursiveTarDir(dirPath, tarFilename):
    """This function is used to archive a restored directory. It creates a
    tar.gz archive containing the specified directory.

    dirPath     -- directory to archive (unicode; encoded to utf-8 bytes here)
    tarFilename -- destination .tar.gz path (unicode; encoded to utf-8 bytes)
    Each top-level entry is added under its bare name so the archive holds
    relative paths; tarfile recurses into subdirectories itself.

    Fix: the archive is now closed in a finally block so the handle is not
    leaked (and the gzip stream is flushed) when an add raises.
    """
    dirPath = dirPath.encode('utf-8')
    tarFilename = tarFilename.encode('utf-8')
    assert os.path.isdir(dirPath)
    import tarfile
    dirPath = os.path.normpath(dirPath)
    # Create a tar.gz archive
    tar = tarfile.open(tarFilename, "w:gz")
    try:
        for file in os.listdir(dirPath):
            # Pass in file as name explicitly so we get relative paths
            tar.add(joinPaths(dirPath, file), file)
    finally:
        # Close the archive
        tar.close()
def _getBackupHistory(repoRoot, numLatestEntries=-1, cutoffDate=None):
    """Returns a list of backupHistoryEntry's.

    repoRoot         -- path of the rdiff-backup repository
    numLatestEntries -- if not -1, only the newest N entries are returned
                        (and the list is returned newest-first)
    cutoffDate       -- if given, entries older than it are skipped

    Fixes over the previous version: the SourceFileSize regex is compiled
    once outside the loop, and a stats file without a SourceFileSize line
    (which raised an uncaught IndexError) now falls back to size 0.
    """
    checkRepoPath(repoRoot, "")
    # Get a listing of error log files, and use that to build backup history
    rdiffDir = joinPaths(repoRoot, rdiffDataDirName)
    curEntries = os.listdir(rdiffDir)
    curEntries = filter(lambda x: x.startswith("error_log."), curEntries)
    curEntries.sort()
    if numLatestEntries != -1:
        assert numLatestEntries > 0
        curEntries = curEntries[-numLatestEntries:]
        curEntries.reverse()
    # Compile once; the same pattern is applied to every session-stats file.
    sizePattern = re.compile("SourceFileSize ([0-9]+) ")
    entries = []
    for entryFile in curEntries:
        entry = incrementEntry(entryFile)
        # compare local times because of discrepency between client/server time zones
        if cutoffDate and entry.getDate().getLocalSeconds() < cutoffDate.getLocalSeconds():
            continue
        try:
            errors = gzip.open(os.path.join(rdiffDir, entryFile), "r").read()
        except IOError:
            errors = "[Unable to read errors file.]"
        try:
            sessionStatsPath = getSessionStatsFile(rdiffDir, entry)
            session_stats = open(sessionStatsPath, "r").read()
            expression = sizePattern.findall(session_stats)[0]
        except (IOError, IndexError):
            # Missing/unreadable stats file, or no SourceFileSize line.
            expression = 0
        newEntry = backupHistoryEntry()
        newEntry.date = entry.getDate()
        newEntry.errors = errors
        newEntry.size = int(expression)
        entries.append(newEntry)
    return entries
class rdiffHistoryPage(page_main.rdiffPage): def index(self, repo, date=''): try: self.validateUserPath(repo) except rdw_helpers.accessDeniedError, error: return self.writeErrorPage(str(error)) if not repo: return self.writeErrorPage("Backup location not specified.") if not repo in self.getUserDB().getUserRepoPaths(self.getUsername()): return self.writeErrorPage("Access is denied.") if cherrypy.request.method == 'POST': if not date: return self.writeErrorPage("No deletion date was specified.") deleteTime = rdw_helpers.rdwTime() deleteTime.initFromString(date) repoPath = joinPaths( self.getUserDB().getUserRoot(self.getUsername()), repo) try: librdiff.removeRepoHistory(repoPath, deleteTime) except librdiff.FileError, error: return self.writeErrorPage(error.getErrorString())
class rdiffRestorePage(page_main.rdiffPage): def index(self, repo, path, date): repo = rdw_helpers.decodeUrl(repo) path = rdw_helpers.decodeUrl(path) date = rdw_helpers.decodeUrl(date) try: rdw_helpers.ensurePathValid(repo) rdw_helpers.ensurePathValid(path) except rdw_helpers.accessDeniedError, error: return self.writeErrorPage(str(error)) if not repo: return self.writeErrorPage("Backup location not specified.") if not repo in self.userDB.getUserRepoPaths(self.getUsername()): return self.writeErrorPage("Access is denied.") try: restoreTime = rdw_helpers.rdwTime() restoreTime.loadFromString(date) (path, file) = os.path.split(path) if not file: file = path path = "/" filePath = librdiff.restoreFile(rdw_helpers.joinPaths(self.userDB.getUserRoot(self.getUsername()), repo), path, file, restoreTime) except librdiff.FileError, error: return self.writeErrorPage(error.getErrorString())
def hasDirRestoreDates(self, testname):
    """Return True when the scenario ships an expected-restore-dates
    fixture (results/dir_restore_dates.txt)."""
    fixturePath = joinPaths(self.masterDirPath, testname,
                            "results", "dir_restore_dates.txt")
    return os.access(fixturePath, os.F_OK)
def getExpectedDirRestoreDates(self, testName):
    """Return the expected restore-dates fixture text for a scenario.

    Reads results/dir_restore_dates.txt under the scenario directory.
    Fix: the file is opened via a context manager so the handle is closed
    deterministically instead of being left to the garbage collector.
    """
    fixturePath = joinPaths(self.masterDirPath, testName,
                            "results", "dir_restore_dates.txt")
    with open(fixturePath) as fixtureFile:
        return fixtureFile.read()
def getExpectedDirEntriesResults(self, testName):
    """Return the expected dir-entries fixture text for a scenario.

    Reads results/dir_entries.txt under the scenario directory.
    Fix: the file is opened via a context manager so the handle is closed
    deterministically instead of being left to the garbage collector.
    """
    fixturePath = joinPaths(self.masterDirPath, testName,
                            "results", "dir_entries.txt")
    with open(fixturePath) as fixtureFile:
        return fixtureFile.read()
def _getFileText(self, testName, templateName):
    """Return the contents of templateName under the scenario's output dir.

    Fix: the file is opened via a context manager so the handle is closed
    deterministically instead of being left to the garbage collector.
    """
    filePath = rdw_helpers.joinPaths(self.destRoot, testName, templateName)
    with open(filePath) as textFile:
        return textFile.read()
def hasIncrementSuffix(self, filename): for suffix in self.suffixes: if filename.endswith(suffix): return True return False def _removeSuffix(self, filename): """ returns None if there was no suffix to remove. """ for suffix in self.suffixes: if filename.endswith(suffix): return filename[0:-len(suffix)] return filename rdiffDataDirName = "rdiff-backup-data" rdiffIncrementsDirName = joinPaths(rdiffDataDirName, "increments") class rdiffDirEntries: """This class is responsible for building a listing of directory entries. All knowledge of how increments work is contained in this class.""" def init(self, repo, dirPath): # Var assignment and validation self.repo = repo self.dirPath = dirPath completePath = joinPaths(repo, dirPath) dataPath = joinPaths(repo, rdiffDataDirName) # cache dir listings self.entries = [] if os.access(completePath, os.F_OK):
def index(self, repo, path, date): try: self.validateUserPath(rdw_helpers.joinPaths(repo, path)) except rdw_helpers.accessDeniedError, error: return self.writeErrorPage(str(error))