def calculateSpaceRequired(self, ctx: ThaniyaBackupContext) -> int:
	"""
	Estimate the number of bytes of I/O the MediaWiki backup will require.

	The estimate covers the wiki root directory, the wiki database directory,
	the cron script and the start-cron script. The error counts returned by
	the size calculation are intentionally ignored here: any problems will
	surface again (and get logged) during the actual backup run.

	:param ctx: the backup context providing logging
	:return: the total expected I/O size in bytes
	"""

	# process root directory
	nErrorsWikiRoot, nSizeWikiRoot = ThaniyaTar.tarCalculateSize(
		ctx,
		jk_pathpatternmatcher2.walk(
			self.__mwHelper.wikiDirPath,
			ignoreDirPathPatterns=self.__ignoreDirPathPatterns))
	ctx.log.info("I/O expected: " + jk_utils.formatBytes(nSizeWikiRoot))

	# process database directory
	# BUGFIX: performBackup() applies self.__ignoreDirPathPatterns to this walk
	# as well; apply it here too so the estimate matches what gets archived.
	nErrorsDBRoot, nSizeDBRoot = ThaniyaTar.tarCalculateSize(
		ctx,
		jk_pathpatternmatcher2.walk(
			self.__mwHelper.wikiDBDirPath,
			ignoreDirPathPatterns=self.__ignoreDirPathPatterns))
	ctx.log.info("I/O expected: " + jk_utils.formatBytes(nSizeDBRoot))

	# process cron script
	nErrorsCronScript, nSizeCronScript = ThaniyaTar.tarCalculateSize(
		ctx,
		jk_pathpatternmatcher2.walk(self.__mwHelper.cronScriptFilePath))
	ctx.log.info("I/O expected: " + jk_utils.formatBytes(nSizeCronScript))

	# process start cron script
	nErrorsStartCronScript, nSizeStartCronScript = ThaniyaTar.tarCalculateSize(
		ctx,
		jk_pathpatternmatcher2.walk(self.__mwHelper.startCronScriptFilePath))
	ctx.log.info("I/O expected: " + jk_utils.formatBytes(nSizeStartCronScript))

	return nSizeWikiRoot + nSizeDBRoot + nSizeCronScript + nSizeStartCronScript
def performBackup(self, ctx: ThaniyaBackupContext):
	"""
	Perform the MediaWiki backup: stop the cron process(es) if they are
	running, archive the wiki root directory and the wiki database directory
	as tar files, then restart the cron process(es).

	NOTE(review): calculateSpaceRequired() also counts the cron script files,
	but they are not archived here — confirm whether that is intentional.

	:param ctx: the backup context providing logging and target path resolution
	"""

	def errorCallback(entry: jk_pathpatternmatcher2.Entry, exception):
		# archiving continues on per-entry errors; they are only logged as warnings
		ctx.log.warn(str(exception))
	#

	# shut down various processes
	bIsRunning = self.__mwHelper.isCronScriptRunning()
	if bIsRunning:
		self.__mwHelper.stopCronScript(ctx.log.descend("Stopping cron process(es) ..."))
	else:
		ctx.log.notice("No cron process(es) need to be stopped and later restarted as they are not running.")

	# BUGFIX: ensure the cron process(es) are restarted even if archiving fails;
	# previously an exception during tar would leave them stopped permanently.
	try:
		# process root directory
		ThaniyaTar.tar(
			ctx=ctx,
			outputTarFilePath=ctx.absPath(self.__mwHelper.wikiDirName + "-wiki.tar"),
			walker=jk_pathpatternmatcher2.walk(
				self.__mwHelper.wikiDirPath,
				ignoreDirPathPatterns=self.__ignoreDirPathPatterns),
			pathMode=EnumTarPathMode.RELATIVE_PATH_WITH_BASE_DIR,
			onErrorCallback=errorCallback,
		)

		# process database directory
		ThaniyaTar.tar(
			ctx=ctx,
			outputTarFilePath=ctx.absPath(self.__mwHelper.wikiDirName + "-sqlite.tar"),
			walker=jk_pathpatternmatcher2.walk(
				self.__mwHelper.wikiDBDirPath,
				ignoreDirPathPatterns=self.__ignoreDirPathPatterns),
			pathMode=EnumTarPathMode.RELATIVE_PATH_WITH_BASE_DIR,
			onErrorCallback=errorCallback,
		)
	finally:
		# restart processes
		if bIsRunning:
			self.__mwHelper.startCronScript(ctx.log.descend("Restarting cron process(es) ..."))
def calculateSpaceRequired(self, ctx: ThaniyaBackupContext) -> int:
	"""
	Estimate the number of bytes of I/O the TYPO3 backup will require:
	the size of the local installation directory plus the expected size
	of a MySQL dump of the site database.

	:param ctx: the backup context providing logging
	:return: the total expected I/O size in bytes
	:raise Exception: if the database is not a local MySQL on the default port
	"""

	# process root directory
	# (renamed from nErrorsWikiRoot: this is TYPO3, not a wiki)
	nErrorsTypo3Root, nSizeTypo3Root = ThaniyaTar.tarCalculateSize(
		ctx,
		jk_pathpatternmatcher2.walk(
			self.__typo3Helper.typo3BaseDirPath,
			ignoreDirPathPatterns=self.__ignoreDirPathPatterns))
	ctx.log.info("I/O expected for the local installation: " + jk_utils.formatBytes(nSizeTypo3Root))

	# process database directory
	dbHost, dbPort, dbName, dbUser, dbPwd = self.__getMySQLDBParameters()
	# BUGFIX: use explicit raises instead of assert — asserts are stripped
	# when Python runs with -O, silently disabling this validation.
	if dbHost not in ("127.0.0.1", "localhost"):
		raise Exception("Only a local MySQL database is supported, got host: " + repr(dbHost))
	if dbPort != 3306:
		raise Exception("Only the default MySQL port 3306 is supported, got port: " + repr(dbPort))

	nSizeDB = ThaniyaMySQL.mySQLDumpCalculateSize(
		ctx=ctx,
		dbName=dbName,
		dbUserName=dbUser,
		dbPassword=dbPwd,
	)
	ctx.log.info("I/O expected for the database: " + jk_utils.formatBytes(nSizeDB))

	return nSizeTypo3Root + nSizeDB
def performBackup(self, ctx: ThaniyaBackupContext):
	"""
	Perform the TYPO3 backup: archive the installation base directory as a
	tar file and write a MySQL dump of the site database.

	:param ctx: the backup context providing logging and target path resolution
	:raise Exception: if the database is not a local MySQL on the default port
	"""

	def errorCallback(entry: jk_pathpatternmatcher2.Entry, exception):
		# archiving continues on per-entry errors; they are only logged as warnings
		ctx.log.warn(str(exception))
	#

	siteName = self.__typo3Helper.getSiteName()

	# process root directory
	ThaniyaTar.tar(
		ctx=ctx,
		outputTarFilePath=ctx.absPath(siteName + "-typo3.tar"),
		walker=jk_pathpatternmatcher2.walk(
			self.__typo3Helper.typo3BaseDirPath,
			ignoreDirPathPatterns=self.__ignoreDirPathPatterns),
		pathMode=EnumTarPathMode.RELATIVE_PATH_WITH_BASE_DIR,
		onErrorCallback=errorCallback,
	)

	# process database directory
	dbHost, dbPort, dbName, dbUser, dbPwd = self.__getMySQLDBParameters()
	# BUGFIX: use explicit raises instead of assert — asserts are stripped
	# when Python runs with -O, silently disabling this validation.
	if dbHost not in ("127.0.0.1", "localhost"):
		raise Exception("Only a local MySQL database is supported, got host: " + repr(dbHost))
	if dbPort != 3306:
		raise Exception("Only the default MySQL port 3306 is supported, got port: " + repr(dbPort))

	ThaniyaMySQL.mySQLDump(
		ctx=ctx,
		dbName=dbName,
		dbUserName=dbUser,
		dbPassword=dbPwd,
		outputDumpFilePath=ctx.absPath(siteName + "-typo3.sql"),
		onErrorCallback=errorCallback,
	)
def calculateSpaceRequired(self, ctx: ThaniyaBackupContext) -> int:
	"""
	Determine how many bytes of I/O backing up the source directory will need.

	:param ctx: the backup context providing logging
	:return: the expected size in bytes
	"""

	dirWalker = jk_pathpatternmatcher2.walk(self.__sourceDirPath)
	nErrors, nSize = ThaniyaTar.tarCalculateSize(ctx=ctx, walker=dirWalker)

	ctx.log.info("I/O expected: " + jk_utils.formatBytes(nSize))
	return nSize
def performBackup(self, ctx: ThaniyaBackupContext):
	"""
	Archive the source directory into the configured tar file within the
	backup target area.

	:param ctx: the backup context providing logging and target path resolution
	"""

	tarFilePath = ctx.absPath(self.__targetFileName)
	dirWalker = jk_pathpatternmatcher2.walk(self.__sourceDirPath)

	ThaniyaTar.tar(
		ctx=ctx,
		outputTarFilePath=tarFilePath,
		walker=dirWalker,
		pathMode=EnumTarPathMode.RELATIVE_PATH_WITH_BASE_DIR,
	)

	ctx.log.info("Backup performed.")
def initializeProcessing(self, ctx: Context):
	"""
	Prepare processing: build the directory walker that enumerates the
	matching files below the base directory (as DiskFile instances, sorted;
	directories, links, base dirs and error entries are suppressed).
	"""

	walkOptions = {
		"emitBaseDirs": False,
		"emitDirs": False,
		"emitErrorEntries": False,
		"emitLinks": False,
		"clazz": DiskFile,
		"acceptFilePathPatterns": self.__filePatterns,
		"sort": True,
	}
	self.__dirWalker = jk_pathpatternmatcher2.walk(self.__baseDirPath, **walkOptions)
def performBackup(self, ctx: ThaniyaBackupContext):
	"""
	Archive the source directory into the configured tar file within the
	backup target area. Per-entry errors encountered while archiving are
	logged as warnings and do not abort the backup.

	:param ctx: the backup context providing logging and target path resolution
	"""

	def _onEntryError(entry: jk_pathpatternmatcher2.Entry, exception):
		# log only; archiving continues with the next entry
		ctx.log.warn(str(exception))
	#

	tarFilePath = ctx.absPath(self.__targetFileName)

	ThaniyaTar.tar(
		ctx=ctx,
		outputTarFilePath=tarFilePath,
		walker=jk_pathpatternmatcher2.walk(self.__sourceDirPath),
		pathMode=EnumTarPathMode.RELATIVE_PATH_WITH_BASE_DIR,
		onErrorCallback=_onEntryError,
	)
def unpackToDir(self, outBaseDirPath: str, sp: Spinner = None):
	"""
	Unpack all files of this group into the specified base directory and
	afterwards remove any extra files/directories found on disk — but only
	within directories whose effective "clean dir" flag is set.

	:param outBaseDirPath: the target base directory to unpack into
	:param sp: an optional spinner for progress output
	:raise Exception: if the directory walker reports an error entry
	"""

	outBaseDirPath = os.path.abspath(outBaseDirPath)

	# unpack everything, recording which paths are legitimate afterwards
	validDirectories = set()
	validDirectories.add(outBaseDirPath)
	validFiles = set()
	for f in self.__files:
		absTargetDirPath, fileName = self.__parent._unpackToDir(f, outBaseDirPath, sp)
		validDirectories.add(absTargetDirPath)
		# every ancestor of a written file is a valid directory as well
		for d in self.__allParentDirs(absTargetDirPath, outBaseDirPath):
			validDirectories.add(d)
		validFiles.add(os.path.join(absTargetDirPath, fileName))

	# scan the target tree for entries that were not produced by the unpack
	dirsToDelete = []
	filesToDelete = []
	for e in jk_pathpatternmatcher2.walk(
			outBaseDirPath,
			acceptDirPathPatterns="**/*",
			acceptFilePathPatterns="**/*",
			acceptLinkPathPatterns="**/*",
		):
		if e.typeID == "d":
			# dir
			if e.fullPath not in validDirectories:
				dirsToDelete.append(e)
		elif e.typeID == "e":
			# error
			raise Exception("ERROR: " + repr(e.exception))
		else:
			# file or link
			if e.fullPath not in validFiles:
				filesToDelete.append(e)

	# delete stray files first, then stray directories
	for e in filesToDelete:
		updir = self.__directories.get(e.relDirPath)
		if updir and updir.getEffectiveCleanDir():
			if sp:
				sp.spin("delete", e.relFilePath, bPrintPercent=False)
			os.unlink(e.fullPath)

	for e in dirsToDelete:
		updir = self.__directories.get(e.relDirPath)
		if updir and updir.getEffectiveCleanDir():
			# BUGFIX: a parent directory removed earlier in this loop takes its
			# children with it; skip entries that no longer exist instead of
			# letting shutil.rmtree() raise FileNotFoundError.
			if not os.path.lexists(e.fullPath):
				continue
			if sp:
				sp.spin("delete", e.relFilePath, bPrintPercent=False)
			shutil.rmtree(e.fullPath)
def compress(compression: str):
	"""
	Pack the contents of SRC_DIR_PATH into an upload pack using the given
	compression and print size and timing statistics.

	:param compression: compression identifier appended as a file suffix
		(e.g. "gz", "xz"); empty/None produces an uncompressed pack
	"""

	outFilePath = OUTPUT_BASE_FILE_NAME + (("." + compression) if compression else "")

	t0 = time.time()

	with jk_uploadpack.Packer(outFilePath, compression) as up:
		# PERF: resolve the file group once instead of calling fileGroup("default")
		# on every iteration; the original code already relies on fileGroup()
		# returning a stable object (the bCleanDir assignment would be lost otherwise).
		fg = up.fileGroup("default")
		fg.bCleanDir = True

		# collect all files first so the spinner knows the total count
		allFiles = []
		for e in jk_pathpatternmatcher2.walk(
				SRC_DIR_PATH,
				acceptDirPathPatterns=None,
				acceptFilePathPatterns="**/*",
				acceptLinkPathPatterns=None,
				ignorePathPatterns="**/__*",
				ignoreDirPathPatterns=None,
				ignoreFilePathPatterns=None,
				ignoreLinkPathPatterns=None,
				emitDirs=False,
				emitFiles=True,
				emitLinks=False,
				emitBaseDirs=False,
				recursive=True,
				sort=True,
				emitErrorEntries=True,
				clazz=None,
				ioAdapter=None,
			):
			allFiles.append((e.fullPath, e.relFilePath))

		sp = jk_terminal_essentials.Spinner(len(allFiles))
		for fullPath, relFilePath in allFiles:
			sp.spin("packing", relFilePath)
			fg.addFile(fullPath, relFilePath)
		sp.hide()

	# NOTE(review): statistics are printed after the packer is closed so the
	# duration includes the final flush — confirm against original layout.
	print()
	print("upload pack", compression if compression else "uncompressed")
	print()
	print("\ttotalSizeLogical =", jk_utils.formatBytes(up.totalSizeLogical))
	print("\ttotalSizeUncompressed =", jk_utils.formatBytes(up.totalSizeUncompressed))
	print("\ttotalSizeCompressed =", jk_utils.formatBytes(up.totalSizeCompressed))
	print("\tduration:", jk_utils.formatTime(time.time() - t0, withMilliseconds=True))
	print()