Example #1
    def calculateSpaceRequired(self, ctx: ThaniyaBackupContext) -> int:
        # process root directory

        nErrorsTypo3Root, nSizeTypo3Root = ThaniyaTar.tarCalculateSize(
            ctx,
            jk_pathpatternmatcher2.walk(
                self.__typo3Helper.typo3BaseDirPath,
                ignoreDirPathPatterns=self.__ignoreDirPathPatterns))
        ctx.log.info("I/O expected for the local installation: " +
                     jk_utils.formatBytes(nSizeTypo3Root))

        # process database directory

        dbHost, dbPort, dbName, dbUser, dbPwd = self.__getMySQLDBParameters()
        assert dbHost in ["127.0.0.1", "localhost"]
        assert dbPort == 3306

        nSizeDB = ThaniyaMySQL.mySQLDumpCalculateSize(
            ctx=ctx,
            dbName=dbName,
            dbUserName=dbUser,
            dbPassword=dbPwd,
        )

        ctx.log.info("I/O expected for the database: " +
                     jk_utils.formatBytes(nSizeDB))

        return nSizeTypo3Root + nSizeDB
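For readers without jk_utils at hand, a minimal stand-in for the byte formatting used throughout these examples could look like the sketch below; the real jk_utils.formatBytes() may differ in rounding and unit labels.

def formatBytesSketch(n: int) -> str:
    # Step up the 1024-based unit ladder, mimicking the compact "K"/"M"/"G"
    # suffix style suggested by the examples; this is an approximation only.
    value = float(n)
    for unit in ("", "K", "M", "G", "T"):
        if value < 1024 or unit == "T":
            return ("%d" % value) if unit == "" else ("%.1f%s" % (value, unit))
        value /= 1024

print(formatBytesSketch(123))            # -> 123
print(formatBytesSketch(5 * 1024 ** 2))  # -> 5.0M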
Example #2
    def calculateSpaceRequired(self, ctx: ThaniyaBackupContext) -> int:
        # process root directory

        nErrorsWikiRoot, nSizeWikiRoot = ThaniyaTar.tarCalculateSize(
            ctx,
            jk_pathpatternmatcher2.walk(
                self.__mwHelper.wikiDirPath,
                ignoreDirPathPatterns=self.__ignoreDirPathPatterns))
        ctx.log.info("I/O expected: " + jk_utils.formatBytes(nSizeWikiRoot))

        # process database directory

        nErrorsDBRoot, nSizeDBRoot = ThaniyaTar.tarCalculateSize(
            ctx, jk_pathpatternmatcher2.walk(self.__mwHelper.wikiDBDirPath))
        ctx.log.info("I/O expected: " + jk_utils.formatBytes(nSizeDBRoot))

        # process cron script

        nErrorsCronScript, nSizeCronScript = ThaniyaTar.tarCalculateSize(
            ctx,
            jk_pathpatternmatcher2.walk(self.__mwHelper.cronScriptFilePath))
        ctx.log.info("I/O expected: " + jk_utils.formatBytes(nSizeCronScript))

        # process start cron script

        nErrorsStartCronScript, nSizeStartCronScript = ThaniyaTar.tarCalculateSize(
            ctx,
            jk_pathpatternmatcher2.walk(
                self.__mwHelper.startCronScriptFilePath))
        ctx.log.info("I/O expected: " +
                     jk_utils.formatBytes(nSizeStartCronScript))

        return nSizeWikiRoot + nSizeDBRoot + nSizeCronScript + nSizeStartCronScript
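The four near-identical blocks above differ only in the tree being walked; a sketch of the same method folded into a loop (helper names taken from the example, with the first walker keeping its ignoreDirPathPatterns argument):

    def calculateSpaceRequired(self, ctx: ThaniyaBackupContext) -> int:
        total = 0
        for walker in (
                jk_pathpatternmatcher2.walk(
                    self.__mwHelper.wikiDirPath,
                    ignoreDirPathPatterns=self.__ignoreDirPathPatterns),
                jk_pathpatternmatcher2.walk(self.__mwHelper.wikiDBDirPath),
                jk_pathpatternmatcher2.walk(
                    self.__mwHelper.cronScriptFilePath),
                jk_pathpatternmatcher2.walk(
                    self.__mwHelper.startCronScriptFilePath),
        ):
            # error counts are discarded here, as in the original
            _, nSize = ThaniyaTar.tarCalculateSize(ctx, walker)
            ctx.log.info("I/O expected: " + jk_utils.formatBytes(nSize))
            total += nSize
        return total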
Example #3
#!/usr/bin/python3

import time

import jk_utils
import jk_terminal_essentials
import jk_uploadpack
import jk_pathpatternmatcher2

SRC_DIR_PATH = "src"                  # assumed; not defined in the original excerpt
OUTPUT_BASE_FILE_NAME = "pack.up"     # assumed; not defined in the original excerpt


def compress(compression: str):
    outFilePath = OUTPUT_BASE_FILE_NAME + (
        ("." + compression) if compression else "")

    t0 = time.time()

    with jk_uploadpack.Packer(outFilePath, compression) as up:
        up.fileGroup("default").bCleanDir = True

        allFiles = []
        for e in jk_pathpatternmatcher2.walk(
                SRC_DIR_PATH,
                acceptDirPathPatterns=None,
                acceptFilePathPatterns="**/*",
                acceptLinkPathPatterns=None,
                ignorePathPatterns="**/__*",
                ignoreDirPathPatterns=None,
                ignoreFilePathPatterns=None,
                ignoreLinkPathPatterns=None,
                emitDirs=False,
                emitFiles=True,
                emitLinks=False,
                emitBaseDirs=False,
                recursive=True,
                sort=True,
                emitErrorEntries=True,
                clazz=None,
                ioAdapter=None,
        ):

            allFiles.append((e.fullPath, e.relFilePath))

        sp = jk_terminal_essentials.Spinner(len(allFiles))
        for fullPath, relFilePath in allFiles:
            sp.spin("packing", relFilePath)
            up.fileGroup("default").addFile(fullPath, relFilePath)

    sp.hide()

    print()
    print("upload pack", compression if compression else "uncompressed")
    print()
    print("\ttotalSizeLogical =", jk_utils.formatBytes(up.totalSizeLogical))
    print("\ttotalSizeUncompressed =",
          jk_utils.formatBytes(up.totalSizeUncompressed))
    print("\ttotalSizeCompressed =",
          jk_utils.formatBytes(up.totalSizeCompressed))
    print("\tduration:",
          jk_utils.formatTime(time.time() - t0, withMilliseconds=True))
    print()
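A hypothetical driver for compress() might look like this; which compression names jk_uploadpack.Packer actually accepts is an assumption here:

if __name__ == "__main__":
    for c in ["", "gz", "xz"]:      # "" means uncompressed in compress() above
        compress(c)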
Example #4
    def __perform_calcDiskSpaceRequired(
            self, bd2: BD2,
            backupTasks: typing.List[AbstractThaniyaTask]) -> int:
        with ProcessingContext(text="Calculating disk space required",
                               bd2=bd2,
                               bMeasureDuration=True,
                               statsDurationKey="d0_calcDiskSpace") as ctx:
            nExpectedBytesToWrite = 0
            for job in backupTasks:
                #assert isinstance(job, AbstractThaniyaTask)
                Assert.isInstance(job, AbstractThaniyaTask)

                nestedCtx = ctx.descend(job.logMessageCalculateSpaceRequired)
                with nestedCtx.log as nestedLog:
                    nExpectedBytesToWrite += job.calculateSpaceRequired(
                        nestedCtx)

            ctx.log.info("Estimated total size of backup: " +
                         jk_utils.formatBytes(nExpectedBytesToWrite))

            bd2.statsContainer.setValue("expectedBytesToWrite",
                                        nExpectedBytesToWrite)

            # ----

            ctx.log.notice("Done.")

        return nExpectedBytesToWrite

    def calculateSpaceRequired(self, ctx: ThaniyaBackupContext) -> int:
        nErrors, nSize = ThaniyaTar.tarCalculateSize(
            ctx=ctx, walker=jk_pathpatternmatcher2.walk(self.__sourceDirPath))

        ctx.log.info("I/O expected: " + jk_utils.formatBytes(nSize))

        return nSize
Example #6
File: Cat.py  Project: jkpubsrc/PyPine
    def processElement(self, ctx: Context, f):
        ctx.printDetail(self, "=" * 120)

        if f is None:
            ctx.printDetail(self, "(none)")

        else:
            ctx.printDetail(self,
                            f.__class__.__name__ + ": " + f.relFilePath)
            ctx.printDetail(self,
                            "Size: " + jk_utils.formatBytes(f.getFileSize()))
            ctx.printDetail(self, "isBinary: " + str(f.isBinary))
            ctx.printDetail(self, "isText: " + str(f.isText))
            ctx.printDetail(self, "-" * 120)

            if getattr(f, "isText", False):
                text = f.readText()
                for s in text.split("\n"):
                    ctx.printDetail(self, s)

            elif getattr(f, "isBinary", False):
                hexRowSize = 16

                raw = f.readBinary()
                sRawHex = raw.hex()
                n = len(raw)
                i = 0
                bufHex = []
                bufASCII = []
                while i < n:
                    addr = i.to_bytes(4, byteorder="big").hex()
                    for j in range(0, hexRowSize):
                        iPos1 = i + j
                        iPos2 = iPos1 * 2

                        if iPos1 < n:
                            bufHex.append(sRawHex[iPos2:iPos2 + 2])
                            c = raw[iPos1]
                            if 32 <= c <= 126:
                                bufASCII.append(chr(c))
                            else:
                                bufASCII.append(".")
                        else:
                            bufHex.append("  ")
                            bufASCII.append(" ")

                    s = addr + "    " + " ".join(bufHex) + "    |" + "".join(
                        bufASCII) + "|"
                    ctx.printDetail(self, s)

                    i += hexRowSize
                    bufHex.clear()
                    bufASCII.clear()

            else:
                ctx.printDetail(self, "(unknown content)")

        ctx.printDetail(self, "=" * 120)
        return f
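The hex dump loop above can be distilled into a small standalone helper; this sketch reproduces the same row layout (address, hex columns, ASCII gutter) using only the standard library:

def hexDump(raw: bytes, rowSize: int = 16):
    # Emit one line per rowSize bytes: "<addr>    <hex bytes>    |<ascii>|"
    for i in range(0, len(raw), rowSize):
        chunk = raw[i:i + rowSize]
        addr = i.to_bytes(4, byteorder="big").hex()
        hexCols = ["%02x" % b for b in chunk] + ["  "] * (rowSize - len(chunk))
        asciiCols = "".join(chr(b) if 32 <= b <= 126 else "." for b in chunk)
        asciiCols += " " * (rowSize - len(chunk))
        yield addr + "    " + " ".join(hexCols) + "    |" + asciiCols + "|"

for line in hexDump(b"Hello, \x00 world! This is a hex dump test."):
    print(line)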
Example #7
    def getFileSize(ctx: ThaniyaBackupContext, filePath: str) -> int:
        assert isinstance(ctx, ThaniyaBackupContext)
        assert isinstance(filePath, str)
        assert filePath
        assert os.path.isabs(filePath)
        assert os.path.isfile(filePath)

        with ctx.descend("Getting size of file: " + repr(filePath)) as ctx:
            n = os.path.getsize(filePath)
            ctx.log.notice("Size of " + repr(filePath) + ": " +
                           jk_utils.formatBytes(n))
            return n

    def getDirTreeSize(ctx: ThaniyaBackupContext, dirPath: str) -> int:
        assert isinstance(ctx, ThaniyaBackupContext)
        assert isinstance(dirPath, str)
        assert dirPath
        assert os.path.isabs(dirPath)
        assert os.path.isdir(dirPath)

        ctx = ctx.descend("Calculating size of directory: " + repr(dirPath))
        with ctx.log as nestedLog:
            n = jk_utils.fsutils.getFolderSize(dirPath)
            nestedLog.notice("Size of " + repr(dirPath) + ": " +
                             jk_utils.formatBytes(n))
            return n

    def getSizeOfDevice(ctx: ThaniyaBackupContext,
                        devicePath: str) -> typing.Union[int, None]:
        assert devicePath.startswith("/dev/")

        with ctx.log as nestedLog:
            try:
                fd = os.open(devicePath, os.O_RDONLY)
                try:
                    n = os.lseek(fd, 0, os.SEEK_END)
                    nestedLog.notice("Size: " + jk_utils.formatBytes(n))
                    return n
                finally:
                    os.close(fd)
            except OSError as ee:
                raise Exception("Failed to access device: " +
                                devicePath) from ee
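The lseek-to-end trick used by getSizeOfDevice() also works for regular files; a self-contained sketch:

import os

def sizeViaLseek(path: str) -> int:
    # Seeking to SEEK_END returns the offset of the end of the file or block
    # device, i.e. its size in bytes, without reading any data.
    fd = os.open(path, os.O_RDONLY)
    try:
        return os.lseek(fd, 0, os.SEEK_END)
    finally:
        os.close(fd)

print(sizeViaLseek("/etc/hostname"))    # any readable file or device path works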
Example #10
    def __perform_backup(self, bd2: BD2,
                         backupTasks: typing.List[AbstractThaniyaTask]):

        # NOTE: we need to access this context later as it calculates the duration and we need this information separately to log it.
        processingContext = ProcessingContext(text="Writing the backup data",
                                              bd2=bd2,
                                              bMeasureDuration=True,
                                              statsDurationKey="d2_backup")

        with processingContext as ctx:

            for job in backupTasks:
                Assert.isInstance(job, AbstractThaniyaTask)

                with ctx.descend(job.logMessagePerformBackup) as nestedCtx:
                    job.performBackup(nestedCtx)

            ctx.log.notice("All backup tasks completed.")

            # calculate statistics

            with ctx.log.descend(
                    "Calculating size of backup performed ...") as nestedLog:
                nTotalBytesWritten = jk_utils.fsutils.getFolderSize(
                    bd2.effectiveTargetDirPath)

            fDuration = processingContext.duration
            if (nTotalBytesWritten > 0) and (fDuration > 0):
                fAvgWritingSpeed = nTotalBytesWritten / fDuration
                sAvgWritingSpeed = jk_utils.formatBytesPerSecond(
                    fAvgWritingSpeed)
            else:
                fAvgWritingSpeed = None
                sAvgWritingSpeed = "n/a"

            ctx.log.info("Total bytes written: " +
                         jk_utils.formatBytes(nTotalBytesWritten))
            ctx.log.info("Average writing speed: " + sAvgWritingSpeed)

            bd2.statsContainer.setValue("totalBytesWritten",
                                        nTotalBytesWritten)
            bd2.statsContainer.setValue("avgWritingSpeed", fAvgWritingSpeed)

            # ----

            ctx.log.notice("Done.")

    def __getDirTreeSize(self, dirPath: str,
                         log: jk_logging.AbstractLogger) -> int:
        assert dirPath
        assert os.path.isabs(dirPath)
        assert os.path.isdir(dirPath)

        nestedLog = log.descend("Calculating size of directory: " +
                                repr(dirPath))

        try:
            n = jk_utils.fsutils.getFolderSize(dirPath)

        except Exception as ee:
            nestedLog.exception(ee)
            raise

        nestedLog.notice("Size of " + repr(dirPath) + ": " +
                         jk_utils.formatBytes(n))

        return n

def _formatBytes(v: float) -> str:
    # Negative values are used by callers as an "n/a" marker.
    if v < 0:
        return "---"
    # Assumes jk_utils.formatBytes() ends with a one-letter unit (e.g. "3.5K");
    # insert a space before the unit letter and append "B" -> "3.5 KB".
    s = jk_utils.formatBytes(v)
    return s[:-1] + " " + s[-1] + "B"
#!/usr/bin/python3

import time
import os
import typing

import jk_utils
import jk_terminal_essentials
import jk_uploadpack

INPUT_FILE_NAME = "pack.up"

with jk_uploadpack.Unpacker(INPUT_FILE_NAME) as up:
    sp = jk_terminal_essentials.Spinner(len(up.fileGroup("default").files))
    up.fileGroup("default").unpackToDir("out", sp)

sp.hide()

print()
print("totalSizePacked =", jk_utils.formatBytes(up.totalSizePacked))
print("totalSizeUnpacked =", jk_utils.formatBytes(up.totalSizeUnpacked))
print()
Example #14
    def calculateSpaceRequired(self, ctx: ThaniyaBackupContext) -> int:
        nSize = ThaniyaIO.getSizeOfDevice(ctx, self.__devicePath)
        ctx.log.info("I/O expected: " + jk_utils.formatBytes(nSize))
        return nSize

    def performBackup(self, backupTasks: list, bSimulate: bool):

        for x in backupTasks:
            assert isinstance(x, AbstractThaniyaTask)
            #Assert.isInstance(x, AbstractThaniyaTask)

        mainLog = jk_logging.MulticastLogger.create(
            jk_logging.ConsoleLogger.create(
                logMsgFormatter=jk_logging.COLOR_LOG_MESSAGE_FORMATTER),
            jk_logging.BufferLogger.create())

        bError = False
        try:

            statsContainer = {
                "tStart": time.time(),
                "tEnd": None,
                "success": None,
                "expectedBytesToWrite": None,
                "totalBytesWritten": None,
                "avgWritingSpeed": None,
                "simulate": bSimulate,
            }

            effectiveTargetDirPath = None

            with ProcessingContext(
                    "Performing backup simulation" if bSimulate else
                    "Performing backup", None, mainLog) as ctxMain:

                # --------------------------------------------------------------------------------------------------------------------------------
                # >>>> estimate the number of bytes we will likely have to write for this backup

                with ProcessingContext(
                        text="Calculating disk space required",
                        targetDirPath=None,
                        log=ctxMain.log,
                        bMeasureDuration=True,
                        statsContainer=statsContainer,
                        statsDurationKey="d0_calcDiskSpace") as ctx:

                    nExpectedBytesToWrite = 0
                    for job in backupTasks:
                        assert isinstance(job, AbstractThaniyaTask)
                        #Assert.isInstance(job, AbstractThaniyaTask)

                        nestedCtx = ctx.descend(
                            job.logMessageCalculateSpaceRequired)
                        with nestedCtx.log as nestedLog:
                            nExpectedBytesToWrite += job.calculateSpaceRequired(
                                nestedCtx)

                    ctx.log.info("Estimated total size of backup: " +
                                 jk_utils.formatBytes(nExpectedBytesToWrite))

                    statsContainer[
                        "expectedBytesToWrite"] = nExpectedBytesToWrite

                # --------------------------------------------------------------------------------------------------------------------------------
                # >>>> now connect to the backup repository

                with ProcessingContext(
                        text="Connecting to backup repository and preparing backup",
                        targetDirPath=None,
                        log=ctxMain.log,
                        bMeasureDuration=True,
                        statsContainer=statsContainer,
                        statsDurationKey="d1_connectAndPrepare") as ctx:
                    # check if there is a suitable directory where we can mount the remote file system

                    ThaniyaIO.checkThatDirExists(ctx, self.__mountDirPath)
                    ThaniyaIO.ensureDirMode(ctx, self.__mountDirPath,
                                            jk_utils.ChModValue("rwx------"))

                    # mount the remote file system

                    self.__backupConnector.initialize(
                        ctx, self.__mountDirPath, nExpectedBytesToWrite,
                        self.__backupConnectorParameters)

                    if not self.__backupConnector.isReady:
                        raise Exception(
                            "Backup client unexpectedly not ready for writing!"
                        )

                    # select the target directory where we will store the data. the variable "effectiveTargetDirPath"
                    # will receive the directory selected by the target directory strategy. we will write data there.

                    effectiveTargetDirPath = self.__targetDirStrategy.selectEffectiveTargetDirectory(
                        self.__mountDirPath)
                    ctx.log.info("Selected target directory: " +
                                 repr(effectiveTargetDirPath))

                    # verify that we have the correct directory: the "effectiveTargetDirPath" must be located somewhere within
                    # the mounted directory tree.

                    if effectiveTargetDirPath.endswith("/"):
                        effectiveTargetDirPath2 = effectiveTargetDirPath
                    else:
                        effectiveTargetDirPath2 = effectiveTargetDirPath + "/"
                    assert effectiveTargetDirPath2[:len(
                        self.__mountDirPath2)] == self.__mountDirPath2

                    ctx.log.notice("Creating subdirectories if necessary ...")
                    ThaniyaIO.ensureDirExists(ctx, effectiveTargetDirPath,
                                              jk_utils.ChModValue("rwx------"))

                    # check that the target directory fits our requirements: it must be empty.

                    bIsEmpty, contentEntries = ThaniyaIO.checkIfDirIsEmpty(
                        ctx, effectiveTargetDirPath)
                    if not bIsEmpty:
                        print(contentEntries)
                        if STATS_JSON_FILE_NAME in contentEntries:
                            # target directory already seems to contain a backup
                            ctx.log.warn(
                                "Target directory already seems to contain a backup: "
                                + effectiveTargetDirPath2)
                            ctx.log.warn("Overwriting this backup.")
                        else:
                            raise Exception(
                                "Backup directory contains various non-backup files or directories!"
                            )

                    # now we are ready. but before we begin doing something let's write the backup stats first.

                    jk_json.saveToFilePretty(
                        statsContainer,
                        os.path.join(effectiveTargetDirPath,
                                     STATS_JSON_FILE_NAME))

                    # ----

                    ctx.log.notice("Done.")

                # --------------------------------------------------------------------------------------------------------------------------------
                # >>>> Writing the backup data

                if not bSimulate:
                    with ProcessingContext(
                            text="Writing the backup data",
                            targetDirPath=effectiveTargetDirPath,
                            log=ctxMain.log,
                            bMeasureDuration=True,
                            statsContainer=statsContainer,
                            statsDurationKey="d2_backup") as ctx:

                        for job in backupTasks:
                            assert isinstance(job, AbstractThaniyaTask)
                            #Assert.isInstance(job, AbstractThaniyaTask)

                            nestedCtx = ctx.descend(
                                job.logMessagePerformBackup)
                            with nestedCtx.log as nestedLog:
                                job.performBackup(nestedCtx)

                        nTotalBytesWritten = self.__getDirTreeSize(
                            effectiveTargetDirPath, ctx.log)
                        fDuration = ctx.duration
                        if (nTotalBytesWritten > 0) and (fDuration > 0):
                            fAvgWritingSpeed = nTotalBytesWritten / fDuration
                            sAvgWritingSpeed = jk_utils.formatBytesPerSecond(
                                fAvgWritingSpeed)
                        else:
                            fAvgWritingSpeed = None
                            sAvgWritingSpeed = "n/a"

                        ctx.log.info("Total bytes written: " +
                                     jk_utils.formatBytes(nTotalBytesWritten))
                        ctx.log.info("Average writing speed: " +
                                     sAvgWritingSpeed)

                        statsContainer[
                            "totalBytesWritten"] = nTotalBytesWritten
                        statsContainer["avgWritingSpeed"] = fAvgWritingSpeed

        except ProcessingFallThroughError:
            bError = True
        except Exception as ee:
            bError = True
            mainLog.error(ee)

        # --------------------------------------------------------------------------------------------------------------------------------
        # >>>> Finish

        try:
            # detecting errors

            detectionLogger = self.__analyseLogMessages(mainLog)
            if (detectionLogger.hasError() or detectionLogger.hasStdErr()
                    or detectionLogger.hasException()):
                bError = True

            # writing final status log message

            if bError:
                mainLog.error("Backup terminated erroneously.")
            else:
                mainLog.success("Backup successfully completed.")

            if effectiveTargetDirPath is not None:
                # let's try to write the backup stats before termination.

                statsContainer["tEnd"] = time.time()
                statsContainer["success"] = not bError

                jk_json.saveToFilePretty(
                    statsContainer,
                    os.path.join(effectiveTargetDirPath, STATS_JSON_FILE_NAME))

                # let's try to write the backup log before termination.

                bufferLogger = self.__getBufferLogger(mainLog)
                self.__writeLogToFiles(
                    bufferLogger,
                    os.path.join(effectiveTargetDirPath,
                                 PLAINTEXT_LOG_FILE_NAME),
                    os.path.join(effectiveTargetDirPath, JSON_LOG_FILE_NAME))

        except ProcessingFallThroughError:
            bError = True
        except Exception as ee:
            bError = True
            mainLog.error(ee)

        # terminate connection

        try:
            with ProcessingContext("Terminating connection", None,
                                   mainLog) as ctxMain:
                self.__backupConnector.deinitialize(ctxMain, bError,
                                                    statsContainer)

        except ProcessingFallThroughError:
            bError = True
        except Exception as ee:
            bError = True
            mainLog.error(ee)
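Piecing together the keys set in the code above, the stats JSON file (STATS_JSON_FILE_NAME) plausibly ends up with a shape like the following; all concrete values here are invented for illustration:

exampleStats = {
    "tStart": 1700000000.0,            # time.time() at start
    "tEnd": 1700000123.4,
    "success": True,
    "expectedBytesToWrite": 1073741824,
    "totalBytesWritten": 1069547520,
    "avgWritingSpeed": 8667968.5,      # bytes per second, None if unknown
    "simulate": False,
    "d0_calcDiskSpace": 2.5,           # assumed: durations stored under the
    "d1_connectAndPrepare": 4.0,       # statsDurationKey names passed to
    "d2_backup": 116.9,                # ProcessingContext
}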
#!/usr/bin/python3

import os

import jk_utils

DIRECTORY_TO_SCAN = os.path.abspath("..")

print("Scanning:", DIRECTORY_TO_SCAN)
n = jk_utils.fsutils.getFolderSize(DIRECTORY_TO_SCAN)
print(n)
print(jk_utils.formatBytes(n))

n = jk_utils.fsutils.getFolderSize(DIRECTORY_TO_SCAN, mode="exact")
print(n)
print(jk_utils.formatBytes(n))
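As a cross-check for jk_utils.fsutils.getFolderSize(), a plain os.walk() based implementation; this is a sketch and may treat symlinks and special files differently than jk_utils does:

def folderSizeViaWalk(dirPath: str) -> int:
    total = 0
    for root, dirNames, fileNames in os.walk(dirPath):
        for name in fileNames:
            p = os.path.join(root, name)
            if not os.path.islink(p):    # skip symlinks to avoid double counting
                total += os.path.getsize(p)
    return total

n = folderSizeViaWalk(DIRECTORY_TO_SCAN)
print(n)
print(jk_utils.formatBytes(n))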