def ensureDirExists(ctx: ThaniyaBackupContext,
                    dirPath: str,
                    dirMode: typing.Union[int, str,
                                          jk_utils.ChModValue] = None):
    """
    Ensure that the specified directory exists, creating it (and any missing
    parent directories) if necessary, and optionally apply a mode afterwards.

    @param ctx      The backup context used for logging; a nested context is
                    created for this operation.
    @param dirPath  Absolute path of the directory to create/verify.
    @param dirMode  Optional mode to apply: an int, a chmod-style string, or
                    a jk_utils.ChModValue instance.
    """
    assert isinstance(ctx, ThaniyaBackupContext)
    assert isinstance(dirPath, str)
    assert dirPath
    assert os.path.isabs(dirPath)

    if dirMode is not None:
        # normalize every accepted representation to a ChModValue
        if isinstance(dirMode, (int, str)):
            dirMode = jk_utils.ChModValue(dirMode)
        elif not isinstance(dirMode, jk_utils.ChModValue):
            raise Exception("dirMode is invalid!")

    ctx = ctx.descend("Ensuring directory exists: " + repr(dirPath))
    with ctx.log as nestedLog:
        if not os.path.isdir(dirPath):
            os.makedirs(dirPath)

        if dirMode is not None:
            nestedLog.notice("Setting directory mode: " + dirMode.toStr())
            os.chmod(dirPath, dirMode.toInt())
示例#2
0
    def __init__(self):
        # owner-only permissions: directories rwx------, files rw-------
        modeDir = jk_utils.ChModValue("rwx------")
        modeFile = jk_utils.ChModValue("rw-------")

        self.__chmodValueDir = modeDir
        self.__chmodValueDirI = modeDir.toInt()
        self.__chmodValueFile = modeFile
        self.__chmodValueFileI = modeFile.toInt()

        # spool data in 256 KiB chunks, without automatic flushing
        self.__spoolChunkSize = 256 * 1024
        self.__bAutoFlush = False
示例#3
0
def writeTempFile(fileMode:typing.Union[jk_utils.ChModValue,int,str], text:str) -> str:
	"""
	Create a file with a random 32-character name under "/tmp/", write <text>
	into it and return its path. The file is created with the specified mode.

	@param fileMode  Mode for the new file: int, chmod-style string or
	                 jk_utils.ChModValue.
	@param text      Text content to write.
	@return          Absolute path of the newly created file.
	"""
	assert isinstance(text, str)

	if isinstance(fileMode, jk_utils.ChModValue):
		pass
	elif isinstance(fileMode, (int, str)):
		fileMode = jk_utils.ChModValue(fileMode)
	else:
		raise Exception("File mode must be int, str or jk_utils.ChModValue!")

	# ----

	characters = string.ascii_letters + string.digits
	nLength = 32
	baseFilePath = "/tmp/"

	# O_EXCL makes the creation atomic: instead of the racy
	# exists()-then-create pattern (TOCTOU) we simply retry with a fresh
	# random name if the file already exists.
	while True:
		filePath = baseFilePath + "".join(random.choice(characters) for i in range(0, nLength))
		try:
			fd = os.open(filePath, os.O_CREAT | os.O_EXCL | os.O_WRONLY, fileMode.toInt())
			break
		except FileExistsError:
			continue

	with open(fd, "w") as f:
		f.write(text)

	return filePath
示例#4
0
    def __perform_initialize(self, bd2: BD2,
                             nExpectedBytesToWrite: typing.Union[int, None]):
        """
        Connect to the backup repository and prepare everything for writing:
        initialize the connector, verify the mount state, select and create
        the effective target directory and write the initial stats file.

        @param bd2                    The backup descriptor object to fill in.
        @param nExpectedBytesToWrite  Estimated backup size in bytes; if None
                                      a small default of 1024 is assumed.
        """
        with ProcessingContext(
                text="Connecting to backup repository and preparing backup",
                bd2=bd2,
                bMeasureDuration=True,
                statsDurationKey="d1_connectAndPrepare") as ctx:
            # mount the remote file system

            if nExpectedBytesToWrite is None:
                nExpectedBytesToWrite = 1024

            with ctx.descend("Initializing connection ...") as ctx2:
                self.__backupConnector.initialize(
                    ctx2, nExpectedBytesToWrite,
                    self.__backupConnectorParameters)

            if self.__backupConnector.performsMountUnmount:
                # connector performs mounting and unmounting
                assert self.__backupConnector.mountDirPath is not None
                # remember mount path
                bd2.mountDirPath = self.__backupConnector.mountDirPath
            else:
                # no mounting -> mount directory should be None
                assert self.__backupConnector.mountDirPath is None

            assert self.__backupConnector.baseTargetDirPath is not None
            bd2.baseTargetDirPath = self.__backupConnector.baseTargetDirPath

            if not self.__backupConnector.isReady:
                raise Exception(
                    "Backup connector unexpectedly not ready for writing!")

            # select the target directory where we will store the data. the variable "effectiveTargetDirPath"
            # will receive the directory selected by the target directory strategy. we will write data there.
            # verify that we have the correct directory: the "effectiveTargetDirPath" must be located somewhere within
            # the mounted directory tree.
            # check that the target directory fits our requirements: it must be empty.

            bd2.effectiveTargetDirPath = self.__buildAndCheckEffectiveTargetDirPath(
                ctx, bd2, True)

            # ensure that the directory exists

            ThaniyaIO.ensureDirExists(ctx, bd2.effectiveTargetDirPath,
                                      jk_utils.ChModValue("rwx------"))

            # now we are ready. but before we begin doing something let's write the backup stats first.

            filePath = os.path.join(bd2.effectiveTargetDirPath,
                                    STATS_JSON_FILE_NAME)
            ctx.log.notice("Writing to: " + filePath)
            bd2.statsContainer.writeToFile(filePath)

            # ----

            ctx.log.notice("Done.")
示例#5
0
	def __init__(self, baseDirPath:str):
		"""
		Create a private mount point: a fresh temporary directory below
		<baseDirPath> containing a randomly named mount directory, both with
		owner-only permissions.

		@param baseDirPath  Path of an existing directory to create the
		                    temporary directory in.
		"""
		assert isinstance(baseDirPath, str)
		assert os.path.isdir(baseDirPath)

		# mkdtemp creates the directory readable/writable by the owner only
		self.__tempDirPath = tempfile.mkdtemp(".tmp", "tmp", baseDirPath)

		# build a random 32-character name for the mount directory
		tempDirName = "".join([ random.choice(MountPointRandom.__CHARS) for x in range(0, 32) ])
		self.__mountDirPath = os.path.join(self.__tempDirPath, tempDirName)
		os.mkdir(self.__mountDirPath, jk_utils.ChModValue("rwx------").toInt())

		# verify the modes; 16384 == 0o040000, the st_mode directory type bit.
		# NOTE(review): os.mkdir's mode argument is subject to the process
		# umask, so the second assertion may fail under an unusual umask —
		# confirm this is acceptable.
		assert os.stat(self.__tempDirPath).st_mode == jk_utils.ChModValue("rwx------").toInt() | 16384
		assert os.stat(self.__mountDirPath).st_mode == jk_utils.ChModValue("rwx------").toInt() | 16384

		# remove the temporary directory automatically if this object is
		# garbage collected without an explicit cleanup
		self._finalizer = weakref.finalize(
			self,
			self.__cleanup,
			self.__tempDirPath,
			warn_message="Implicitly cleaning up {!r}".format(self))
示例#6
0
    def writeToLocal(self, bForceWrite: bool = False) -> str:
        """
        Write this configuration to the current user's private configuration
        file and return the file path. An existing file is only overwritten
        if <bForceWrite> is specified.
        """
        cfgFilePath = os.path.join(jk_utils.users.getUserHome(),
                                   ".config/thaniya/cfg-client.jsonc")
        if os.path.isfile(cfgFilePath) and not bForceWrite:
            raise Exception(
                "Configuration file already exists and 'bForceWrite' was not specified: "
                + cfgFilePath)

        # owner-only modes for the configuration directory and file
        modeDir = jk_utils.ChModValue(userR=True, userW=True, userX=True)
        modeFile = jk_utils.ChModValue(userR=True, userW=True)
        iDirMode = modeDir.toInt()
        iFileMode = modeFile.toInt()

        dirPath = os.path.dirname(cfgFilePath)
        os.makedirs(dirPath, iDirMode, exist_ok=True)
        os.chmod(dirPath, iDirMode)

        jk_json.saveToFilePretty(self.toJSON(), cfgFilePath)
        os.chmod(cfgFilePath, iFileMode)

        return cfgFilePath
示例#7
0
    def ensureDirMode(ctx: ThaniyaBackupContext, dirPath: str,
                      dirMode: typing.Union[int, str, jk_utils.ChModValue]):
        """
        Apply the specified mode to an existing directory.

        @param ctx      The backup context used for logging.
        @param dirPath  Absolute path of an existing directory.
        @param dirMode  Mode to apply: an int, a chmod-style string, or a
                        jk_utils.ChModValue instance.
        """
        assert isinstance(ctx, ThaniyaBackupContext)
        assert isinstance(dirPath, str)
        assert dirPath
        assert os.path.isabs(dirPath)

        # normalize every accepted representation to a ChModValue
        if isinstance(dirMode, (int, str)):
            dirMode = jk_utils.ChModValue(dirMode)
        elif not isinstance(dirMode, jk_utils.ChModValue):
            raise Exception("dirMode is invalid!")

        # use a distinct name for the nested context so the parameter "ctx"
        # is not shadowed
        with ctx.descend("Ensuring mode for directory: " +
                         repr(dirPath)) as ctx2:
            if not os.path.isdir(dirPath):
                raise Exception("No such directory: " + dirPath)

            ctx2.log.notice("Setting directory mode: " + dirMode.toStr())
            os.chmod(dirPath, dirMode.toInt())
示例#8
0
    def load():
        """
        Load the client configuration from the first existing candidate file,
        preferring the user's home over the system-wide location. Raises an
        exception if no configuration file is found.
        """
        candidates = (
            os.path.join(jk_utils.users.getUserHome(),
                         ".config/thaniya/cfg-client.jsonc"),
            "/etc/thaniya/cfg-client.jsonc",
        )

        for p in candidates:
            if not os.path.isfile(p):
                continue

            cfg = ThaniyaClientCfg.loadFromFile(p)

            # ensure that this file will always be private
            os.chmod(p, jk_utils.ChModValue(userR=True, userW=True).toInt())

            return cfg

        raise Exception("No configuration file found!")
    def __init__(
        self,
        fileGroup,
        relDirPath: str,
        user: typing.Union[str, None],
        group: typing.Union[str, None],
        mode: typing.Union[int, str, None],
        bCleanDir: typing.Union[bool, None],
        files: list,
    ):
        """
        Represents a directory entry within an upload file group.
        """
        assert fileGroup.__class__.__name__ == "UPFileGroup"
        self.__fileGroup = fileGroup

        # the relative path must not contain separator-reserved characters
        # and must not end with a slash
        assert isinstance(relDirPath, str)
        assert ":" not in relDirPath
        assert "|" not in relDirPath
        assert not relDirPath.endswith("/")
        self.relDirPath = relDirPath

        if user is not None:
            assert isinstance(user, str) and user
        self.user = user

        if group is not None:
            assert isinstance(group, str) and group
        self.group = group

        # normalize a chmod-style string to an int
        if isinstance(mode, str):
            mode = jk_utils.ChModValue(mode).toInt()
        elif mode is not None:
            assert isinstance(mode, int)
        self.mode = mode

        if bCleanDir is not None:
            assert isinstance(bCleanDir, bool)
        self.bCleanDir = bCleanDir

        assert isinstance(files, list)
        assert all(isinstance(f, UPFile) for f in files)
        self.files = files
	def close(self):
		"""
		Finalize the archive: append a "meta.json" entry describing the
		archive, close the underlying tar stream and record the final size of
		the output file. Calling close() again afterwards is a no-op.
		"""
		# already closed
		if self.__t is None:
			return

		rawData = json.dumps(self.__createMetaJSON()).encode("utf-8")

		# build the tar entry for the metadata file
		tarInfo = tarfile.TarInfo("meta.json")
		tarInfo.size = len(rawData)
		tarInfo.mtime = time.time()
		tarInfo.uid = os.getuid()
		tarInfo.gid = os.getgid()
		tarInfo.mode = jk_utils.ChModValue("rwxrwxr-x").toInt()

		self.__totalSizeUncompressed += tarInfo.size

		self.__t.addfile(tarInfo, io.BytesIO(rawData))
		self.__t.close()
		self.__t = None

		# determine the size of the finished archive file on disk
		self.__totalSizeCompressed = os.lstat(self.__outFilePath).st_size
示例#11
0
    def __init__(self, fileGroup, directory, fileName: str,
                 user: typing.Union[str, None], group: typing.Union[str, None],
                 mode: typing.Union[int, str, None], fileID: int):
        """
        Represents a single file entry within an upload file group.
        """
        assert fileGroup.__class__.__name__ == "UPFileGroup"
        self.__fileGroup = fileGroup

        assert directory.__class__.__name__ == "UPDir"
        self.__directory = directory

        # the file name must be a plain name: non-empty, no path separators,
        # no reserved characters
        assert isinstance(fileName, str) and fileName
        for forbidden in ("/", "\\", ":", "|"):
            assert forbidden not in fileName
        self.fileName = fileName

        if user is not None:
            assert isinstance(user, str) and user
        self.user = user

        if group is not None:
            assert isinstance(group, str) and group
        self.group = group

        # normalize a chmod-style string to an int
        if isinstance(mode, str):
            mode = jk_utils.ChModValue(mode).toInt()
        elif mode is not None:
            assert isinstance(mode, int)
        self.mode = mode

        assert isinstance(fileID, int) and fileID >= 0
        self.__fileID = fileID
class SrcFileInfo(object):
    """
    Immutable description of a source file: size, content hash identifier,
    mode, modification time and (optionally) its path on disk.
    """

    # default mode used for raw in-memory data that has no file mode
    __DEFAULT_MODE = jk_utils.ChModValue("rwxrwxr-x").toInt()

    ################################################################################################################################
    ## Constructor
    ################################################################################################################################

    #
    # Constructor method.
    #
    def __init__(self, size: int, hashID: str, srcFilePath: typing.Union[str,
                                                                         None],
                 mode: int, mtime: float):
        """
        @param size         File size in bytes (>= 0).
        @param hashID       Content identifier of the form "sha256:<hex>:<size>".
        @param srcFilePath  Path of the source file, or None for in-memory data.
        @param mode         File mode bits (as returned by os.lstat()).
        @param mtime        Modification time in seconds since the epoch.
        """
        assert isinstance(size, int)
        assert size >= 0
        assert isinstance(hashID, str)
        assert hashID
        if srcFilePath is not None:
            assert isinstance(srcFilePath, str)
            assert srcFilePath
        assert isinstance(mode, int)
        assert isinstance(mtime, (int, float))

        self.mode = mode
        self.size = size
        self.mtime = mtime
        self.hashID = hashID
        self.srcFilePath = srcFilePath

    #

    ################################################################################################################################
    ## Public Methods
    ################################################################################################################################

    #
    # Create a SrcFileInfo by stat'ing and hashing the specified file.
    #
    @staticmethod
    def fromFile(filePath: str):
        statStruct = os.lstat(filePath)
        mode = statStruct.st_mode
        size = statStruct.st_size
        mtime = float(statStruct.st_mtime)

        # hash the file contents in chunks to keep memory usage constant
        hashAlg = hashlib.sha256()
        with open(filePath, "rb") as fin:
            for chunk in iter(lambda: fin.read(4096), b""):
                hashAlg.update(chunk)
        hashID = "sha256:{}:{}".format(hashAlg.hexdigest(), size)

        return SrcFileInfo(size, hashID, filePath, mode, mtime)

    #

    #
    # Create a SrcFileInfo for raw in-memory data. The mode defaults to
    # __DEFAULT_MODE and the modification time to 0.
    #
    @staticmethod
    def fromRaw(raw: typing.Union[bytes, bytearray, io.BytesIO]):
        # bugfix: neither len() nor hashlib's update() accept an io.BytesIO
        # object directly, so extract the underlying bytes first
        if isinstance(raw, io.BytesIO):
            raw = raw.getvalue()

        size = len(raw)
        mtime = 0

        hashAlg = hashlib.sha256()
        hashAlg.update(raw)
        hashID = "sha256:{}:{}".format(hashAlg.hexdigest(), size)

        return SrcFileInfo(size, hashID, None, SrcFileInfo.__DEFAULT_MODE, mtime)
    def testConnector(self):
        """
        Perform a connector test: connect to the backup repository, select
        and prepare a target directory, write a stats file ("simulate": True,
        no payload data is written), analyse the collected log for errors,
        write stats and log files, and finally terminate the connection.
        """

        # log to the console and into a buffer; the buffer is analysed for
        # errors and written to files at the end
        mainLog = jk_logging.MulticastLogger.create(
            jk_logging.ConsoleLogger.create(
                logMsgFormatter=jk_logging.COLOR_LOG_MESSAGE_FORMATTER),
            jk_logging.BufferLogger.create())

        N_EXPECTED_BYTES_TO_WRITE = 1000

        bError = False
        try:

            # statistics written to STATS_JSON_FILE_NAME; updated once more
            # with the final outcome before termination
            statsContainer = {
                "tStart": time.time(),
                "tEnd": None,
                "success": None,
                "expectedBytesToWrite": None,
                "totalBytesWritten": None,
                "avgWritingSpeed": None,
                "simulate": True,
            }

            effectiveTargetDirPath = None

            with ProcessingContext("Performing connector test", None,
                                   mainLog) as ctxMain:

                # --------------------------------------------------------------------------------------------------------------------------------
                # >>>> connect to the backup repository

                with ProcessingContext(
                        text=
                        "Connecting to backup repository and preparing backup",
                        targetDirPath=None,
                        log=ctxMain.log,
                        bMeasureDuration=True,
                        statsContainer=statsContainer,
                        statsDurationKey="d1_connectAndPrepare") as ctx:
                    # check if there is a suitable directory where we can mount the remote file system

                    ThaniyaIO.checkThatDirExists(ctx, self.__mountDirPath)
                    ThaniyaIO.ensureDirMode(ctx, self.__mountDirPath,
                                            jk_utils.ChModValue("rwx------"))

                    # mount the remote file system

                    self.__backupConnector.initialize(
                        ctx, self.__mountDirPath, N_EXPECTED_BYTES_TO_WRITE,
                        self.__backupConnectorParameters)

                    if not self.__backupConnector.isReady:
                        raise Exception(
                            "Backup client unexpectedly not ready for writing!"
                        )

                    # select the target directory where we will store the data. the variable "effectiveTargetDirPath"
                    # will receive the directory selected by the target directory strategy. we will write data there.

                    effectiveTargetDirPath = self.__targetDirStrategy.selectEffectiveTargetDirectory(
                        self.__mountDirPath)
                    ctx.log.info("Selected target directory: " +
                                 repr(effectiveTargetDirPath))

                    # verify that we have the correct directory: the "effectiveTargetDirPath" must be located somewhere within
                    # the mounted directory tree.

                    if effectiveTargetDirPath.endswith("/"):
                        effectiveTargetDirPath2 = effectiveTargetDirPath
                    else:
                        effectiveTargetDirPath2 = effectiveTargetDirPath + "/"
                    assert effectiveTargetDirPath2[:len(
                        self.__mountDirPath2)] == self.__mountDirPath2

                    ctx.log.notice("Creating subdirectories if necessary ...")
                    ThaniyaIO.ensureDirExists(ctx, effectiveTargetDirPath,
                                              jk_utils.ChModValue("rwx------"))

                    # check that the target directory fits our requirements: it must be empty.

                    bIsEmpty, contentEntries = ThaniyaIO.checkIfDirIsEmpty(
                        ctx, effectiveTargetDirPath)
                    if not bIsEmpty:
                        if STATS_JSON_FILE_NAME in contentEntries:
                            # target directory already seems to contain a backup
                            ctx.log.info(
                                "Directory already seems to contain a backup: "
                                + effectiveTargetDirPath2)
                        else:
                            raise Exception(
                                "Backup directory contains various non-backup files or directories!"
                            )

                    # now we are ready. but before we begin doing something let's write the backup stats first.

                    jk_json.saveToFilePretty(
                        statsContainer,
                        os.path.join(effectiveTargetDirPath,
                                     STATS_JSON_FILE_NAME))

                    # ----

                    ctx.log.notice("Done.")

                # --------------------------------------------------------------------------------------------------------------------------------

        except ProcessingFallThroughError as ee:
            # error has already been logged in a nested context
            bError = True
        except Exception as ee:
            bError = True
            if not ee.__class__.__name__.endswith(
                    "_ExceptionInChildContextException"):
                mainLog.error(ee)

        # --------------------------------------------------------------------------------------------------------------------------------
        # >>>> Finish

        try:
            # detecting errors

            detectionLogger = self.__analyseLogMessages(mainLog)
            if detectionLogger.hasError() or detectionLogger.hasStdErr(
            ) or detectionLogger.hasException():
                bError = True

            # writing final status log message

            if bError:
                mainLog.error("Backup terminated erroneously.")
            else:
                mainLog.success("Backup successfully completed.")

            if effectiveTargetDirPath is not None:
                # let's try to write the backup stats before termination.

                statsContainer["tEnd"] = time.time()
                statsContainer["success"] = not bError

                jk_json.saveToFilePretty(
                    statsContainer,
                    os.path.join(effectiveTargetDirPath, STATS_JSON_FILE_NAME))

                # write log

                bufferLogger = self.__getBufferLogger(mainLog)
                self.__writeLogToFiles(
                    bufferLogger,
                    os.path.join(effectiveTargetDirPath,
                                 PLAINTEXT_LOG_FILE_NAME),
                    os.path.join(effectiveTargetDirPath, JSON_LOG_FILE_NAME))

        except ProcessingFallThroughError as ee:
            bError = True
        except Exception as ee:
            bError = True
            mainLog.error(ee)

        # terminate connection

        try:
            with ProcessingContext("Terminating connection", None,
                                   mainLog) as ctxMain:
                # NOTE(review): "ctx" here refers to the variable from the
                # earlier with-block, whose context has already been exited;
                # it is even unbound (NameError) if that block was never
                # reached. Presumably this should be "ctxMain" — confirm.
                self.__backupConnector.deinitialize(ctx, bError,
                                                    statsContainer)

        except ProcessingFallThroughError as ee:
            bError = True
        except Exception as ee:
            bError = True
            mainLog.error(ee)
    def performBackup(self, backupTasks: list, bSimulate: bool):
        """
        Perform a full backup run: estimate the required disk space, connect
        to the backup repository, prepare the target directory, run all
        backup tasks (unless simulating), then write stats and log files and
        terminate the connection.

        @param backupTasks  List of AbstractThaniyaTask instances to run.
        @param bSimulate    If True, skip the data-writing phase.
        """

        for x in backupTasks:
            assert isinstance(x, AbstractThaniyaTask)
            #Assert.isInstance(x, AbstractThaniyaTask)

        # log to the console and into a buffer; the buffer is analysed for
        # errors and written to files at the end
        mainLog = jk_logging.MulticastLogger.create(
            jk_logging.ConsoleLogger.create(
                logMsgFormatter=jk_logging.COLOR_LOG_MESSAGE_FORMATTER),
            jk_logging.BufferLogger.create())

        bError = False
        try:

            # statistics written to STATS_JSON_FILE_NAME; updated once more
            # with the final outcome before termination
            statsContainer = {
                "tStart": time.time(),
                "tEnd": None,
                "success": None,
                "expectedBytesToWrite": None,
                "totalBytesWritten": None,
                "avgWritingSpeed": None,
                "simulate": bSimulate,
            }

            effectiveTargetDirPath = None

            with ProcessingContext(
                    "Performing backup simulation" if bSimulate else
                    "Performing backup", None, mainLog) as ctxMain:

                # --------------------------------------------------------------------------------------------------------------------------------
                # >>>> estimate the number of bytes we will likely have to write for this backup

                with ProcessingContext(
                        text="Calculating disk space required",
                        targetDirPath=None,
                        log=ctxMain.log,
                        bMeasureDuration=True,
                        statsContainer=statsContainer,
                        statsDurationKey="d0_calcDiskSpace") as ctx:

                    nExpectedBytesToWrite = 0
                    for job in backupTasks:
                        assert isinstance(job, AbstractThaniyaTask)
                        #Assert.isInstance(job, AbstractThaniyaTask)

                        nestedCtx = ctx.descend(
                            job.logMessageCalculateSpaceRequired)
                        with nestedCtx.log as nestedLog:
                            nExpectedBytesToWrite += job.calculateSpaceRequired(
                                nestedCtx)

                    ctx.log.info("Estimated total size of backup: " +
                                 jk_utils.formatBytes(nExpectedBytesToWrite))

                    statsContainer[
                        "expectedBytesToWrite"] = nExpectedBytesToWrite

                # --------------------------------------------------------------------------------------------------------------------------------
                # >>>> now connect to the backup repository

                with ProcessingContext(
                        text=
                        "Connecting to backup repository and preparing backup",
                        targetDirPath=None,
                        log=ctxMain.log,
                        bMeasureDuration=True,
                        statsContainer=statsContainer,
                        statsDurationKey="d1_connectAndPrepare") as ctx:
                    # check if there is a suitable directory where we can mount the remote file system

                    ThaniyaIO.checkThatDirExists(ctx, self.__mountDirPath)
                    ThaniyaIO.ensureDirMode(ctx, self.__mountDirPath,
                                            jk_utils.ChModValue("rwx------"))

                    # mount the remote file system

                    self.__backupConnector.initialize(
                        ctx, self.__mountDirPath, nExpectedBytesToWrite,
                        self.__backupConnectorParameters)

                    if not self.__backupConnector.isReady:
                        raise Exception(
                            "Backup client unexpectedly not ready for writing!"
                        )

                    # select the target directory where we will store the data. the variable "effectiveTargetDirPath"
                    # will receive the directory selected by the target directory strategy. we will write data there.

                    effectiveTargetDirPath = self.__targetDirStrategy.selectEffectiveTargetDirectory(
                        self.__mountDirPath)
                    ctx.log.info("Selected target directory: " +
                                 repr(effectiveTargetDirPath))

                    # verify that we have the correct directory: the "effectiveTargetDirPath" must be located somewhere within
                    # the mounted directory tree.

                    if effectiveTargetDirPath.endswith("/"):
                        effectiveTargetDirPath2 = effectiveTargetDirPath
                    else:
                        effectiveTargetDirPath2 = effectiveTargetDirPath + "/"
                    assert effectiveTargetDirPath2[:len(
                        self.__mountDirPath2)] == self.__mountDirPath2

                    ctx.log.notice("Creating subdirectories if necessary ...")
                    ThaniyaIO.ensureDirExists(ctx, effectiveTargetDirPath,
                                              jk_utils.ChModValue("rwx------"))

                    # check that the target directory fits our requirements: it must be empty.

                    bIsEmpty, contentEntries = ThaniyaIO.checkIfDirIsEmpty(
                        ctx, effectiveTargetDirPath)
                    if not bIsEmpty:
                        # NOTE(review): debug print left in — consider removing
                        print(contentEntries)
                        if STATS_JSON_FILE_NAME in contentEntries:
                            # target directory already seems to contain a backup
                            ctx.log.warn(
                                "Target directory already seems to contain a backup: "
                                + effectiveTargetDirPath2)
                            ctx.log.warn("Overwriting this backup.")
                        else:
                            raise Exception(
                                "Backup directory contains various non-backup files or directories!"
                            )

                    # now we are ready. but before we begin doing something let's write the backup stats first.

                    jk_json.saveToFilePretty(
                        statsContainer,
                        os.path.join(effectiveTargetDirPath,
                                     STATS_JSON_FILE_NAME))

                    # ----

                    ctx.log.notice("Done.")

                # --------------------------------------------------------------------------------------------------------------------------------
                # >>>> Writing the backup data

                if not bSimulate:
                    with ProcessingContext(
                            text="Writing the backup data",
                            targetDirPath=effectiveTargetDirPath,
                            log=ctxMain.log,
                            bMeasureDuration=True,
                            statsContainer=statsContainer,
                            statsDurationKey="d2_backup") as ctx:

                        for job in backupTasks:
                            assert isinstance(job, AbstractThaniyaTask)
                            #Assert.isInstance(job, AbstractThaniyaTask)

                            nestedCtx = ctx.descend(
                                job.logMessagePerformBackup)
                            with nestedCtx.log as nestedLog:
                                job.performBackup(nestedCtx)

                        # derive the average writing speed from the amount of
                        # data actually written and the measured duration
                        nTotalBytesWritten = self.__getDirTreeSize(
                            effectiveTargetDirPath, ctx.log)
                        fDuration = ctx.duration
                        if (nTotalBytesWritten > 0) and (fDuration > 0):
                            fAvgWritingSpeed = nTotalBytesWritten / fDuration
                            sAvgWritingSpeed = jk_utils.formatBytesPerSecond(
                                fAvgWritingSpeed)
                        else:
                            fAvgWritingSpeed = None
                            sAvgWritingSpeed = "n/a"

                        ctx.log.info("Total bytes written: " +
                                     jk_utils.formatBytes(nTotalBytesWritten))
                        ctx.log.info("Average writing speed: " +
                                     sAvgWritingSpeed)

                        statsContainer[
                            "totalBytesWritten"] = nTotalBytesWritten
                        statsContainer["avgWritingSpeed"] = fAvgWritingSpeed

        except ProcessingFallThroughError as ee:
            # error has already been logged in a nested context
            bError = True
        except Exception as ee:
            bError = True
            mainLog.error(ee)

        # --------------------------------------------------------------------------------------------------------------------------------
        # >>>> Finish

        try:
            # detecting errors

            detectionLogger = self.__analyseLogMessages(mainLog)
            if detectionLogger.hasError() or detectionLogger.hasStdErr(
            ) or detectionLogger.hasException():
                bError = True

            # writing final status log message

            if bError:
                mainLog.error("Backup terminated erroneously.")
            else:
                mainLog.success("Backup successfully completed.")

            if effectiveTargetDirPath is not None:
                # let's try to write the backup stats before termination.

                statsContainer["tEnd"] = time.time()
                statsContainer["success"] = not bError

                jk_json.saveToFilePretty(
                    statsContainer,
                    os.path.join(effectiveTargetDirPath, STATS_JSON_FILE_NAME))

                # let's try to write the backup log before termination.

                bufferLogger = self.__getBufferLogger(mainLog)
                self.__writeLogToFiles(
                    bufferLogger,
                    os.path.join(effectiveTargetDirPath,
                                 PLAINTEXT_LOG_FILE_NAME),
                    os.path.join(effectiveTargetDirPath, JSON_LOG_FILE_NAME))

        except ProcessingFallThroughError as ee:
            bError = True
        except Exception as ee:
            bError = True
            mainLog.error(ee)

        # terminate connection

        try:
            with ProcessingContext("Terminating connection", None,
                                   mainLog) as ctxMain:
                # NOTE(review): "ctx" here refers to the variable from an
                # earlier with-block, whose context has already been exited;
                # it is even unbound (NameError) if no such block was reached.
                # Presumably this should be "ctxMain" — confirm.
                self.__backupConnector.deinitialize(ctx, bError,
                                                    statsContainer)

        except ProcessingFallThroughError as ee:
            bError = True
        except Exception as ee:
            bError = True
            mainLog.error(ee)
示例#15
0
class PrivateTempDir(object):
    """
    A private temporary directory (owner-only permissions) that can hold
    randomly named files and subdirectories. It is removed when cleanup() is
    called, when used as a context manager, or — with a ResourceWarning — when
    the object is garbage collected.
    """

    # alphabet used for random file/directory names
    __CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"

    # owner-only modes for created files and directories
    __FILE_MODE = jk_utils.ChModValue("rw-------").toInt()
    __DIR_MODE = jk_utils.ChModValue("rwx------").toInt()

    ################################################################################################################################
    ## Constructor
    ################################################################################################################################

    def __init__(self, baseDirPath: str):
        """
        Create the private temporary directory below <baseDirPath>.

        @param baseDirPath  Absolute path of an existing directory.
        """
        assert isinstance(baseDirPath, str)
        assert os.path.isdir(baseDirPath)
        assert os.path.isabs(baseDirPath)

        # mkdtemp creates the directory readable/writable by the owner only
        self.__tempDirPath = tempfile.mkdtemp(".tmp", "tmp", baseDirPath)

        # verify the mode; 16384 == 0o040000, the st_mode directory type bit
        assert os.stat(
            self.__tempDirPath).st_mode == PrivateTempDir.__DIR_MODE | 16384

        # remove the directory automatically if this object is garbage
        # collected without an explicit cleanup (see __cleanup)
        self._finalizer = weakref.finalize(
            self,
            self.__cleanup,
            self.__tempDirPath,
            warn_message="Implicitly cleaning up {!r}".format(self))

    #

    ################################################################################################################################
    ## Public Properties
    ################################################################################################################################

    ################################################################################################################################
    ## Helper Methods
    ################################################################################################################################

    # Return a path with a random 32-character name below the temporary
    # directory that does not yet exist.
    def __newPath(self) -> str:
        while True:
            tempDirName = "".join(
                [random.choice(PrivateTempDir.__CHARS) for x in range(0, 32)])
            path = os.path.join(self.__tempDirPath, tempDirName)
            if not os.path.exists(path):
                return path

    #

    ################################################################################################################################
    ## Public Methods
    ################################################################################################################################

    def createDirectory(self) -> str:
        """
        Create a new randomly named subdirectory and return its path.
        """
        path = self.__newPath()

        os.mkdir(path, PrivateTempDir.__DIR_MODE)

        return path

    #

    #
    # Create a new text file with content. The file is cleaned up automatically as soon as this private
    # temporary directory is removed.
    #
    def writeTextFile(self, text: str) -> str:
        assert isinstance(text, str)

        path = self.__newPath()

        # os.open() is used here so the file is created with the private mode
        with open(
                os.open(path, os.O_CREAT | os.O_WRONLY | os.O_TRUNC,
                        PrivateTempDir.__FILE_MODE), "w") as f:
            f.write(text)

        return path

    #

    #
    # Create a new binary file with content. The file is cleaned up automatically as soon as this private
    # temporary directory is removed.
    #
    def writeBinaryFile(self, data: typing.Union[bytes, bytearray]) -> str:
        assert isinstance(data, (bytes, bytearray))

        path = self.__newPath()

        # os.open() is used here so the file is created with the private mode
        with open(
                os.open(path, os.O_CREAT | os.O_WRONLY | os.O_TRUNC,
                        PrivateTempDir.__FILE_MODE), "wb") as f:
            f.write(data)

        return path

    #

    # Finalizer callback: only invoked via the weakref finalizer, i.e. when
    # the object was garbage collected without an explicit cleanup — hence
    # the ResourceWarning.
    @classmethod
    def __cleanup(cls, tempDirPath: str, warn_message):
        shutil.rmtree(tempDirPath)
        warnings.warn(warn_message, ResourceWarning)

    #

    def __repr__(self):
        return "<{} {!r}>".format(self.__class__.__name__, self.__tempDirPath)

    #

    def __enter__(self):
        return self

    #

    def __exit__(self, exc, value, tb):
        self.cleanup()

    #

    def cleanup(self):
        """
        Remove the temporary directory and everything in it. detach() returns
        a truthy value only if the finalizer had not run/been detached yet, so
        calling cleanup() twice is safe.
        """
        if self._finalizer.detach():
            shutil.rmtree(self.__tempDirPath)