def loadFromFile(filePath:str, scmgr = None, structureTypeName:str = None) -> FlexObject:
	"""
	Load a JSON file and wrap the parsed dictionary in a FlexObject.

	If jk_jsoncfghelper2 is available, <scmgr> and <structureTypeName> may be
	specified *together* to validate the loaded data against a registered
	structure type. If the module is not available, both must be None.

	Raises an Exception if only one of scmgr/structureTypeName is given,
	if validation fails, or if validation is requested without the module.
	"""
	assert isinstance(filePath, str)

	if bJSONCfgHelperAvailable:
		if (scmgr is not None) or (structureTypeName is not None):
			# Validation needs *both* the checker manager and a type name;
			# previously a half-specified pair failed with an opaque error.
			if (scmgr is None) or (structureTypeName is None):
				raise Exception("Either specify both scmgr and structureTypeName or neither of them!")
			Assert.isIn(scmgr.__class__.__name__, [ "StructureCheckerManager", "jk_jsoncfghelper2.StructureCheckerManager" ])
			Assert.isInstance(structureTypeName, str)

		with open(filePath, "r") as f:
			data = json.load(f)
		assert isinstance(data, dict)

		if (scmgr is not None) and (structureTypeName is not None):
			checker = scmgr.getE(structureTypeName)
			if checker.checkB(scmgr, data):
				return FlexObject(data)
			else:
				raise Exception("Data does not match type " + repr(structureTypeName))		# TODO
		else:
			return FlexObject(data)

	else:
		if (scmgr is not None) or (structureTypeName is not None):
			# fixed typo in the error message ("must noe None")
			raise Exception("As module jk_jsoncfghelper2 is not installed, scmgr and structureTypeName must be None!")
		with open(filePath, "r") as f:
			data = json.load(f)
		assert isinstance(data, dict)
		return FlexObject(data)
def __deserializeTable(textConverterMgr: TextConverterManager, jobj: dict) -> TokenizingTable:
	"""
	Build a TokenizingTable from its JSON representation <jobj>.
	Expects keys "tableID" and "tableName"; optional keys "on",
	"onOther" and "onEOS" hold pattern/action definitions.
	"""
	Assert.isInstance(jobj, dict)

	# deserialize the pattern rows (if any) first
	jRowList = JSONLoader.__getE(jobj, "on", (list, tuple))
	parsedRows = []
	if jRowList is not None:
		for idx in range(len(jRowList)):
			jRow = JSONLoader.__getE(jRowList, idx, dict)
			parsedRows.append(JSONLoader.__deserializeRow(textConverterMgr, jRow))

	tableID = JSONLoader.__getE(jobj, "tableID", int)
	tableName = JSONLoader.__getE(jobj, "tableName", str)

	ret = TokenizingTable(tableID)
	ret.tableName = tableName
	for parsedRow in parsedRows:
		ret.addPatternRow(parsedRow.pattern, parsedRow.actions)

	# optional fallback actions
	jActions = JSONLoader.__getE(jobj, "onOther", (list, tuple))
	if jActions is not None:
		ret.setOther(JSONLoader.__deserializeActions(textConverterMgr, jActions))

	# optional end-of-stream actions
	jActions = JSONLoader.__getE(jobj, "onEOS", (list, tuple))
	if jActions is not None:
		ret.setEOS(JSONLoader.__deserializeActions(textConverterMgr, jActions))

	return ret
def __perform_calcDiskSpaceRequired(self, bd2: BD2, backupTasks: typing.List[AbstractThaniyaTask]) -> int:
	"""
	Ask every backup task how much disk space it will need and return the sum
	(in bytes). The result is also logged and stored in the statistics
	container under "expectedBytesToWrite".
	"""
	with ProcessingContext(
			text="Calculating disk space required",
			bd2=bd2,
			bMeasureDuration=True,
			statsDurationKey="d0_calcDiskSpace") as ctx:

		nExpectedBytesToWrite = 0
		for task in backupTasks:
			Assert.isInstance(task, AbstractThaniyaTask)
			nestedCtx = ctx.descend(task.logMessageCalculateSpaceRequired)
			with nestedCtx.log as nestedLog:
				nExpectedBytesToWrite += task.calculateSpaceRequired(nestedCtx)

		ctx.log.info("Estimated total size of backup: " + jk_utils.formatBytes(nExpectedBytesToWrite))
		bd2.statsContainer.setValue("expectedBytesToWrite", nExpectedBytesToWrite)

		# ----

		ctx.log.notice("Done.")

	return nExpectedBytesToWrite
def __init__(self, tp:AbstractTokenPattern, emitName:str = None, emitValue = None):
	"""
	Wrap token pattern <tp>; an optional <emitName>/<emitValue> pair
	describes what to emit on a match.
	"""
	Assert.isInstance(tp, AbstractTokenPattern)
	if emitName is not None:
		Assert.isInstance(emitName, str)

	self.__tp = tp
	self.emitName = emitName
	self.emitValue = emitValue
def __getAny(d: dict, key1: str, key2: str, typeOrTypes):
	"""
	Look up <key1> in <d>, falling back to <key2>. Returns None if neither
	key yields a value; otherwise the value is type-checked and returned.
	"""
	value = d.get(key1)
	if value is None:
		value = d.get(key2)
	if value is None:
		return None
	Assert.isInstance(value, typeOrTypes)
	return value
def __get(d: Union[dict, list, tuple], key: Union[str, int], typeOrTypes):
	"""
	Retrieve <key> from mapping or sequence <d>. Missing dict keys yield
	None; a non-None value is type-checked before being returned.
	"""
	value = d.get(key) if isinstance(d, dict) else d[key]
	if value is None:
		return None
	Assert.isInstance(value, typeOrTypes)
	return value
def register(self, name:str, checker:AbstractValueChecker):
	"""
	Register <checker> under <name> and return it (allows call chaining).
	An existing registration under the same name is overwritten.
	"""
	Assert.isInstance(name, str)
	Assert.isInstance(checker, AbstractValueChecker)

	self.__types[name] = checker
	return checker
def __init__(self, *args, emitName: str = None, emitValue=None):
	"""
	Combine one or more token patterns; an optional <emitName>/<emitValue>
	pair describes what to emit on a match.
	"""
	assert len(args) > 0
	for pattern in args:
		Assert.isInstance(pattern, AbstractTokenPattern)
	if emitName is not None:
		Assert.isInstance(emitName, str)

	self.__tokenPatterns = args
	self.emitName = emitName
	self.emitValue = emitValue
def __getAnyE(d: dict, key1: str, key2: str, typeOrTypes):
	"""
	Look up <key1> in <d>, falling back to <key2>; the first non-None value
	wins. Raises an Exception if neither key yields a value; otherwise the
	value is type-checked and returned.
	"""
	v = d.get(key1)
	if v is None:
		v = d.get(key2)
	if v is None:
		raise Exception("None of these keys exist: " + repr(key1) + ", " + repr(key2))
	# v is guaranteed non-None here, so the former `if v is not None:`
	# guard around the type check was redundant and has been removed.
	Assert.isInstance(v, typeOrTypes)
	return v
def __getE(d: Union[dict, list, tuple], key: Union[str, int], typeOrTypes):
	"""
	Retrieve <key> from mapping or sequence <d>. Raises an Exception if the
	value is missing/None; otherwise the value is type-checked and returned.
	"""
	if isinstance(d, dict):
		v = d.get(key)
	else:
		v = d[key]
	if v is None:
		raise Exception("No such key: " + repr(key))
	# v is guaranteed non-None here, so the former `if v is not None:`
	# guard around the type check was redundant and has been removed.
	Assert.isInstance(v, typeOrTypes)
	return v
def __init__(self, *args, emitName: str = None, emitValue=None):
	"""
	Build an alternative over one or more token patterns; an optional
	<emitName>/<emitValue> pair describes what to emit on a match.

	TPOptional members are rejected because an always-matching alternative
	would break the matching process.
	"""
	assert len(args) > 0
	for a in args:
		Assert.isInstance(a, AbstractTokenPattern)
		if isinstance(a, TPOptional):
			raise Exception(
				"An TPOptional element should not be part of a TPAlt element as this breaks matching!"
			)
	# validate emitName for consistency with the sibling pattern constructors
	if emitName is not None:
		Assert.isInstance(emitName, str)

	self.__tokenPatterns = args
	self.emitName = emitName
	self.emitValue = emitValue
def __init__(self, tp: AbstractTokenPattern, delimiterPattern: AbstractTokenPattern, emitName: str = None, emitValue=None):
	"""
	Build a repetition of <tp> separated by <delimiterPattern> (which may be
	None); an optional <emitName>/<emitValue> pair describes what to emit.
	"""
	# use Assert.isInstance (not bare `assert`) for consistency with the
	# sibling constructors; bare asserts are stripped under `python -O`
	Assert.isInstance(tp, AbstractTokenPattern)
	# explicit None check: a falsy-but-valid pattern object must still be validated
	if delimiterPattern is not None:
		Assert.isInstance(delimiterPattern, AbstractTokenPattern)
	if emitName is not None:
		Assert.isInstance(emitName, str)

	self.__tokenPattern = tp
	self.__delimiterPattern = delimiterPattern
	self.emitName = emitName
	self.emitValue = emitValue
def __perform_backup(self, bd2: BD2, backupTasks: typing.List[AbstractThaniyaTask]):
	"""
	Run all backup tasks, then measure the size of the data written and
	derive the average writing speed. Results are logged and stored in the
	statistics container ("totalBytesWritten", "avgWritingSpeed").
	"""
	# NOTE: we need to access this context later as it calculates the duration and we need this information separately to log it.
	processingContext = ProcessingContext(text="Writing the backup data", bd2=bd2, bMeasureDuration=True, statsDurationKey="d2_backup")
	with processingContext as ctx:
		# run every backup task in its own nested logging context
		for job in backupTasks:
			Assert.isInstance(job, AbstractThaniyaTask)
			with ctx.descend(job.logMessagePerformBackup) as nestedCtx:
				job.performBackup(nestedCtx)
		ctx.log.notice("All backup tasks completed.")

		# calculate statistics
		with ctx.log.descend("Calculating size of backup performed ...") as nestedLog:
			nTotalBytesWritten = jk_utils.fsutils.getFolderSize(bd2.effectiveTargetDirPath)
			# duration measured by the processing context up to this point
			fDuration = processingContext.duration
			if (nTotalBytesWritten > 0) and (fDuration > 0):
				fAvgWritingSpeed = nTotalBytesWritten / fDuration
				sAvgWritingSpeed = jk_utils.formatBytesPerSecond(fAvgWritingSpeed)
			else:
				# nothing written or zero duration: speed is undefined
				fAvgWritingSpeed = None
				sAvgWritingSpeed = "n/a"
			ctx.log.info("Total bytes written: " + jk_utils.formatBytes(nTotalBytesWritten))
			ctx.log.info("Average writing speed: " + sAvgWritingSpeed)

		bd2.statsContainer.setValue("totalBytesWritten", nTotalBytesWritten)
		bd2.statsContainer.setValue("avgWritingSpeed", fAvgWritingSpeed)

		# ----

		ctx.log.notice("Done.")
def __init__(self, type_: str, text_: str = None, bIgnoreCase: bool = False, emitName: str = None, emitValue=None):
	"""
	Describe a token to match by <type_> and (optionally) <text_>; with
	<bIgnoreCase> the text is matched case-insensitively. An optional
	<emitName>/<emitValue> pair describes what to emit on a match.
	"""
	self.type = type_
	if text_ is None:
		# without text there is nothing to case-fold
		self.text = None
		self.bIgnoreCase = False
	else:
		if bIgnoreCase:
			# store lowercased so matching can compare lowercased input
			self.text = text_.lower()
		else:
			self.text = text_
		self.bIgnoreCase = bIgnoreCase
	if emitName is not None:
		Assert.isInstance(emitName, str)
	self.emitName = emitName
	self.emitValue = emitValue
]], ["|c|*|d", None, []], ["|a|b", None, []], ] for p in PATTERNS: print("NOW PROCESSING:", p) if len(p) == 3: spath, result = FlexDataSelector(p[0]).getOne(dataTree) print("\tspath = " + repr(spath)) print("\tresult = " + repr(result)) if p[1] is None: Assert.isNone(result) Assert.isNone(spath) else: Assert.isInstance(result, p[1]) Assert.isInstance(spath, str) Assert.isIn(spath, p[2]) elif len(p) == 4: spath, result = FlexDataSelector(p[0]).getOne(dataTree) if p[1] is None: Assert.isNone(spath) Assert.isNone(result) else: Assert.isInstance(result, p[1]) Assert.isEqual(result, p[2]) Assert.isInstance(spath, str) Assert.isIn(spath, p[3]) else: raise Exception()
def getE(self, name:str) -> AbstractValueChecker:
	"""
	Return the checker registered under <name>; raises KeyError if no such
	registration exists.
	"""
	Assert.isInstance(name, str)
	return self.__types[name]
def __init__(self, bd2, log: jk_logging.AbstractLogger):
	"""
	Store the backup driver <bd2> and the logger to use.
	"""
	#Assert.isInstance(bd2, "BD2")		# extend Assert to support string class names
	Assert.isInstance(log, jk_logging.AbstractLogger)

	self.__bd2 = bd2
	self.__log = log
def toV(value, dataType: str = None, defaultType: str = None) -> jk_flexdata.FlexObject:
	"""
	Wrap <value> in a FlexObject of the form {"dt": <dataType>, "v": <value>}.

	If <dataType> is None the type is autodetected from <value> (with
	<defaultType> used when <value> is None). If <dataType> is given,
	<value> is validated against it; datetime values for "timestamputc" /
	"timestamp" are converted to a POSIX timestamp float.

	Raises an Exception on mixed-type lists, unknown value types, or an
	invalid <dataType>.
	"""
	if dataType is None:
		# autodetect type
		if value is None:
			# try to accept the default type
			if defaultType is None:
				raise Exception("Value is None and default type is not set!")
			assert defaultType in [ "int", "str", "bool", "float", "int[]", "str[]", "float[]", "tempc", "timestamputc", "timestamp", "duration", "bytes", "freq", "secsdiff" ]
			dataType = defaultType
		elif isinstance(value, bool):
			# NOTE: bool must be tested before int as bool is a subclass of int
			dataType = "bool"
		elif isinstance(value, int):
			dataType = "int"
		elif isinstance(value, float):
			dataType = "float"
		elif isinstance(value, str):
			dataType = "str"
		elif isinstance(value, list):
			# count item types to derive a homogeneous list type
			nCountStr = 0
			nCountInt = 0
			nCountFloat = 0
			for item in value:
				if isinstance(item, float):
					nCountFloat += 1
				elif isinstance(item, int):
					nCountInt += 1
				elif isinstance(item, str):
					nCountStr += 1
				else:
					raise Exception("Unknown list item data type: " + repr(type(item)))
			if nCountInt == nCountFloat == nCountStr == 0:
				# empty list: assume it is a string list
				dataType = "str[]"
			elif (nCountInt * nCountFloat != 0) or (nCountInt * nCountStr != 0) or (nCountFloat * nCountStr != 0):
				# more than one item type present
				raise Exception("List with mixed item types!")
			else:
				if nCountFloat > 0:
					dataType = "float[]"
				elif nCountInt > 0:
					dataType = "int[]"
				else:
					dataType = "str[]"
		else:
			raise Exception("Unknown data type: " + repr(type(value)))
	else:
		# type has been specified
		if value is None:
			# accept the type as it is
			pass
		elif dataType == "bool":
			Assert.isInstance(value, bool)
		elif dataType == "int":
			Assert.isInstance(value, int)
		elif dataType == "float":
			Assert.isInstance(value, float)
		elif dataType == "str":
			Assert.isInstance(value, str)
		elif dataType == "tempc":
			# temperature in degrees Celsius
			Assert.isInstance(value, (int, float))
		elif dataType == "timestamputc":
			if isinstance(value, datetime.datetime):
				# convert to a POSIX timestamp
				value = value.timestamp()
			else:
				Assert.isInstance(value, (int, float))
		elif dataType == "timestamp":
			if isinstance(value, datetime.datetime):
				# convert to a POSIX timestamp
				value = value.timestamp()
			else:
				Assert.isInstance(value, (int, float))
		elif dataType == "duration":
			Assert.isInstance(value, (int, float))
		elif dataType == "bytes":
			Assert.isInstance(value, int)
		elif dataType == "freq":
			Assert.isInstance(value, (int, float))
		elif dataType == "secsdiff":
			Assert.isInstance(value, (int, float))
		else:
			raise Exception("Invalid data type: " + repr(dataType))
	return jk_flexdata.FlexObject({"dt": dataType, "v": value})