def loadFromFile(filePath:str, scmgr = None, structureTypeName:str = None) -> FlexObject:
	assert isinstance(filePath, str)

	if bJSONCfgHelperAvailable:
		if scmgr or structureTypeName:
			Assert.isIn(scmgr.__class__.__name__, [ "StructureCheckerManager", "jk_jsoncfghelper2.StructureCheckerManager" ])
			Assert.isInstance(structureTypeName, str)

		with open(filePath, "r") as f:
			data = json.load(f)
		assert isinstance(data, dict)

		if scmgr or structureTypeName:
			checker = scmgr.getE(structureTypeName)
			if checker.checkB(scmgr, data):
				return FlexObject(data)
			else:
				raise Exception("Data does not match type " + repr(structureTypeName))		# TODO
		else:
			return FlexObject(data)

	else:
		if (scmgr is not None) or (structureTypeName is not None):
			raise Exception("As module jk_jsoncfghelper2 is not installed, scmgr and structureTypeName must be None!")

		with open(filePath, "r") as f:
			data = json.load(f)
		assert isinstance(data, dict)

		return FlexObject(data)
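# Hedged usage sketch (not part of the original sources): how loadFromFile() might be
# called. "settings.json", the StructureCheckerManager instance "scmgr" and the structure
# type name "config" are illustrative assumptions; plain loading needs neither of the
# optional arguments.

cfg = loadFromFile("settings.json")                                             # load without validation
cfg = loadFromFile("settings.json", scmgr=scmgr, structureTypeName="config")    # load and validate via jk_jsoncfghelper2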
def __perform_calcDiskSpaceRequired(self, bd2: BD2, backupTasks: typing.List[AbstractThaniyaTask]) -> int:
	with ProcessingContext(text="Calculating disk space required", bd2=bd2, bMeasureDuration=True, statsDurationKey="d0_calcDiskSpace") as ctx:
		nExpectedBytesToWrite = 0
		for job in backupTasks:
			#assert isinstance(job, AbstractThaniyaTask)
			Assert.isInstance(job, AbstractThaniyaTask)

			nestedCtx = ctx.descend(job.logMessageCalculateSpaceRequired)
			with nestedCtx.log as nestedLog:
				nExpectedBytesToWrite += job.calculateSpaceRequired(nestedCtx)

		ctx.log.info("Estimated total size of backup: " + jk_utils.formatBytes(nExpectedBytesToWrite))

		bd2.statsContainer.setValue("expectedBytesToWrite", nExpectedBytesToWrite)

		# ----

		ctx.log.notice("Done.")

	return nExpectedBytesToWrite
def __init__(self, tp:AbstractTokenPattern, emitName:str = None, emitValue = None):
	Assert.isInstance(tp, AbstractTokenPattern)
	if emitName is not None:
		Assert.isInstance(emitName, str)

	self.__tp = tp
	self.emitName = emitName
	self.emitValue = emitValue
def __getAny(d: dict, key1: str, key2: str, typeOrTypes):
	v = d.get(key1)
	if v is None:
		v = d.get(key2)
	if v is not None:
		Assert.isInstance(v, typeOrTypes)
	return v
def __deserializeTable(textConverterMgr: TextConverterManager, jobj: dict) -> TokenizingTable:
	Assert.isInstance(jobj, dict)

	jrows = JSONLoader.__getE(jobj, "on", (list, tuple))
	rows = []
	if jrows is not None:
		for i in range(0, len(jrows)):
			rows.append(JSONLoader.__deserializeRow(textConverterMgr, JSONLoader.__getE(jrows, i, dict)))

	tableID = JSONLoader.__getE(jobj, "tableID", int)
	tableName = JSONLoader.__getE(jobj, "tableName", str)

	table = TokenizingTable(tableID)
	table.tableName = tableName
	for r in rows:
		table.addPatternRow(r.pattern, r.actions)

	jtemp = JSONLoader.__getE(jobj, "onOther", (list, tuple))
	if jtemp is not None:
		table.setOther(JSONLoader.__deserializeActions(textConverterMgr, jtemp))

	jtemp = JSONLoader.__getE(jobj, "onEOS", (list, tuple))
	if jtemp is not None:
		table.setEOS(JSONLoader.__deserializeActions(textConverterMgr, jtemp))

	return table
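# Hedged illustration (not from the original sources): the dictionary shape that
# __deserializeTable() reads, inferred purely from the keys it accesses above. The
# row and action entries are placeholders; their real structure is defined by
# __deserializeRow() and __deserializeActions().

EXAMPLE_TABLE_JOBJ = {
	"tableID": 1,                 # int, passed to TokenizingTable(...)
	"tableName": "default",       # str
	"on": [                       # list of row definitions -> __deserializeRow(...)
		{ "...": "..." },
	],
	"onOther": [ "..." ],         # action list -> table.setOther(...)
	"onEOS": [ "..." ],           # action list -> table.setEOS(...)
}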
def __get(d: Union[dict, list, tuple], key: Union[str, int], typeOrTypes):
	if isinstance(d, dict):
		v = d.get(key)
	else:
		v = d[key]
	if v is not None:
		Assert.isInstance(v, typeOrTypes)
	return v
def _compile_float(scmgr: StructureCheckerManager, x: JDef):
	Assert.isEqual(x.dataType, "float")
	#assert x.dataType == "float"

	return FloatValueChecker(scmgr, minValue=x.minValue, maxValue=x.maxValue, required=x.required, nullable=x.nullable)
def _compile_int(scmgr: StructureCheckerManager, x: JDef):
	Assert.isIn(x.dataType, ["int", "integer"])
	#assert x.dataType in [ "int", "integer" ]

	return IntValueChecker(scmgr, minValue=x.minValue, maxValue=x.maxValue, required=x.required, nullable=x.nullable)
def _compile_anydict(scmgr: StructureCheckerManager, x: JDef):
	Assert.isIn(x.dataType, ["dict", "dictionary"])
	#assert x.dataType in [ "dict", "dictionary" ]

	return AnyDictionaryValueChecker(scmgr, required=x.required, allowedElementTypes=__compile_allowedElements(scmgr, x.elementTypes), nullable=x.nullable)
def __init__(self, *args, emitName: str = None, emitValue=None):
	assert len(args) > 0
	for a in args:
		Assert.isInstance(a, AbstractTokenPattern)
	self.__tokenPatterns = args

	if emitName is not None:
		Assert.isInstance(emitName, str)

	self.emitName = emitName
	self.emitValue = emitValue
def register(self, name:str, checker:AbstractValueChecker):
	Assert.isInstance(name, str)
	#assert isinstance(name, str)
	Assert.isInstance(checker, AbstractValueChecker)
	#assert isinstance(checker, AbstractValueChecker)

	self.__types[name] = checker
	return checker
def _compile_str(scmgr: StructureCheckerManager, x: JDef):
	Assert.isIn(x.dataType, ["str", "string"])
	#assert x.dataType in [ "str", "string" ]

	return StringValueChecker(scmgr, minLength=x.minLength, maxLength=x.maxLength, allowedValues=x.allowedValues, required=x.required, nullable=x.nullable)
def __getAnyE(d: dict, key1: str, key2: str, typeOrTypes):
	v = d.get(key1)
	if v is None:
		v = d.get(key2)
	if v is None:
		raise Exception("None of these keys exist: " + repr(key1) + ", " + repr(key2))
	Assert.isInstance(v, typeOrTypes)
	return v
def __getE(d: Union[dict, list, tuple], key: Union[str, int], typeOrTypes):
	if isinstance(d, dict):
		v = d.get(key)
	else:
		v = d[key]
	if v is None:
		raise Exception("No such key: " + repr(key))
	Assert.isInstance(v, typeOrTypes)
	return v
def _compile_list(scmgr: StructureCheckerManager, x: JDef):
	Assert.isEqual(x.dataType, "list")
	#assert x.dataType == "list"

	return ListValueChecker(scmgr, minLength=x.minLength, maxLength=x.maxLength, allowedElementTypes=__compile_allowedElements(scmgr, x.elementTypes), required=x.required, nullable=x.nullable)
def __init__(self, *args, emitName: str = None, emitValue=None):
	assert len(args) > 0
	for a in args:
		Assert.isInstance(a, AbstractTokenPattern)
		if isinstance(a, TPOptional):
			raise Exception("A TPOptional element should not be part of a TPAlt element as this breaks matching!")
	self.__tokenPatterns = args

	self.emitName = emitName
	self.emitValue = emitValue
def __init__(self, tp: AbstractTokenPattern, delimiterPattern: AbstractTokenPattern, emitName: str = None, emitValue=None):
	assert isinstance(tp, AbstractTokenPattern)
	if delimiterPattern:
		assert isinstance(delimiterPattern, AbstractTokenPattern)
	if emitName is not None:
		Assert.isInstance(emitName, str)

	self.__tokenPattern = tp
	self.__delimiterPattern = delimiterPattern
	self.emitName = emitName
	self.emitValue = emitValue
def __init__(self, type_: str, text_: str = None, bIgnoreCase: bool = False, emitName: str = None, emitValue=None):
	self.type = type_
	if text_ is None:
		self.text = None
		self.bIgnoreCase = False
	else:
		self.text = text_.lower() if bIgnoreCase else text_
		self.bIgnoreCase = bIgnoreCase

	if emitName is not None:
		Assert.isInstance(emitName, str)

	self.emitName = emitName
	self.emitValue = emitValue
def __perform_backup(self, bd2: BD2, backupTasks: typing.List[AbstractThaniyaTask]):
	# NOTE: we need to access this context later as it calculates the duration and we need this information separately to log it.
	processingContext = ProcessingContext(text="Writing the backup data", bd2=bd2, bMeasureDuration=True, statsDurationKey="d2_backup")

	with processingContext as ctx:
		for job in backupTasks:
			Assert.isInstance(job, AbstractThaniyaTask)

			with ctx.descend(job.logMessagePerformBackup) as nestedCtx:
				job.performBackup(nestedCtx)

		ctx.log.notice("All backup tasks completed.")

		# calculate statistics

		with ctx.log.descend("Calculating size of backup performed ...") as nestedLog:
			nTotalBytesWritten = jk_utils.fsutils.getFolderSize(bd2.effectiveTargetDirPath)

		fDuration = processingContext.duration
		if (nTotalBytesWritten > 0) and (fDuration > 0):
			fAvgWritingSpeed = nTotalBytesWritten / fDuration
			sAvgWritingSpeed = jk_utils.formatBytesPerSecond(fAvgWritingSpeed)
		else:
			fAvgWritingSpeed = None
			sAvgWritingSpeed = "n/a"

		ctx.log.info("Total bytes written: " + jk_utils.formatBytes(nTotalBytesWritten))
		ctx.log.info("Average writing speed: " + sAvgWritingSpeed)

		bd2.statsContainer.setValue("totalBytesWritten", nTotalBytesWritten)
		bd2.statsContainer.setValue("avgWritingSpeed", fAvgWritingSpeed)

		# ----

		ctx.log.notice("Done.")
def connect(self) -> fabric.Connection:
	if self.__c is None:
		pwd = self.__passwordProvider(self.__hostName, self.__userName)
		config = fabric.Config(overrides={'sudo': {'password': pwd}})
		self.__c = fabric.Connection(self.__hostName, self.__userName, self.__port, config=config, connect_kwargs={"password": pwd})

		# verify the connection with a simple echo round trip
		r = self.__c.run(self._encodeSSHCmdLine("/bin/echo", "foo \" \" bar"), hide=True)
		Assert.isTrue(self.__c.is_connected)
		Assert.isEqual(r.exited, 0)
		Assert.isEqual(r.stderr, "")
		Assert.isEqual(r.stdout, "foo \" \" bar\n")

	return self.__c
import datetime

import jk_utils.datetime
from jk_utils.datatypes import *
from jk_testing import Assert


t = getTypeAsStr(123)
print(t)
Assert.isEqual(t, "int")

t = getTypeAsStr(3.14)
print(t)
Assert.isEqual(t, "float")

t = getTypeAsStr("abc")
print(t)
Assert.isEqual(t, "str")

t = getTypeAsStr(True)
print(t)
Assert.isEqual(t, "bool")

t = getTypeAsStr(False)
print(t)
Assert.isEqual(t, "bool")
def getE(self, name:str) -> AbstractValueChecker:
	Assert.isInstance(name, str)
	#assert isinstance(name, str)

	return self.__types[name]
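# Hedged usage sketch (not part of the original sources): register() and getE() act as a
# name-to-checker registry. This assumes both are methods of a
# jk_jsoncfghelper2.StructureCheckerManager constructed without arguments, and that
# "MyConfigChecker" is a hypothetical AbstractValueChecker subclass. The checkB() call
# mirrors the one made in loadFromFile() above.

scmgr = StructureCheckerManager()
scmgr.register("config", MyConfigChecker(scmgr))

checker = scmgr.getE("config")          # raises KeyError if "config" was never registered
if checker.checkB(scmgr, { "some": "data" }):
	print("data matches structure type 'config'")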
def __init__(self, bd2, log: jk_logging.AbstractLogger):
	#Assert.isInstance(bd2, "BD2")		# extend Assert to support string class names
	Assert.isInstance(log, jk_logging.AbstractLogger)

	self.__bd2 = bd2
	self.__log = log
#!/usr/bin/python3

from jk_utils.datetime import *
from jk_testing import Assert


d1 = D.now()
d2 = d1.nextDay().nextDay()
Assert.isEqual(d2 - d1, 2)
Assert.isEqual((d1.dayOfWeek + 2) % 7, d2.dayOfWeek)

d1 = D.now().startOfWeek()
d2 = d1.nextWeek()
Assert.isEqual(d2 - d1, 7)

d1 = D.now()
d2 = d1.nextWeek()
Assert.isTrue((d2 - d1) > 0)
Assert.isTrue((d2 - d1) <= 7)
Assert.isTrue(d2.dayOfWeek == 1)
Assert.isTrue((d2 - 1).dayOfWeek == 7)
Assert.isTrue(d2 >= d1)
Assert.isTrue(d1 <= d2)

d1 = D.now()
d2 = d1.nextWeek().previousWeek()
Assert.isTrue(d1 >= d2)

for d in dateRange(d1.startOfWeek(), d2.nextWeek()):
	print(d, "\t", d.toJSON())
Assert.isEqual(len(list(dateRange(d1.startOfWeek(), d2.nextWeek()))), 7)
#!/usr/bin/python3

from jk_testing import Assert
from jk_trioinput import ConsoleInputHistory


h = ConsoleInputHistory()
h.append("123")
h.append("wieu")
h.append("LODJFKJHS")
h.resetCursor()

Assert.isEqual(h.prev(), "LODJFKJHS")
Assert.isEqual(h.prev(), "wieu")
Assert.isEqual(h.next(), "LODJFKJHS")
Assert.isEqual(h.prev(), "wieu")
Assert.isEqual(h.prev(), "123")
Assert.isEqual(h.next(), "wieu")
Assert.isEqual(h.prev(), "123")
Assert.isEqual(h.prev(), None)
Assert.isEqual(h.next(), "wieu")
Assert.isEqual(h.next(), "LODJFKJHS")
Assert.isEqual(h.next(), None)

h = ConsoleInputHistory()
h.append("123")
h.append("wieu")
h.append("LODJFKJHS")
h.resetCursor()
def toV(value, dataType: str = None, defaultType: str = None) -> jk_flexdata.FlexObject:
	if dataType is None:
		# autodetect type

		if value is None:
			# try to accept the default type
			if defaultType is None:
				raise Exception("Value is None and default type is not set!")
			assert defaultType in [ "int", "str", "bool", "float", "int[]", "str[]", "float[]", "tempc", "timestamputc", "timestamp", "duration", "bytes", "freq", "secsdiff" ]
			dataType = defaultType

		elif isinstance(value, bool):
			dataType = "bool"
		elif isinstance(value, int):
			dataType = "int"
		elif isinstance(value, float):
			dataType = "float"
		elif isinstance(value, str):
			dataType = "str"

		elif isinstance(value, list):
			nCountStr = 0
			nCountInt = 0
			nCountFloat = 0
			for item in value:
				if isinstance(item, float):
					nCountFloat += 1
				elif isinstance(item, int):
					nCountInt += 1
				elif isinstance(item, str):
					nCountStr += 1
				else:
					raise Exception("Unknown list item data type: " + repr(type(item)))
			if nCountInt == nCountFloat == nCountStr == 0:
				# assume it is a string list
				dataType = "str[]"
			elif (nCountInt * nCountFloat != 0) or (nCountInt * nCountStr != 0) or (nCountFloat * nCountStr != 0):
				raise Exception("List with mixed item types!")
			else:
				if nCountFloat > 0:
					dataType = "float[]"
				elif nCountInt > 0:
					dataType = "int[]"
				else:
					dataType = "str[]"

		else:
			raise Exception("Unknown data type: " + repr(type(value)))

	else:
		# type has been specified

		if value is None:
			# accept the type as it is
			pass
		elif dataType == "bool":
			Assert.isInstance(value, bool)
		elif dataType == "int":
			Assert.isInstance(value, int)
		elif dataType == "float":
			Assert.isInstance(value, float)
		elif dataType == "str":
			Assert.isInstance(value, str)
		elif dataType == "tempc":
			Assert.isInstance(value, (int, float))
		elif dataType == "timestamputc":
			if isinstance(value, datetime.datetime):
				value = value.timestamp()
			else:
				Assert.isInstance(value, (int, float))
		elif dataType == "timestamp":
			if isinstance(value, datetime.datetime):
				value = value.timestamp()
			else:
				Assert.isInstance(value, (int, float))
		elif dataType == "duration":
			Assert.isInstance(value, (int, float))
		elif dataType == "bytes":
			Assert.isInstance(value, int)
		elif dataType == "freq":
			Assert.isInstance(value, (int, float))
		elif dataType == "secsdiff":
			Assert.isInstance(value, (int, float))
		else:
			raise Exception("Invalid data type: " + repr(dataType))

	return jk_flexdata.FlexObject({"dt": dataType, "v": value})
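# Hedged usage sketch (not part of the original sources): how toV() classifies values,
# following directly from the branches above. The comments describe the "dt"/"v" pairs
# wrapped into the returned jk_flexdata.FlexObject.

import datetime

toV(3.14)                                               # wraps {"dt": "float", "v": 3.14}
toV([1, 2, 3])                                          # wraps {"dt": "int[]", "v": [1, 2, 3]}
toV(None, defaultType="str")                            # wraps {"dt": "str", "v": None}
toV(datetime.datetime.utcnow(), dataType="timestamputc")    # datetime is converted via .timestamp()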
def _compile_bool(scmgr: StructureCheckerManager, x: JDef):
	Assert.isIn(x.dataType, ["bool", "boolean"])
	#assert x.dataType in [ "bool", "boolean" ]

	return BooleanValueChecker(scmgr, required=x.required, nullable=x.nullable)
def _compile_null(scmgr: StructureCheckerManager, x: JDef):
	Assert.isEqual(x.dataType, "null")
	#assert x.dataType == "null"

	return NullValueChecker(scmgr, required=x.required)
["|c|d|e", None, []], ["|c|*", int, 123, [ "|c|d", ]], ["|c|*|d", None, []], ["|a|b", None, []], ] for p in PATTERNS: print("NOW PROCESSING:", p) if len(p) == 3: spath, result = FlexDataSelector(p[0]).getOne(dataTree) print("\tspath = " + repr(spath)) print("\tresult = " + repr(result)) if p[1] is None: Assert.isNone(result) Assert.isNone(spath) else: Assert.isInstance(result, p[1]) Assert.isInstance(spath, str) Assert.isIn(spath, p[2]) elif len(p) == 4: spath, result = FlexDataSelector(p[0]).getOne(dataTree) if p[1] is None: Assert.isNone(spath) Assert.isNone(result) else: Assert.isInstance(result, p[1]) Assert.isEqual(result, p[2]) Assert.isInstance(spath, str) Assert.isIn(spath, p[3])
TEST_VECTOR_1 = (
	( "A1", "/", "/ab.cd", "/ab.cd" ),
	( "A2", "/", "ab.cd", "/ab.cd" ),
	( "A3", "/xy/z/", "ab.cd", "/xy/z/ab.cd" ),
	( "A4", "/xy/z/", "../ab.cd", "/xy/ab.cd" ),
	( "A5", "/xy/z/", ".././ab.cd", "/xy/ab.cd" ),
	( "A6", "/xy/z/", ".././../ab.cd", "/ab.cd" ),
	( "A7", "/xy/z/", ".././../../ab.cd", "/../ab.cd" ),
	( "A8", "/xy/z/", "/a/ab.cd", "/a/ab.cd" ),
	( "A9", "/xy/z/", "/../ab.cd", "/../ab.cd" ),
)

for label, mergeURL1, mergeURL2, validationURL in TEST_VECTOR_1:
	print(label)
	Assert.isEqual(mergeURLPaths(mergeURL1, mergeURL2), validationURL)
print()

urlCanonicalizer = URLCanonicalizer("https://www.xyz/startx/")

TEST_VECTOR_2 = (
	( "B1", "http://test.abc/def.yz", "http://test.abc/def.yz" ),
	( "B2", "//test.abc/def.yz", "https://test.abc/def.yz" ),
	( "B3", "http:///def.yz", "http://www.xyz/def.yz" ),
	( "B4", "/def.yz", "https://www.xyz/def.yz" ),