def test_fileOperations_1(self):
    """
    End-to-end exercise of the pUtils file helpers in a temp directory:
    write/read round-trip, sha1, slice/unslice, zip/unzip, and the
    empty/create/remove directory helpers.

    Improvement: boolean existence checks use assertTrue/assertFalse
    instead of the non-idiomatic assertEqual(x, True/False).
    """
    testDirFullPath = tempfile.mkdtemp('', 'unitTest_pUtils_')
    self.assertTrue(os.path.exists(testDirFullPath))

    # Round-trip a small file and pin its checksum.
    string = 'print \'Hello World!\''
    pUtils.quickFileWrite(os.path.join(testDirFullPath, 'hw.py'), string)
    data = pUtils.quickFileRead(os.path.join(testDirFullPath, 'hw.py'))
    self.assertEqual(string, data)
    self.assertEqual(pUtils.getFileSha1(os.path.join(testDirFullPath, 'hw.py')),
                     'a849bee4b303051f907d64b6c461ee6c699c3e79')

    # Slice into 1-byte pieces; the 20-char payload is expected to yield
    # 21 'hw.py.N' files (presumably 20 data slices plus a .0 slice).
    pUtils.pSlice(os.path.join(testDirFullPath, 'hw.py'), testDirFullPath, 1)
    self.assertEqual(
        len(pUtils.filterListByRegex(os.listdir(testDirFullPath),
                                     r'hw\.py\.[0-9]+')), 21)

    # Remove the original, reassemble from slices, verify contents survive.
    os.remove(os.path.join(testDirFullPath, 'hw.py'))
    self.assertFalse(os.path.exists(os.path.join(testDirFullPath, 'hw.py')))
    pUtils.pUnSlice(os.path.join(testDirFullPath, 'hw.py.0'),
                    os.path.join(testDirFullPath, 'hw.py'))
    self.assertTrue(os.path.exists(os.path.join(testDirFullPath, 'hw.py')))
    self.assertEqual(pUtils.quickFileRead(os.path.join(testDirFullPath, 'hw.py')), string)

    # Zip, delete the original, unzip, and verify contents again.
    pUtils.createZipFile(testDirFullPath, ['hw.py'],
                         os.path.join(testDirFullPath, 'aFile.zip'))
    self.assertTrue(os.path.exists(os.path.join(testDirFullPath, 'aFile.zip')))
    os.remove(os.path.join(testDirFullPath, 'hw.py'))
    self.assertFalse(os.path.exists(os.path.join(testDirFullPath, 'hw.py')))
    pUtils.unzipFile(os.path.join(testDirFullPath, 'aFile.zip'), testDirFullPath)
    self.assertTrue(os.path.exists(os.path.join(testDirFullPath, 'hw.py')))
    self.assertEqual(pUtils.quickFileRead(os.path.join(testDirFullPath, 'hw.py')), string)

    # Directory helpers: empty, create nested, remove.
    pUtils.emptyDirectory(testDirFullPath)
    self.assertEqual(len(os.listdir(testDirFullPath)), 0)
    pUtils.createDirectory(os.path.join(testDirFullPath, 'ttt', 'ttt2'))
    self.assertTrue(os.path.exists(os.path.join(testDirFullPath, 'ttt', 'ttt2')))
    pUtils.removeDirectory(testDirFullPath)
    self.assertFalse(os.path.exists(testDirFullPath))
def loadImage(filePath, nullColor=None):
    """
    Load an image file, trying formats in order: custom rgba8888, custom
    rgb888, then PIL (PNG only).

    filePath: path of the image file to load.
    nullColor: optional [r, g, b] list; when given, a read or decode
        failure returns a generated placeholder image instead of raising.

    Returns an Rgba8888Image or Rgb888Image; raises on failure when
    nullColor is not provided.
    """
    def genPlaceHolder():
        # If we couldn't load an image we generate a placeholder
        # This is necessary since we may be dealing with a list of images
        # and we can't just end the application when one 'unloadable' image
        # is found.
        # By generating a place holder instead, we allow the user to keep
        # reviewing the images on the list as well as indicating something
        # went wrong.
        width = 100
        height = 100
        # One RGBA pixel (nullColor + alpha byte 0), tiled width*height times.
        # NOTE(review): alpha 0 makes the placeholder fully transparent —
        # presumably intentional as the "something went wrong" marker; confirm.
        t = Rgba8888Image(
            bytearray(nullColor + [0]) * width * height,
            width, height)
        t.srcFileFormat = 'nullImage'
        return t

    # Read the raw bytes once; on failure either hand back a placeholder
    # (when nullColor is set) or let the original error propagate.
    try:
        data = pUtils.quickFileRead(filePath, 'rb')
    except Exception:
        if nullColor:
            return genPlaceHolder()
        raise

    # First attempt: custom rgba8888 container.
    try:
        img = Rgba8888Image()
        img.load(filePath, data=data)
        return img
    except Exception:
        pass

    # Second attempt: custom rgb888 container.
    try:
        img = Rgb888Image()
        img.load(filePath, data=data)
        return img
    except Exception:
        pass

    # Last resort: let PIL identify the data.
    try:
        img = Image.open(BytesIO(data))
    except Exception:
        raise Exception('Unable to identify image format')

    # PIL identified it; only PNG in RGBA/RGB modes is accepted.
    try:
        if img.format != 'PNG':
            raise Exception('Unsupported image format ' + img.format)
        width, height = img.size
        data = bytearray(img.tobytes())
        if img.mode == 'RGBA':
            t = Rgba8888Image(data, width, height)
        elif img.mode == 'RGB':
            t = Rgb888Image(data, width, height)
        else:
            raise Exception('Unknown Image mode')
        t.srcFilePath = filePath
        t.srcFileFormat = 'PNG'
        return t
    except Exception:
        if nullColor:
            return genPlaceHolder()
        raise
def prepsForTestRun(fileFullPath, testRunID):
    """
    Build the parameterized INSERT for the TestRun table.

    fileFullPath: file holding the comma-separated row values
        (SN,siteID,stationID,startTimestamp,endTimestamp,isPass,lastTestEntered).
    testRunID: prepended as the first column value.

    Returns (query, values) ready for a parameterized execute.
    """
    columns = '(testRunID,SN,siteID,stationID,startTimestamp,endTimestamp,isPass,lastTestEntered)'
    placeholders = '(%s,%s,%s,%s,%s,%s,%s,%s)'
    query = 'INSERT INTO TestRun ' + columns + ' VALUES ' + placeholders + ';'
    fileContents = pUtils.quickFileRead(fileFullPath)
    values = [testRunID] + fileContents.split(',')
    return query, values
def setupMTPdatabase(self):
    """
    Set up the currently connected (assumed empty) database as an MTP
    database by running the initDB.sql script from the MTP scripts
    directory under $MTP_TESTSTATION.
    """
    scriptPath = os.path.join(
        os.environ['MTP_TESTSTATION'], 'MTP', 'scripts', 'initDB.sql')
    initScript = pUtils.quickFileRead(scriptPath)
    self.sql.quickSqlWrite(initScript, [])
def load(self):
    """
    Load the JSON configuration referenced by self.configFilePath into
    self.configData.

    Returns 1 when no config path is set, 0 on success. On a read/parse
    failure a colored error message is printed and the process exits
    with status 1.
    """
    path = self.configFilePath
    if path is None:
        return 1
    try:
        self.configData = pUtils.quickFileRead(path, 'json')
    except Exception:
        pprint('Error: ', color=COLOR.RED, endLine=False)
        pprint('Unable to load json file:')
        pprint(' ' + path, color=COLOR.TEAL)
        exit(1)
    return 0
def verify(**kwargs):
    """
    Verify the sha1 checksums listed in a manifest file.

    kwargs:
        manifestFileFullPath: path to a JSON manifest whose 'checksumDict'
            maps fileName -> expected sha1; the files live in the same
            directory as the manifest.

    Returns {'retCode': 0, 'errMsg': None} when every checksum matches,
    or {'retCode': 1, 'errMsg': ...} on the first mismatch.
    """
    directory = os.path.dirname(kwargs["manifestFileFullPath"])
    manifestData = json.loads(pUtils.quickFileRead(kwargs["manifestFileFullPath"]))
    # .items() instead of the Python-2-only .iteritems(): behaves the same
    # here and keeps the function working on both Python 2 and 3.
    for fileName, checksum in manifestData["checksumDict"].items():
        fileFullPath = os.path.join(directory, fileName)
        if pUtils.getFileSha1(fileFullPath) != checksum:
            # Error text kept byte-identical (incl. 'missmatch' spelling)
            # in case callers match on it.
            return {"retCode": 1, "errMsg": "Checksum missmatch: " + fileName}
    return {"retCode": 0, "errMsg": None}
def run(self):
    """
    | The code for the thread.
    | It takes care of uploading all the configurations, instantiating a *Sequencer*
    | and passing the configuration to it.
    | This way if some dynamic manipulation to the configuration is required,
    | one can create its own *SequencerThread* type object.
    """
    # Configuration is resolved from the command line:
    #   argv[1]=siteID, argv[2]=config workspace, argv[3]=test station config file,
    #   argv[4]=limit file, argv[5]=db config file,
    #   argv[6] (optional)=route control process directory.
    configRoot = os.path.join(os.environ['MTP_TESTSTATION'], 'MTP', 'config')
    configWorkspace = sys.argv[2]
    testStationConfigFileFullPath = os.path.join(configRoot, configWorkspace, 'testStationConfig', sys.argv[3])
    limitFileFullPath = os.path.join(configRoot, configWorkspace, 'limits', sys.argv[4])
    dbConfigFileFullPath = os.path.join(configRoot, configWorkspace, 'database', sys.argv[5])
    siteID = sys.argv[1]
    configData = json.loads(pUtils.quickFileRead(testStationConfigFileFullPath))
    # Resolve any 'import' section before use.
    configData = self.processImport(configData)
    # testSequenceID is the station-config file name without its extension.
    configData['testSequenceID'] = sys.argv[3].split('.')[0]
    limitDict = json.loads(pUtils.quickFileRead(limitFileFullPath))
    dbConfig = json.loads(pUtils.quickFileRead(dbConfigFileFullPath))
    if len(sys.argv) > 6:
        # Route control enabled: load every process file and build a
        # reverse lookup from transition node -> processID.
        routeControlProcessRoot = os.path.join(configRoot, configWorkspace, 'routeControl', 'processes', sys.argv[6])
        processDict = {}
        routeControlLookUp = {}
        for fileName in os.listdir(routeControlProcessRoot):
            fileFullPath = os.path.join(routeControlProcessRoot, fileName)
            data = json.loads(pUtils.quickFileRead(fileFullPath))
            processID = fileName.split('.')[0]
            processDict[processID] = data
            for node in data['transitionDict']:
                routeControlLookUp[node] = processID
        Sequencer(siteID, configData, limitDict, dbConfig, (processDict, routeControlLookUp), self.guiApi)
    else:
        # No route control: pass an empty tuple in its place.
        Sequencer(siteID, configData, limitDict, dbConfig, (), self.guiApi)
    # Ask the GUI to rebuild its layout once the sequencer run completes.
    self.guiApi.sendMessage({'command': 'reInitLayout'})
def preprocessFileList(kwargs):
    """
    Expand file-list variables inside *kwargs* in place.

    When kwargs['fList'] is truthy, every variable named in
    kwargs['fListVarNameList'] that is present and not None is treated as
    a list of file paths; each file is read as text and the variable is
    replaced by the concatenation of everything read.
    """
    if not kwargs.get('fList'):
        return
    for varName in kwargs['fListVarNameList']:
        pathList = kwargs.get(varName)
        if pathList is None:
            continue
        expanded = []
        for path in pathList:
            expanded += pUtils.quickFileRead(path, 'txt')
        kwargs[varName] = expanded
def transport(**kwargs):
    """
    Retrieve a sliced packet set from a remote host via scp.

    kwargs:
        packetZeroPath: remote full path of the '<base>.0' slice; its JSON
            contents hold 'sliceAmount' and 'checksumDict'.
        dstPath: local destination directory.
        host, user, port: scp connection parameters.
        try: maximum scp attempts per slice.

    Returns a dict with retCode 0 on success, 1 when a slice exhausts its
    retries, 2 when packet zero cannot be retrieved.
    """
    srcPath = os.path.dirname(kwargs["packetZeroPath"])
    # Strip the trailing '.0' to obtain the slice base name.
    baseFileName = os.path.basename(kwargs["packetZeroPath"])[:-2]

    def scpStr(index):
        # Build the scp command line for slice <index>.
        s = "scp -P " + kwargs["port"] + " " + kwargs["user"] + "@" + kwargs["host"] + ":"
        s += os.path.join(srcPath, baseFileName + "." + str(index))
        s += " " + kwargs["dstPath"]
        return s

    # Fetch packet zero first, unless it is already present locally.
    if not os.path.exists(os.path.join(kwargs["dstPath"], baseFileName + ".0")):
        pUtils.createDirectory(kwargs["dstPath"])
        cmd = scpStr(0)
        t = pUtils.runProgram(cmd, True)
        if t["returnCode"] != 0:
            return {"retCode": 2, "errMsg": "Unable to retrieve packet zero", "debug": t, "cmd": cmd}

    # Packet zero is the manifest: slice count plus per-slice sha1 sums.
    fileFullPath = os.path.join(kwargs["dstPath"], baseFileName + ".0")
    manifestData = json.loads(pUtils.quickFileRead(fileFullPath))
    sliceAmount = manifestData["sliceAmount"]
    checksumDict = manifestData["checksumDict"]

    for i in range(1, sliceAmount + 1):
        fileFullPath = os.path.join(kwargs["dstPath"], baseFileName + "." + str(i))
        # Skip slices already downloaded with a matching checksum (resume).
        if os.path.exists(fileFullPath):
            fileSha1 = pUtils.getFileSha1(fileFullPath)
            if fileSha1 == checksumDict[baseFileName + "." + str(i)]:
                continue
        cmd = scpStr(i)
        t = None
        retryCounter = 0
        # Retry scp until the slice lands with the right checksum, up to
        # int(kwargs['try']) attempts; 'continue' here retries, 'break'
        # moves on to the next slice.
        while True:
            retryCounter += 1
            if retryCounter > int(kwargs["try"]):
                return {"retCode": 1, "errMsg": "Max retry reached", "debug": t, "cmd": cmd}
            t = pUtils.runProgram(cmd, True)
            if t["returnCode"] != 0:
                continue
            fileSha1 = pUtils.getFileSha1(fileFullPath)
            if fileSha1 != checksumDict[baseFileName + "." + str(i)]:
                continue
            break
    return {"retCode": 0, "errMsg": None}
def prepsForDictionary(fileFullPath, testRunID):
    """
    Build a multi-row INSERT for the StringDictionary table from a JSON
    file of key/value pairs.

    fileFullPath: file containing a JSON object.
    testRunID: prepended to every (key, value) row.

    Returns (query, values); ('', []) when the JSON object is empty.
    """
    entries = json.loads(pUtils.quickFileRead(fileFullPath))
    if not entries:
        return '', []
    rowPlaceholder = '(%s,%s,%s)'
    query = ('INSERT INTO StringDictionary (testRunID,key,value) VALUES '
             + rowPlaceholder
             + (',' + rowPlaceholder) * (len(entries) - 1)
             + ';')
    values = []
    for key in entries:
        values += [testRunID, key, entries[key]]
    return query, values
def load(self, filePath, data=None):
    """
    Parse an rgba8888 image file into this object.

    Format: an ASCII header line b'rgba8888 <width> <height>' terminated
    by a line feed (0x0A), followed by the raw pixel bytes.

    filePath: source path (read via pUtils when *data* is None; always
        recorded as srcFilePath).
    data: optional raw file contents, bypassing the file read.

    Raises Exception when the header does not match the expected format.
    """
    if data is None:
        data = pUtils.quickFileRead(filePath, 'rb')
    newlineAt = data.find(b'\x0A')
    headerBytes = data[:newlineAt]
    pixelBytes = data[newlineAt + 1:]
    # Sample header to match: rgba8888 320 240  ([\x30-\x39] == ASCII digits)
    match = re.match(b'rgba8888 ([\x30-\x39]+) ([\x30-\x39]+)', headerBytes)
    if match is None:
        raise Exception('Invalid header for a rgba8888 file type')
    self.width = int(match.group(1))
    self.height = int(match.group(2))
    self.data = bytearray(pixelBytes)
    self.srcFilePath = filePath
    self.srcFileFormat = 'RGBA8888'
def prepsForTestMeasurement(fileFullPath, testRunID):
    """
    Build a multi-row INSERT for the TestMeasurement table from a file
    with one comma-separated measurement per line:
        startTimestamp,endTimestamp,testName,testMeasurementName,dataType,
        min,measurement,max,isPass

    testRunID is prepended to every row; DOUBLE measurements are stored
    in both the string and double columns, anything else as string only.

    Returns (query, values); ('', []) when the file holds no measurements.
    """
    data = pUtils.quickFileRead(fileFullPath)
    # BUGFIX: ''.split('\n') is [''] (never []), so the old
    # len(measurementList)==0 guard could not fire and an empty file (or a
    # trailing newline) crashed below on fields[4]. Drop empty lines and
    # guard on the filtered list instead.
    measurementList = [line for line in data.split('\n') if line]
    if not measurementList:
        return '', []
    s = 'INSERT INTO TestMeasurement'
    s += ' (testRunID,startTimestamp,endTimestamp,testName,testMeasurementName,dataType,stringMin,stringMeasurement,stringMax,doubleMin,doubleMeasurement,doubleMax,isPass)'
    s += ' VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
    s += ',(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)' * (len(measurementList) - 1)
    s += ';'
    v = []
    for measurement in measurementList:
        fields = measurement.split(',')
        if fields[4] == 'DOUBLE':
            # Numeric measurement: duplicate min/measurement/max into the
            # double columns as well.
            v += [testRunID] + fields[0:8] + fields[5:8] + fields[8:]
        else:
            # If dataType is string (or anything else) store it as string,
            # zero-filling the double columns.
            v += [testRunID] + fields[0:8] + [0, 0, 0] + fields[8:]
    return s, v
def processImport(self, configData):
    """
    | Process the import section of the configData.
    | If there is no import section, do nothing.

    Each entry under configData['import']['config'] names a JSON file
    (path relative to $MTP_TESTSTATION) keyed by fileID; each entry under
    configData['import']['data'] copies a value out of one of those files
    (walking varJsonSrcPath) into configData (at varJsonDstPath).

    Returns configData (modified in place when imports are present).
    """
    if 'import' not in configData:
        return configData
    importSection = configData['import']
    loadedFiles = {}
    for fileEntry in importSection['config']:
        fullPath = os.path.join(os.environ['MTP_TESTSTATION'],
                                fileEntry['filePartialPath'])
        loadedFiles[fileEntry['fileID']] = json.loads(
            pUtils.quickFileRead(fullPath))
    for dataEntry in importSection['data']:
        value = loadedFiles[dataEntry['fileID']]
        for key in dataEntry['varJsonSrcPath']:
            value = value[key]
        target = configData
        dstPath = dataEntry['varJsonDstPath']
        for key in dstPath[:-1]:
            target = target[key]
        target[dstPath[-1]] = value
    return configData
# (fragment of a larger method — the enclosing def is not in view)
# Assemble the TestRun summary row in table column order.
self.testRunSummary = [SN, self.siteID, self.stationID, self.testSequenceID, self.startTimestamp, self.endTimestamp, self.lastTestEntered, self.cycleTestResult]
self.testMeasurementList = self.limitManager.testMeasurementList
if self.testSuite:
    # Merge the dictionaries accumulated by the test suite into ours.
    self.stringDictionary.update(self.testSuite.stringDictionary)
    self.numericDictionary.update(self.testSuite.numericDictionary)
    self.filePointerDictionary.update(self.testSuite.filePointerDictionary)
else:
    # No test suite: reset the string/numeric dictionaries.
    # NOTE(review): filePointerDictionary is NOT reset here, unlike the
    # other two — confirm whether that asymmetry is intentional.
    self.stringDictionary = {}
    self.numericDictionary = {}
self.dependencyDict = self.configurationManager.getDependencyDict()
self.fileDictionary = {}
if self.isMemoryOnly == False:
    # Read every referenced file from the test-run folder and store it
    # packed, keyed by its pointer ID.
    # NOTE(review): .iteritems() is Python-2-only.
    for pointerID, fileRelativePath in self.filePointerDictionary.iteritems():
        fileFullPath = os.path.join(self.testRunFolder, fileRelativePath)
        fileData = pUtils.quickFileRead(fileFullPath, 'rb')
        self.fileDictionary[pointerID] = pUtils.pPack(fileData)
### Write data file
self.writeTestRunDataFiles()
if self.isDatabaseEnable:
    # Test Database connectivity; warn the operator via a GUI dialog if
    # the database cannot be reached.
    if not self.isDatabaseReachable():
        self.guiApi.sendMessage(
            {'command': 'pDialog',
             'buttonTextList': ['OK'],
             'msg': 'Unable to reach the database',
             'imageFileName': 'networkProblem.png'})
def load(**kwargs):
    """
    Load a previously dumped set of CSV table files into the database,
    inserting only records not already present.

    kwargs:
        directoryFullPath: directory holding the dump files plus manifest.
        configFileFullPath / config: DB config (explicit path, or a file
            name resolved under $MTP_TESTSTATION).

    Pipeline: verify the manifest checksums; for each table COPY its CSV
    into an empty temp clone; diff temp TestRun against the live TestRun
    to find new testRunIDs; insert only those rows into every table.

    Returns {'retCode': 0, 'errMsg': None, 'newRecordsAmount': N}, or
    retCode 1 when checksum verification fails.
    """
    directoryFullPath = kwargs["directoryFullPath"]
    # Refuse to load anything whose manifest checksums don't verify.
    t = verify(manifestFileFullPath=os.path.join(directoryFullPath, MANIFEST_FILE_NAME))
    if t["retCode"] != 0:
        return {"retCode": 1, "errMsg": "Checksum verification failed!", "debug": t}
    if kwargs["configFileFullPath"]:
        DATAMINING_CONFIG = json.loads(pUtils.quickFileRead(kwargs["configFileFullPath"]))
    else:
        DATAMINING_CONFIG = json.loads(
            pUtils.quickFileRead(
                os.path.join(os.environ["MTP_TESTSTATION"], "MTP", "config", "miselu", "database", kwargs["config"])
            )
        )
    sql = SQL(**DATAMINING_CONFIG)
    sql.conn()
    for tableName in tableNameList:
        # Create Empty Temp table: same schema as the live table but
        # guaranteed zero rows (testRunID!=testRunID is always false).
        v = []
        s = "SELECT * INTO TEMP TABLE %s FROM %s"
        s += "\n WHERE testRunID!=testRunID"
        s = s % (tableName + "_t", tableName)
        sql.execute(s, v)
        # Load temp table from the corresponding CSV dump file.
        v = []
        s = "COPY"
        s += "\n %s"
        s += "\n FROM STDIN"
        s += "\n WITH CSV "
        s += "\n HEADER"
        s = s % (tableName + "_t")
        fileFullPath = os.path.join(directoryFullPath, fileBaseName + "_" + tableName + "." + extension)
        with open(fileFullPath, "rt") as f:
            sql.cur.copy_expert(s, f)
    # Create TestRunID_input reference table: testRunIDs present in the
    # dump (Table2 / TestRun_t) but absent from the live TestRun (Table1).
    # Rows appearing in exactly one side of the UNION are the difference;
    # those whose side label is 'Table2' are new.
    v = ["Table1", "Table2", "Table2"]
    s = "WITH Table2 as (SELECT testrunID FROM TestRun_t)"
    s += "\n, TableDiff AS ("
    s += "\n SELECT MIN(tableName) as tableName"
    s += "\n , testrunID"
    s += "\n FROM"
    s += "\n ("
    s += "\n SELECT %s AS tableName"
    s += "\n , testrunID"
    s += "\n FROM TestRun"
    s += "\n UNION ALL"
    s += "\n SELECT %s AS tableName"
    s += "\n , testrunID"
    s += "\n FROM Table2"
    s += "\n ) AS MCH_UnionTable1"
    s += "\n GROUP BY testRunID"
    s += "\n HAVING COUNT(*) = 1"
    s += "\n ORDER BY tableName"
    s += "\n )"
    s += "SELECT * INTO TEMP TABLE TestRunID_input FROM TableDiff WHERE tableName = %s"
    s += "\n;"
    sql.execute(s, v)
    for tableName in tableNameList:
        # Insert new records: only temp-table rows whose testRunID is in
        # TestRunID_input make it into the live table.
        v = []
        s = "INSERT INTO %s"
        s += "\n (SELECT %s.* FROM %s,TestRunID_input WHERE TestRunID_input.testRunID = %s.testRunID)"
        s += "\n;"
        s = s % ((tableName,) + (tableName + "_t",) * 3)
        sql.execute(s, v)
    newRecordsAmount = sql.read("SELECT COUNT(*) FROM TestRunID_input", [])[0][0]
    sql.commit()
    sql.close()
    return {"retCode": 0, "errMsg": None, "newRecordsAmount": newRecordsAmount}
def determineConfigFilePath(self, configFilePath, configName):
    """
    ConfigManager determines what config file to use in the following way:
    1) If configFilePath is not None uses that and returns.
    2) If configStart.txt exists:
       2.1) If configName is not None then:
            - ConfigManager loads 'configStart.txt' from the same directory
              as this file. 'configStart.txt' points to a config menu file
            - ConfigManager loads the config menu file
              The config menu file is a dictionary of 'configName: path' pairs
            - ConfigManager uses configName with the dictionary and gets the path
       2.2) if configName is None, is then the same as in #2.1 except that
            defaultConfigName (from the config menu file) is used as configName.
    3) Otherwise no configFile would be load
    """
    # Case 1: an explicit path wins outright (made absolute relative to
    # the configStart.txt directory when needed).
    if configFilePath is not None:
        self.configFilePath = configFilePath
        if not os.path.isabs(self.configFilePath):
            self.configFilePath = os.path.join(
                os.path.dirname(self.configStartPath), self.configFilePath)
        return
    # Case 2: the first line of configStart.txt names the config menu file.
    try:
        self.configMenuPath = pUtils.quickFileRead(self.configStartPath, 'txt')[0]
    except Exception:
        # Case 3: no configStart.txt -> run on internal defaults.
        if self.verbose:
            pprint('---------------------------------')
            pprint('Info: ', color=COLOR.TEAL, endLine=False)
            pprint('Unable to load:')
            pprint(' %s' % self.configStartPath, color=COLOR.TEAL)
            pprint('Internal defaults will be used')
            pprint('---------------------------------')
        return
    if not os.path.isabs(self.configMenuPath):
        self.configMenuPath = os.path.join(
            os.path.dirname(self.configStartPath), self.configMenuPath)
    # The menu file must parse as JSON; a broken menu is a hard error.
    try:
        configMenuData = pUtils.quickFileRead(self.configMenuPath, 'json')
    except Exception:
        pprint('Error: ', color=COLOR.RED, endLine=False)
        pprint('Unable to load json file:')
        pprint(' ' + self.configMenuPath, color=COLOR.TEAL)
        exit(1)
    if configName is None:
        # Case 2.2: fall back to the menu's defaultConfigName.
        try:
            defaultConfigName = configMenuData['defaultConfigName']
        except Exception:
            pprint('Error: ', color=COLOR.RED, endLine=False)
            pprint('On file:')
            pprint(' ' + self.configMenuPath, color=COLOR.TEAL)
            pprint('Key: "', endLine=False)
            pprint('defaultConfigName',
                   endLine=False, color=COLOR.TEAL)
            pprint('" not found')
            exit(1)
        configName = defaultConfigName
    # Resolve configName through the menu's path dictionary.
    try:
        self.configFilePathFromMenu = configMenuData['configFilePathDict'][
            configName]
    except Exception:
        pprint('Error: ', color=COLOR.RED, endLine=False)
        pprint('On file:')
        pprint(' ' + self.configMenuPath, color=COLOR.TEAL)
        pprint('Key sequence: "', endLine=False)
        pprint('configFilePathDict ' + configName,
               endLine=False, color=COLOR.TEAL)
        pprint('" not found')
        exit(1)
    self.configFilePath = self.configFilePathFromMenu
    # Menu-relative paths are resolved against the menu file's directory.
    if not os.path.isabs(self.configFilePath):
        self.configFilePath = os.path.join(
            os.path.dirname(self.configMenuPath), self.configFilePath)
def dump(**kwargs):
    """
    Dump filtered test-run records from the database to CSV files.

    Builds a temp table (SelectedTestRunID) of testRunIDs matching the
    filters in kwargs, then COPYies each table in tableNameList
    (restricted to those IDs) into '<fileBaseName>_<tableName>.<extension>'
    files under kwargs['directoryFullPath'], and finally writes a
    checksum manifest alongside them.

    kwargs (filters are skipped when None):
        configFileFullPath / config: DB config (explicit path, or a file
            name resolved under $MTP_TESTSTATION).
        directoryFullPath: destination directory (created; must be empty).
        creationTimestamp_rangeStart (required),
        creationTimestamp_rangeEnd, startTimestamp_rangeStart/End,
        endTimestamp_rangeStart/End, siteID, stationID, testSequenceID,
        isPass.

    Returns {'retCode': 0, 'errMsg': None}.
    Raises Exception when the destination directory is not empty.
    """
    if kwargs["configFileFullPath"]:
        DATAMINING_CONFIG = json.loads(pUtils.quickFileRead(kwargs["configFileFullPath"]))
    else:
        DATAMINING_CONFIG = json.loads(
            pUtils.quickFileRead(
                os.path.join(os.environ["MTP_TESTSTATION"], "MTP", "config", "miselu", "database", kwargs["config"])
            )
        )
    sql = SQL(**DATAMINING_CONFIG)
    sql.conn()

    ### Create virtual table of testRunID's ###
    SNlist = None  # NOTE(review): SN filtering appears disabled (always None).
    v = []
    s = "SELECT testRunID INTO TEMP TABLE SelectedTestRunID FROM TestRun"
    s += "\n WHERE creationTimestamp>=%s"
    v.append(kwargs["creationTimestamp_rangeStart"])
    if kwargs["creationTimestamp_rangeEnd"] != None:
        s += "\n AND creationTimestamp<=%s"
        # BUGFIX: this append was missing, so supplying
        # creationTimestamp_rangeEnd produced a query with more %s
        # placeholders than parameters.
        v.append(kwargs["creationTimestamp_rangeEnd"])
    if kwargs["startTimestamp_rangeStart"] != None:
        s += "\n AND startTimestamp>=%s"
        v.append(kwargs["startTimestamp_rangeStart"])
    if kwargs["startTimestamp_rangeEnd"] != None:
        s += "\n AND startTimestamp<=%s"
        v.append(kwargs["startTimestamp_rangeEnd"])
    if kwargs["endTimestamp_rangeStart"] != None:
        # NOTE(review): endTimestamp uses strict >/< unlike the other
        # ranges' >=/<= — preserved as-is; confirm if intentional.
        s += "\n AND endTimestamp>%s"
        v.append(kwargs["endTimestamp_rangeStart"])
    if kwargs["endTimestamp_rangeEnd"] != None:
        s += "\n AND endTimestamp<%s"
        v.append(kwargs["endTimestamp_rangeEnd"])
    if SNlist != None:
        s += "\n AND ( SN=%s"
        s += "\n OR SN=%s" * (len(SNlist) - 1)
        s += "\n )"
        v += SNlist
    if kwargs["siteID"] != None:
        s += "\n AND siteID=%s"
        v.append(kwargs["siteID"])
    if kwargs["stationID"] != None:
        s += "\n AND stationID=%s"
        v.append(kwargs["stationID"])
    if kwargs["testSequenceID"] != None:
        s += "\n AND testSequenceID=%s"
        v.append(kwargs["testSequenceID"])
    if kwargs["isPass"] != None:
        s += "\n AND isPass=%s"
        v.append(kwargs["isPass"])
    # s+=' limit 10'
    s += "\n ;"
    sql.execute(s, v)  ##TODO check if it is blocking or not

    # For every table, filtered with the TestRunID's table and write it to a file
    directoryFullPath = kwargs["directoryFullPath"]
    pUtils.createDirectory(directoryFullPath)
    if len(os.listdir(directoryFullPath)) != 0:
        raise Exception("Directory specified for dump is not empty")
    for tableName in tableNameList:
        v = []
        s = "COPY"
        s += "\n (SELECT %s.* FROM %s,SelectedTestRunID"
        s += "\n WHERE SelectedTestRunID.testRunID = %s.testRunID)"
        s += "\n TO STDOUT"
        s += "\n WITH CSV "
        s += "\n HEADER "
        s = s % ((tableName,) * 3)
        fileFullPath = os.path.join(directoryFullPath, fileBaseName + "_" + tableName + "." + extension)
        with open(fileFullPath, "wt") as f:
            sql.cur.copy_expert(s, f)
    sql.close()
    createManifest(MANIFEST_FILE_NAME, directoryFullPath)
    return {"retCode": 0, "errMsg": None}