def run_testcase(caseName, curan_path, arguementList):
    """Run one named test case (or the whole suite) and emit the final report.

    caseName 'all_case'/'all' triggers the full-suite runner; anything else is
    treated as a single case name.
    """
    logger.info('start run the case ...\n')
    report = TestReport()
    if caseName in ('all_case', 'all'):
        run_allTestcase(caseName, curan_path, arguementList, report)
    else:
        run_singleTestcase(caseName, curan_path, arguementList, report)
    report.generateReport()
    del report
def downloadFile(url):
    """Resolve *url* to a download link and fetch the SDK archive if needed.

    A file that already exists and passes the zip integrity check is reused;
    a broken file is deleted and re-downloaded; a missing file is downloaded.

    Returns:
        (pkgType, targetFileName, newFileFlag) -- newFileFlag is True only
        when a fresh download actually happened.
    """
    pkgType, downlnk, targetFileName = getdownloadlink(url)
    targetFileNameAbsPath = os.path.join(os.getcwd(), targetFileName)
    newFileFlag = False
    # Truthiness instead of '== True' / '== False'; the exists() check is
    # done once per branch chain instead of twice.
    if os.path.exists(targetFileNameAbsPath):
        if checkzipfile(targetFileNameAbsPath):
            logger.info('file %s exist, download ignore ...\n' % targetFileName)
        else:
            # Present but corrupt: remove the stale file first so wget does
            # not save the new copy under a '.1' suffix.
            logger.warning('file {} is broken, download it again...\n'.format(
                targetFileNameAbsPath))
            os.remove(targetFileNameAbsPath)
            # NOTE(review): downlnk is interpolated into a shell command;
            # this is safe only if getdownloadlink() returns trusted URLs --
            # confirm, or switch to subprocess.run([...], shell=False).
            os.system('wget %s' % downlnk)
            logger.info('download cuda ran sdk done!\n')
            newFileFlag = True
    else:
        logger.info(
            'start download cuda ran sdk pkgType={} ...\n'.format(pkgType))
        os.system('wget %s' % downlnk)
        logger.info('download cuda ran sdk done!\n')
        newFileFlag = True
    return pkgType, targetFileName, newFileFlag
def runCommandsAndSaveLog(cmd, iter_times, duration, temp_file, logname,
                          suitename, report):
    """Execute *cmd*, analyze the captured output, record the verdict on
    *report*, then archive and clean up the temporary log.

    duration == 0 means "run iter_times iterations"; any other value means
    "loop for *duration* minutes".
    """
    if duration == 0:
        logger.info('run case {} times:'.format(iter_times))
        logger.info('{}'.format(cmd))
        loopRunCommand_byiter(cmd, iter_times, temp_file)
    else:
        logger.info('run case in {} minutes:'.format(duration))
        logger.info('{}'.format(cmd))
        loopRunCommand_bytime(cmd, duration, temp_file)
    result, reason = logAnalyze.checkResult(temp_file, suitename)
    # Case name is the part of the log name before the date suffix.
    caseRecord = CaseAnalyze(suiteName=suitename,
                             caseName=logname.split('-')[0],
                             command=cmd,
                             result=result,
                             rltDetails=reason,
                             logfile=logname)
    report.addCaseresult(caseRecord)
    del caseRecord
    writelog(temp_file, 'logs/%s.txt' % logname, cmd, suitename)
    os.remove(temp_file)
def extractAndcompile(file_zip, path, pkgType):
    """Unpack the SDK (a zip wrapping a .tgz), build it, and copy the private
    test vectors into the freshly extracted SDK folder.

    Returns the name of the new SDK folder.
    """
    innerTarball = extractZipFile(file_zip, path)
    extractTarfile(innerTarball, path)
    # Strip the 4-character '.tgz' suffix to get the extracted folder name.
    newsdkFolder = innerTarball[:-4]
    if pkgType in ('binary', 'stress'):
        compilecuPHY_binary(newsdkFolder)
    else:
        compilecuPHY_Src(newsdkFolder)
    tvFolder = os.path.join(os.path.join(os.getcwd(), newsdkFolder),
                            'testVectors')
    logger.info("JJJJJ={}".format(tvFolder))
    os.system('cp ../private_TV/* %s' % tvFolder)
    return newsdkFolder
def compilecuPHY_Src(cuda_ran_sdk):
    """Build the cuPHY sources in a fresh <sdk>/build directory (cmake+make)."""
    logger.info('start compile cuphy src ...\n')
    startDir = os.getcwd()
    # Recreate the build directory from scratch for a clean build.
    os.system("rm -rf %s/build" % cuda_ran_sdk)
    os.system("mkdir %s/build" % cuda_ran_sdk)
    buildDir = os.path.join(startDir, '%s/build' % cuda_ran_sdk)
    os.chdir(buildDir)
    for buildCmd in ('cmake ..', 'make -j 44'):
        os.system(buildCmd)
    # Return to the original directory so later relative paths still resolve.
    os.chdir(startDir)
    logger.info('\ncompile cuda ran sdk done\n')
def compilecuPHY_binary(cuda_ran_sdk):
    """Build the pre-packaged cuPHY binary tree in a fresh cuPHY/build
    directory (cmake+make), then restore the working directory.
    """
    logger.info('start compile cuda ran sdk ...\n')
    currpath = os.getcwd()
    # Recreate the build directory from scratch for a clean build.
    os.system("rm -rf %s/cuPHY/build" % cuda_ran_sdk)
    os.system("mkdir %s/cuPHY/build" % cuda_ran_sdk)
    # The old libPath/LD_LIBRARY_PATH export was commented out and the unused
    # variable has been removed (os.system exports would not persist anyway,
    # since each call runs in its own subshell).
    newPath = os.path.join(currpath, '%s/cuPHY/build' % cuda_ran_sdk)
    os.chdir(newPath)
    os.system('cmake ..')
    os.system('make -j 44')
    # Always return to the original directory so later relative paths resolve.
    os.chdir(currpath)
    logger.info('compile cuda ran sdk done\n')
def generateReport(self):
    """Flatten the accumulated case results into a timestamped .xlsx report,
    overwrite any same-named file, then clear the collected data.
    """
    reportfile = 'report-%s.xlsx' % datetime.datetime.now().strftime(
        '%Y-%m-%d-%H:%M:%S')
    suiteNames, caseNames, commands = [], [], []
    criterias, results, details, logfiles = [], [], [], []
    for rec in self.caseRltList:
        suiteNames.append(rec._suiteName)
        caseNames.append(rec._caseName)
        commands.append(rec._command)
        # Criteria are collected but (as before) not exported to the table.
        criterias.append(rec._criteria)
        results.append(rec._result)
        details.append(rec._rltDetails)
        logfiles.append(rec._logfile)
    reportTable = {
        'suiteName': suiteNames,
        'case': caseNames,
        'command': commands,
        'result': results,
        'detail': details,
        'log file': logfiles
    }
    if os.path.exists(reportfile):
        os.remove(reportfile)
    DataFrame(reportTable).to_excel(reportfile)
    logger.info('generate testing report : {}'.format(
        os.path.join(os.getcwd(), reportfile)))
    self.cleanData()
def runCases(caseCmds, curan_path, arguementList, report):
    """Run every command of one suite.

    The first element of *caseCmds* is a header line carrying the suite and
    case names; the HARQ-rate suite is dispatched to its dedicated runner,
    everything else goes through the generic runner. Comment lines ('#') and
    blank lines are skipped.
    """
    logger.info('casecmd={}\n'.format(caseCmds))
    header = caseCmds[0]
    casename = header.strip().replace('\n', '').strip().split(':')[-1].strip()
    suitename = header[5:].split(':')[0].strip()
    logname = '-'.join([casename, getcurrDate()])
    tempfile = 'logs/log.txt'
    if checkFileStatus(tempfile):
        os.remove(tempfile)
    for cmd in caseCmds:
        if '#' in cmd or not cmd.strip():
            continue
        if suitename == 'cuPHY_PUSCH_LDPC_support_multiple_code_rates_including_HARQ_rate':
            runParticularCase_cuPHY_PUSCH_LDPC_support_multiple_code_rates_including_HARQ_rate(
                cmd, suitename, curan_path, tempfile, logname, report,
                arguementList.duration, arguementList.iter)
        else:
            cmd = convertCmdToAbspath(cmd, curan_path, arguementList.pkg)
            runCommandsAndSaveLog(cmd, arguementList.iter,
                                  arguementList.duration, tempfile, logname,
                                  suitename, report)
def analyze_PDCCH_Tx_Pipeline(logContent):
    """Analyze a PDCCH Tx pipeline log.

    FAILED when no mismatch data was found at all, or when any mismatch count
    is > 0; PASS otherwise.

    Returns:
        (result, reason) -- result is 'PASS' or 'FAILED'.
    """
    mismatchErrorCount = logMatch.getmisMatch_PDCCH_Tx_Pipeline(logContent)
    logger.info('-------------------- LOG Analyze Result --------------------')
    logger.info('mismatchErrorCount={}'.format(mismatchErrorCount))
    # BUGFIX: the old code called len() on a filter object, which raises
    # TypeError on Python 3. Materialize the positive counts as a list.
    errMisMatch = [x for x in mismatchErrorCount if x > 0]
    result, reason = 'PASS', ''
    if len(mismatchErrorCount) == 0:
        result, reason = 'FAILED', 'the data is 0'
    if len(errMisMatch) > 0:
        result, reason = 'FAILED', 'mismatch count > 0'
    # Verdict in green for PASS, red for FAILED (ANSI colors).
    if result == 'PASS':
        logger.info(
            '--------------------------------------------------------\033[32m{} \033[0m{}\n'
            .format(result, reason))
    else:
        logger.info(
            '--------------------------------------------------------\033[31m{} \033[0m{}\n'
            .format(result, reason))
    return result, reason
def analyze_cuPHY_PUCCH_Format_1_complete(logContent):
    """Analyze a cuPHY PUCCH Format 1 log: mismatch counts plus elapsed-time
    stability decide the verdict; no elapsed-time data at all forces FAILED.

    Returns:
        (result, reason) -- result is 'PASS' or 'FAILED'.
    """
    mismatchErrorCount = logMatch.getmisMatch_cuPHY_PUCCH_Format_1_complete(
        logContent)
    elapsedtimelist = logMatch.getelapseTime_cuPHY_PUCCH_Format_1_complete(
        logContent)
    # Pass a real list, not a lazy filter object: on Python 3 a filter object
    # cannot be len()'d and is exhausted after one pass, which would break the
    # downstream checker.
    errMisMatch = [x for x in mismatchErrorCount if x > 0]
    avg_time, stdevValue_elapsedTime, stdev_elapsedTime = calcStandardEv(
        elapsedtimelist)
    result, reason = logMatch.check_cuPHY_PUCCH_Format_1_complete(
        errMisMatch, stdev_elapsedTime)
    logger.info('-------------------- LOG Analyze Result --------------------')
    logger.info('mismatchErrorCount={}'.format(mismatchErrorCount))
    logger.info('elapsedtimelist={}'.format(elapsedtimelist))
    logger.info('elapsedtime={}, avg time={:.2f}, stdev time={:.2f}'.format(
        elapsedtimelist, avg_time, stdev_elapsedTime))
    # An empty elapsed-time list overrides whatever the checker said.
    if len(elapsedtimelist) == 0:
        result, reason = 'FAILED', 'the data is 0'
    # Verdict in green for PASS, red for FAILED (ANSI colors).
    if result == 'PASS':
        logger.info(
            '--------------------------------------------------------\033[32m{} \033[0m{}\n'
            .format(result, reason))
    else:
        logger.info(
            '--------------------------------------------------------\033[31m{} \033[0m{}\n'
            .format(result, reason))
    return result, reason
def checkResultBycase(logContent, suitename):
    """Judge one case from its log content.

    PASS when throughput and elapsed-time standard deviations are both < 5
    and no bit errors occurred; otherwise FAILED with the first failing
    criterion as the reason. Missing throughput or elapsed-time data is an
    immediate FAILED.

    Returns:
        (result, reason) -- result is 'PASS' or 'FAILED'.
    """
    tputlist = logMatch.getTputList(suitename, logContent)
    elapsedtimelist = logMatch.getElapsedTimeList(suitename, logContent)
    errorBitList = logMatch.getErrorBitList(suitename, logContent)
    if len(tputlist) == 0 or len(elapsedtimelist) == 0:
        logger.info(
            '-------------------- LOG Analyze Result --------------------')
        logger.info(
            '------------------------------------------------------FAILED')
        return 'FAILED', ''
    avg_tput, stdevValue_tput, stdev_tput = calcStandardEv(tputlist)
    logger.debug('tputlist={}'.format(tputlist))
    logger.debug("avg tput={}, stdev_tput={}".format(avg_tput, stdev_tput))
    avg_time, stdevValue_elapsedTime, stdev_elapsedTime = calcStandardEv(
        elapsedtimelist)
    logger.debug('elapsedtimelist={}'.format(elapsedtimelist))
    logger.debug('avg time={}, stdev time={}'.format(avg_time,
                                                     stdev_elapsedTime))
    # BUGFIX: the old code called len() on a filter object, which raises
    # TypeError on Python 3. Materialize the positive bit-error counts.
    err = [x for x in errorBitList if x > 0]
    if stdev_tput < 5 and stdev_elapsedTime < 5 and len(err) == 0:
        result, reason = 'PASS', ''
    elif stdev_tput > 5:
        result, reason = 'FAILED', 'tput avg=%s stdev=%s, %s > 5' % (
            avg_tput, stdevValue_tput, stdev_tput)
    elif stdev_elapsedTime > 5:
        result, reason = 'FAILED', 'elapsedtime avg=%s stdev=%s, %s > 5' % (
            avg_time, stdevValue_elapsedTime, stdev_elapsedTime)
    else:
        # NOTE: a stdev exactly equal to 5 also lands here and is reported as
        # a bit error (pre-existing behavior, kept as-is).
        result, reason = 'FAILED', 'bit error>0'
    logger.info('-------------------- LOG Analyze Result --------------------')
    logger.info('tput={}, avg tput={:.2f}, stdev tput={:.2f}'.format(
        tputlist, avg_tput, stdev_tput))
    logger.info('elapsedtime={}, avg time={:.2f}, stdev time={:.2f}'.format(
        elapsedtimelist, avg_time, stdev_elapsedTime))
    logger.info('bit error count={}'.format(errorBitList))
    # Verdict in green for PASS, red for FAILED (ANSI colors).
    if result == 'PASS':
        logger.info(
            '--------------------------------------------------------\033[32m{} \033[0m{}\n'
            .format(result, reason))
    else:
        logger.info(
            '--------------------------------------------------------\033[31m{} \033[0m{}\n'
            .format(result, reason))
    return result, reason
def analyze_bySuite(suitename, logContent):
    """Generic per-suite log analysis: throughput stability, elapsed-time
    stability and bit-error counts decide the verdict; any empty metric list
    forces FAILED.

    Returns:
        (result, reason) -- result is 'PASS' or 'FAILED'.
    """
    tputlist = logMatch.getTputList(suitename, logContent)
    elapsedtimelist = logMatch.getElapsedTimeList(suitename, logContent)
    errorBitList = logMatch.getErrorBitList(suitename, logContent)
    avg_tput, stdevValue_tput, stdev_tput = calcStandardEv(tputlist)
    avg_time, stdevValue_elapsedTime, stdev_elapsedTime = calcStandardEv(
        elapsedtimelist)
    # Pass a real list, not a lazy filter object: on Python 3 a filter object
    # cannot be len()'d and is exhausted after one pass, which would break the
    # downstream checker.
    errorBitcount = [x for x in errorBitList if x > 0]
    result, reason = logMatch.check_AnalyzeResult_bySuite(
        stdev_tput, stdev_elapsedTime, errorBitcount)
    # Any empty metric list overrides whatever the checker said.
    if len(tputlist) == 0 or len(elapsedtimelist) == 0 or len(
            errorBitList) == 0:
        result, reason = 'FAILED', 'the data is 0'
    logger.info('-------------------- LOG Analyze Result --------------------')
    logger.info('tput={}, avg tput={:.2f}, stdev tput={:.2f}'.format(
        tputlist, avg_tput, stdev_tput))
    logger.info('elapsedtime={}, avg time={:.2f}, stdev time={:.2f}'.format(
        elapsedtimelist, avg_time, stdev_elapsedTime))
    logger.info('bit error count={}'.format(errorBitList))
    # Verdict in green for PASS, red for FAILED (ANSI colors).
    if result == 'PASS':
        logger.info(
            '--------------------------------------------------------\033[32m{} \033[0m{}\n'
            .format(result, reason))
    else:
        logger.info(
            '--------------------------------------------------------\033[31m{} \033[0m{}\n'
            .format(result, reason))
    return result, reason
def analyze_cuPHY_PDSCH_pipeline_integration(logContent):
    """Analyze a PDSCH pipeline integration log: CRC, LDPC, rate-match and
    mismatch error counts plus elapsed-time stability decide the verdict; no
    elapsed-time data at all forces FAILED.

    Returns:
        (result, reason) -- result is 'PASS' or 'FAILED'.
    """
    crcErrorCount = logMatch.getCRCErrorCount_PDSCH_pipeline(logContent)
    ldpcErrorCount = logMatch.getLDPCErrorCount_PDSCH_pipeline(logContent)
    rateMatchErrorCount = logMatch.getRateMatchErrorCount_PDSCH_pipeline(
        logContent)
    mismatchErrorCount = logMatch.getMismatchCount_PDSCH_pipeline(logContent)
    elapsedtimelist = logMatch.getelapseTime_PDSCH_pipeline(logContent)
    # Pass real lists, not lazy filter objects: on Python 3 a filter object
    # cannot be len()'d and is exhausted after one pass, which would break the
    # downstream checker.
    errCrc = [x for x in crcErrorCount if x > 0]
    errLDPC = [x for x in ldpcErrorCount if x > 0]
    errRateMatch = [x for x in rateMatchErrorCount if x > 0]
    errMisMatch = [x for x in mismatchErrorCount if x > 0]
    avg_time, stdevValue_elapsedTime, stdev_elapsedTime = calcStandardEv(
        elapsedtimelist)
    result, reason = logMatch.check_PDSCH_pipe_AnalyzeResult(
        errCrc, errLDPC, errRateMatch, errMisMatch, stdev_elapsedTime)
    # An empty elapsed-time list overrides whatever the checker said.
    if len(elapsedtimelist) == 0:
        result, reason = 'FAILED', 'the data is 0'
    logger.info('-------------------- LOG Analyze Result --------------------')
    logger.info('crcErrorCount={}'.format(crcErrorCount))
    logger.info('ldpcErrorCount={}'.format(ldpcErrorCount))
    logger.info('rateMatchErrorCount={}'.format(rateMatchErrorCount))
    logger.info('mismatchErrorCount={}'.format(mismatchErrorCount))
    logger.info('elapsedtime={}, avg time={:.2f}, stdev time={:.2f}'.format(
        elapsedtimelist, avg_time, stdev_elapsedTime))
    # Verdict in green for PASS, red for FAILED (ANSI colors).
    if result == 'PASS':
        logger.info(
            '--------------------------------------------------------\033[32m{} \033[0m{}\n'
            .format(result, reason))
    else:
        logger.info(
            '--------------------------------------------------------\033[31m{} \033[0m{}\n'
            .format(result, reason))
    return result, reason
cuRanSdkexistFolder = checkcuRanSdkFolder(args, pkgFolder) if cuRanSdkexistFolder != '': logger.debug("cuRanSdkexistFolder : {}".format(cuRanSdkexistFolder)) else: logger.debug("the cuRan sdk folder don't exist, will create it") cuda_ran_sdk = args.curan[ 0] if args.curan != None else preparation.doPrepare( cuRanSdkexistFolder, args, pkgFolder) return cuda_ran_sdk if __name__ == '__main__': args = commandLine_parse.parse_args() pkgFolder = 'pkg' testcase_Run.createFolder('logs') testcase_Run.createFolder(pkgFolder) logger.info("args:{}".format(args)) cuda_ran_sdk = getcudaransdk(args, pkgFolder) logger.info('the running cuda_ran_sdk folder : {}'.format(cuda_ran_sdk)) if args.case: testcase_Run.run_testcase(args.case[0], cuda_ran_sdk, args) elif args.all_case: print("run all cases") testcase_Run.run_allTestcase("all_case", args.curan[0], args) elif args.analyze: print("analyze the log")
def extractTarfile(file_tar, path):
    """Extract the gzipped tarball *file_tar* into directory *path*."""
    with tarfile.open(file_tar, 'r:gz') as tf:
        # extractall() replaces the old per-member loop that re-looked up each
        # member by name (tf.extract(tarinfo.name, ...)) -- same result, one
        # pass. SECURITY: members with absolute or '..' paths can escape
        # *path*; if the archive is ever untrusted, pass filter='data'
        # (Python 3.12+) to extractall().
        tf.extractall(path)
    logger.info('decompress cuda ran sdk done\n')
def extractZipFile(file_zip, path):
    """Extract *file_zip* into *path* and return the archive's first member
    name (the top-level entry of the zip)."""
    with zipfile.ZipFile(file_zip) as archive:
        logger.info('start decompress cuda ran sdk ...\n')
        archive.extractall(path)
        firstEntry = archive.namelist()[0]
    return firstEntry