def updateResults(self, dryRun=False):
    startDir = os.getcwd()
    os.chdir('newPerf-' + self.part)
    print 'revg> in', os.getcwd(), ' going to update results'
    for item in self.toDo:
        baseIn, xmlFileIn = os.path.split(item)
        base = baseIn.replace('newPerf-1of2/', '').replace('newPerf-2of2/', '')
        xmlFileFull = xmlFileIn.replace('_vlgd.xml', '_valgrind-1.xml')
        xmlFile = xmlFileIn.replace('_vlgd.xml', '_vlgd-1.xml')
        print xmlFileFull
        if not os.path.exists(os.path.join(base, xmlFileFull)):
            continue  # ignore files which did not get redone

        # first convert the files to some manageable size
        cmd = 'cd ' + base + ';'
        cmd += 'xsltproc --output ' + xmlFile
        cmd += ' ' + scriptPath + '/filterOutValgrindLeakErrors.xsl ' + xmlFileFull
        try:
            doCmd(cmd, dryRun)
        except Exception, e:
            print "revg> Error when filtering XML file " + xmlFileFull + ' in ' + base
            print " got ", str(e)

        # then copy over the files to AFS:
        from helpers import getStamp
        try:
            cyc, day, stamp = getStamp(self.release)
        except Exception, e:
            print "revg> ERROR when trying to get cyc,day,stamp from release for ", self.release
            print " got :", str(e)
            cyc, day, stamp = ('none', 'none', 'none')
def renameAndCopyToIgProf(dryRun, platform, relTag, os, cmd, doCmd, test,
                          fqnTestDirName, testOutputFile):
    fqnTestOutputFile = os.path.join(fqnTestDirName, testOutputFile)
    testOutputFileParts = testOutputFile.rsplit('_')
    sql3filePartsSeparator = '___'
    candle = 'MultiJet'
    tiers = 'ALL-PROMPTRECO'
    pileup = 'RealData'
    global_tag = 'AUTOCOM_' + test
    process = 'PROMPT-RECO'
    counter = 'MEM_LIVE'
    number_of_events = '500'
    i = 0
    for testOutputFilePart in testOutputFileParts:
        if testOutputFilePart.isdigit():
            number_of_events = str(testOutputFilePart)
        elif testOutputFilePart == 'MEM':
            next_part = testOutputFileParts[i + 1].rsplit('.')[0]
            if next_part == 'TOT':
                next_part = 'TOTAL'
            counter = "MEM_" + next_part
        i += 1
    testOutputSQL3FileName = (candle + sql3filePartsSeparator + tiers + sql3filePartsSeparator
                              + pileup + sql3filePartsSeparator + global_tag + sql3filePartsSeparator
                              + process + sql3filePartsSeparator + counter + sql3filePartsSeparator
                              + number_of_events + '.sql3')
    igProfOutputDirRoot = profDataRootDir + platform
    igProfOutputDir = igProfOutputDirRoot + "/" + relTag
    print 'igProfOutputDir=', igProfOutputDir
    if not os.path.exists(igProfOutputDir):
        os.makedirs(igProfOutputDir)
    cmd = 'cp ' + fqnTestOutputFile + " " + os.path.join(igProfOutputDir, testOutputSQL3FileName)
    doCmd.doCmd(cmd, dryRun)
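# Hedged worked example (the real test output naming is not shown in these
# snippets): the loop above only reacts to two kinds of '_'-separated parts.
# For a hypothetical file name such as 'step1_MEM_TOT.sql3' it would set
# counter = 'MEM_TOTAL' (given the index bookkeeping above) and keep the
# default number_of_events = '500', while a purely numeric part like '1000'
# would instead override number_of_events.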
def doCmd(self, cmd, forceRun=False, inDir=None):
    ret = 0
    if forceRun:
        ret = doCmd.doCmd(cmd, False, inDir)
    else:
        ret = doCmd.doCmd(cmd, self.dryRun, inDir)
    return ret
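# Usage sketch (illustrative names, not from the source): the wrapper above
# forwards to the module-level doCmd helper and only bypasses dry-run mode
# when forceRun=True; otherwise the instance-wide self.dryRun decides whether
# the shell command is actually executed (presumably it is only echoed in
# dry-run mode), e.g.
#   builder.dryRun = True
#   builder.doCmd('scram b -j 4')                 # honours self.dryRun
#   builder.doCmd('scram b -j 4', forceRun=True)  # runs despite self.dryRun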
def checkAndRemove(self, dirIn, dryRun=False):
    startDir = os.getcwd()
    if not os.path.isdir(dirIn):
        return
    os.chdir(dirIn)
    print "\n--------------------------------------------------------------------------------"
    print "\ncleaning out ", dirIn
    entries = os.listdir(".")
    for entry in entries:
        if not os.path.isdir(entry):
            continue  # we're only interested in directories here

        # check if there is a dontTouch file; if so, remove only the file, but leave the dir
        dontTouch = entry + ".dontTouch"
        if os.path.exists(dontTouch):
            cmd = "rm -f " + dontTouch
            try:
                doCmd(cmd, dryRun)
            except:
                pass
            continue

        # no dontTouch file for entry, remove dir
        relDir = os.listdir(entry)
        rel = None
        for d in relDir:
            if d[:6] == "CMSSW_":
                rel = d
                break
        if not rel:
            try:
                doCmd("rm -rf " + entry, dryRun)
            except:
                pass
            continue
        else:
            print "Going to clean out release", rel, entry
            cmd = "rm -rf " + entry
            try:
                doCmd(cmd, dryRun)
            except:
                pass

    print "\n"
    cmd = "/usr/bin/fs lq " + dirIn
    try:
        doCmd(cmd, False)
    except:
        pass
    print "\n"
    os.chdir(startDir)
    return
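# Usage sketch (directory name is hypothetical): dropping a marker file next to
# a work area tells checkAndRemove to skip it on the next pass, e.g.
#   touch myBuildArea.dontTouch    # protects the directory 'myBuildArea'
# Only the marker itself is then removed and the directory survives; the marker
# would have to be recreated before each further cleanup run to keep protecting it.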
def rebuildPackages(self, dryRun=False):
    """docstring for rebuildPackages"""
    print "revg> rebuilding packages ... "
    self.startDir = os.getcwd()
    cmd = ''  # 'cd ..;'
    cmd += 'eval `scram run -sh`;'
    cmd += 'export CVSROOT=:gserver:cmssw.cvs.cern.ch:/cvs/CMSSW;'
    for lib in self.libList:
        package = self.findPkg(lib)
        if not package:
            continue
        cmd += 'addpkg -z ' + package + ';'
    cmd += 'checkdeps -a ;'
    for lib in self.libList:
        package = self.findPkg(lib)
        if not package:
            continue
        cmd += '(cd ' + package + ' && scram b -v USER_CXXFLAGS="-g" -j 10 >vg-rebuild.log 2>&1 );'
    print 'revg> in: ', os.getcwd(), "going to execute:", cmd.replace(';', '\n')
    ret = doCmd(cmd, dryRun)
    if ret != 0:
        print 'revg> ERROR executing cmd:"' + cmd + '"'
        print ' return code :', ret
    else:
        print 'revg> Packages successfully rebuilt.'
def doCmd(self, cmd):
    fullCmd = ''  # 'project CMSSW;'
    fullCmd += cmd
    ret = doCmd(fullCmd)
    return ret
def cleanupOld(igProfOutputDirRoot, dryRun):
    igProfOutputDirRoot = igProfOutputDirRoot.strip()
    if igProfOutputDirRoot != '' and igProfOutputDirRoot != '/' and not igProfOutputDirRoot.startswith('//'):
        # to prevent deleting the /* folder
        dirs = igProfOutputDirRoot + '/*'
        dirsToCheck = [igProfOutputDirRoot + '/*']
        print 'dirsToCheck=', dirsToCheck
        delDirOlder = time.time() - (60 * 60 * 24 * daysToKeepIgProfData)
        print 'Cleanup> deleting data older than ', daysToKeepIgProfData, ' days from ', igProfOutputDirRoot
        for xdir in dirsToCheck:
            for sdir in glob.glob(xdir):
                if os.path.getctime(sdir) <= delDirOlder:
                    try:
                        doCmd.doCmd('rm -rf ' + sdir, dryRun)
                    except:
                        pass
    else:
        print "ignoring " + igProfOutputDirRoot + " folder as it may delete important files"
    return
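# Minimal sketch of the retention cutoff used above (daysToKeepIgProfData is
# assumed to be a module-level setting): with daysToKeepIgProfData = 7,
#   delDirOlder = time.time() - (60 * 60 * 24 * 7)
# so any entry whose ctime falls at or before that timestamp (created more than
# a week ago) is removed; the guard on '', '/' and '//...' keeps the glob from
# ever expanding to the filesystem root.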
def prepare(dirIn=None, platIn=None, relIn=None):
    swDir = dirIn
    if not swDir:
        swDir = os.getcwd()
    plat = platIn
    if not platIn:
        plat = os.environ['SCRAM_ARCH']
    if not os.path.exists(plat):
        os.makedirs(plat)
    lnx32 = ""
    if (plat[:9] == 'slc4_ia32') and (os.uname()[-1] == 'x86_64'):
        lnx32 = "linux32 "

    # following the wiki page at: https://twiki.cern.ch/twiki/bin/view/CMS/CMSSW_bootstrap
    cmd = "export VO_CMS_SW_DIR=" + swDir + " "
    cmd += "; wget -O $VO_CMS_SW_DIR/bootstrap.sh http://cmsrep.cern.ch/cmssw/bootstrap-" + plat + ".sh "
    cmd += "; chmod +x $VO_CMS_SW_DIR/bootstrap.sh"
    cmd += '; ' + lnx32 + ' $VO_CMS_SW_DIR/bootstrap.sh setup -repository cms.eulisse -path $VO_CMS_SW_DIR '
    doCmd(cmd)

    shellUsed = os.environ['SHELL']
    shell = '-sh'
    if shellUsed in ['csh', 'tcsh']:
        shell = '-csh'

    aptDir = os.path.join(swDir, plat, 'external', 'apt')
    aptVers = os.listdir(aptDir)[0]  # for the time being ...
    cmd = 'sed -i -e "s/cms.eulisse/cms/g" ' + os.path.join(aptDir, aptVers, 'etc', 'sources.list')
    doCmd(cmd)
    print "found apt version:", aptVers

    if not relIn:
        print 'now please do:'
        print ' source ' + os.path.join(aptDir, aptVers, 'etc', 'profile.d', 'init.sh')
        print ' apt-get update'
        print ' apt-get install cms+cmssw+<release>'
    else:
        cmd = ' source ' + os.path.join(aptDir, aptVers, 'etc', 'profile.d', 'init.sh')
        cmd += '; apt-get update'
        cmd += '; apt-get install cms+cmssw+' + relIn
        doCmd(cmd)
    return
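# Usage sketch (path, architecture and release name are illustrative): prepare()
# can be called with everything defaulted, in which case it bootstraps into the
# current directory for the architecture given by $SCRAM_ARCH and prints the
# apt-get commands to run by hand; passing a release installs it directly, e.g.
#   prepare(dirIn='/build/cms-sw', platIn='slc5_amd64_gcc462', relIn='CMSSW_5_3_0')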
class WebLogger(BuilderBase):

    def __init__(self, dayIn=None, relCycIn=None, dryRunIn=False, doInstall=True):
        BuilderBase.__init__(self)
        self.day = dayIn
        if not self.day:
            self.day = time.strftime("%a").lower()
        self.dryRun = dryRunIn
        self.topDir = os.path.join(self.installDir, self.plat)
        self.relCycle = relCycIn
        self.doInstall = doInstall
        self.cand = None
        config.setDefaults(relCycIn)
        return

    # --------------------------------------------------------------------------------

    def makeWebLog(self, rel, candIn, tcTag=None):
        self.prepare(rel, candIn, tcTag)
        self.createWebLog()
        return

    # --------------------------------------------------------------------------------

    def prepare(self, rel, candIn, tcTag=None):
        if tcTag == None:
            tcTag = rel
        self.cand = candIn
        self.release = rel
        self.relDir = os.path.join(self.topDir, self.day, candIn, rel)
        self.webDir = os.path.join(self.topDir, 'www', self.day, candIn, rel)
        self.tcTag = tcTag
        if not self.doInstall:
            return
        if not os.path.exists(self.webDir) and not self.dryRun:
            print "Preparing web log for rel ", rel, 'candIn', candIn, 'tctag', tcTag
            print ' in ', self.topDir, 'day', self.day
            os.makedirs(self.webDir)
        return

    # --------------------------------------------------------------------------------

    def createWebLog(self):
        if not self.doInstall:
            return
        try:
            cmd = 'cd ' + self.webDir + ';'
            cmd += 'ln -s ' + os.path.join(self.relDir, 'tmp', self.plat, 'cache', 'log', 'html') + ' new ;'
            doCmd(cmd, self.dryRun)
        except:  # ignore failures ...
            pass
        try:
            cmd = 'cd ' + self.webDir + ';'
            cmd += 'ln -s ' + os.path.join(self.relDir + "/..", 'prebuild.log') + ' . ;'
            doCmd(cmd, self.dryRun)
        except:  # ignore failures ...
            pass
        try:
            startFlag = False
            inFile = open(os.path.join(self.relDir, "logs", self.plat, 'release-build.log'), 'r')
            outFile = open(os.path.join(self.webDir, 'scramInfo.log'), 'w')
            for line in inFile.readlines():
                if not startFlag:
                    if line.startswith("Resetting caches"):
                        startFlag = True
                else:
                    if line.startswith(">> Local Products Rules"):
                        break
                    outFile.write(line)
            inFile.close()
            outFile.close()
            print "scramInfo log file created at", os.path.join(self.webDir, 'scramInfo.log')
        except:  # ignore failures ...
            pass
        try:
            cmd = 'cd ' + self.webDir + ';'
            cmd += 'ln -s ' + os.path.join(self.relDir, "logs", self.plat, 'release-build.log') + ' . ;'
            doCmd(cmd, self.dryRun)
        except:  # ignore failures ...
            pass
        try:
            cmd = 'cd ' + self.relDir + '/src ;'
            cmd += 'for D in `ls -d */*/data`; do echo $D " : " `ls $D/*.cf[ifg] 2>/dev/null | wc -l` ; done '
            cmd += '> ' + os.path.join(self.webDir, 'cfgInfo.log') + ' 2>/dev/null;'
            doCmd(cmd, self.dryRun)
        except:  # ignore failures ...
            pass
        return

    # --------------------------------------------------------------------------------

    def sendMailAlerts(self, scriptDir):
        # get config to see if we should send mails:
        sendMails = False
        try:
            sendMails = config.Configuration[self.relCycle]['sendDevMail']
        except Exception, e:
            print "ERROR when trying to see if we should send mails, nothing sent."
            print str(e)
            return

        # -ap: temporarily disable mails for non-standard platforms (as we
        #      now switched the build of those from 1.9 to 2.0) ...
        if self.plat != 'slc4_ia32_gcc345':
            print "Will not send mails on non-standard platform"
            return

        if not sendMails:
            print "Config requested to NOT send mails to developers."
            return

        # send mails only for "official builds"
        hour = self.tcTag[-4:]
        if (hour != '0200' and hour != '1600'):
            print "Ignoring non-official release", self.tcTag, hour
            return

        cmd = scriptDir + "/send_mail_alerts.pl "
        cmd += self.webDir + '/nightly-alerts '
        print "in: ", os.getcwd(), "going to execute:"
        print cmd
        doCmd(cmd, self.dryRun)
        return
def makeComparison(buildNameFQNPath, buildArchitecture, no_successes, rootfileBuild,
                   rootFileBuildRef, outputReportDir, threshold, statTest, doPngs,
                   doCompare, doReport, sample, workflowFQNParentPath, black_list,
                   success_percentage, stamp, rel, outputReportName, buildName,
                   workflow, referenceBuildName, report_path, report_relative_path,
                   dryRun):
    preCmd = "SCRAM_ARCH=%s; cd %s; eval `scramv1 runtime -sh`;" % (buildArchitecture, buildNameFQNPath)
    cmd = "compare_using_files.py"
    if no_successes != 0:
        cmd += " --no_successes"
    cmd += " %s" % rootfileBuild
    cmd += " %s" % rootFileBuildRef
    cmd += ' --sample Global --metas "%s@@@%s"' % (buildName, referenceBuildName)
    cmd = preCmd + cmd
    if len(black_list) > 0:
        cmd += " -B"
        for element in black_list:
            cmd += (" %s" % element)
    if cmp(outputReportDir, "") != 0:
        cmd += " -o %s" % outputReportDir
    if cmp(threshold, "") != 0:
        cmd += " -t %s" % threshold
    if cmp(statTest, "") != 0:
        cmd += " -s %s" % statTest
    if doPngs:
        cmd += " -p"
    if doCompare:
        cmd += " -C"
    if doReport:
        cmd += " -R"
    logFileFQNPath = '%s.log' % outputReportDir
    cmd += ' > %s 2>&1' % logFileFQNPath
    fQNPathToLogFile, logFileName = os.path.split(logFileFQNPath)
    fQNPathToLogFile = fQNPathToLogFile.strip()
    print cmd

    comparisonState = 'NOTRUN'  # in case of not running the comparison or raised exceptions
    ret = -1
    try:
        ret = doCmd.doCmd(cmd, dryRun)
        print "Comparison finished with exit code: %s" % str(ret)
        if ret == 0:
            relMonSummaryFQNPath = os.path.join(outputReportDir, 'RelMonSummary.html')
            statisticsParser = StatisticsHTMLParser()  # create new parser object
            f = open(relMonSummaryFQNPath, 'r')
            statisticsParser.feed(f.read())
            statisticsParser.close()
            n_successes = float(statisticsParser.n_successes)
            n_fails = float(statisticsParser.n_fails)
            n_nulls = float(statisticsParser.n_nulls)
            n_total = n_successes + n_fails + n_nulls
            if n_total != 0:
                fail_percentage = (n_fails + n_nulls) * 100.00 / n_total
                success_percentage = float(success_percentage)
                if fail_percentage > success_percentage:
                    comparisonState = 'FAILED'
                else:
                    comparisonState = 'PASSED'
            cmd = '%s dir2webdir.py %s' % (preCmd, outputReportDir)
            doCmd.doCmd(cmd, dryRun)
    except Exception, e:
        print "ERROR: Caught exception during making comparison report: %s" % str(e)
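# Hedged worked example of the pass/fail decision above (numbers are made up):
# with n_successes = 90, n_fails = 8, n_nulls = 2 the parsed summary gives
#   fail_percentage = (8 + 2) * 100.0 / 100 = 10.0
# so a success_percentage of 5 marks the comparison FAILED, while 20 would mark
# it PASSED; success_percentage is effectively the maximum tolerated failure
# percentage here.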
def main(): parser = OptionParser() usage = "usage: %prog [option] arg" parser = OptionParser(usage=usage) parser.add_option("-r", "--releaseDir", dest="releaseDir", help="release directory") parser.add_option("-p", "--platform", dest="platform", help="release architecture") parser.add_option("-d", "--dryRun", action="store_true", dest="dryRun", help="is a dry run?") (options, args) = parser.parse_args() dryRun = False platform = None releaseDir = None relTag = None if not options.dryRun == None: dryRun = options.dryRun if options.releaseDir == None: print "Release directory is not specified!" parser.print_help() return errno.EINVAL if options.platform == None: print "Release architecture is not specified!" parser.print_help() return errno.EINVAL platform = options.platform releaseDir = options.releaseDir head, relTag = os.path.split(releaseDir) if relTag == None or relTag == "": relTag = os.path.basename(head) tests = '11,15' testsArr = tests.rsplit(',') testDir = "DQMServices/Components/test" igProfOutputDirRoot = profDataRootDir + platform cleanupOld(igProfOutputDirRoot, dryRun) cmd = "cd " + releaseDir + "/src/"+testDir+"; python whiteRabbit.py -j 2 -n " + tests doCmd.doCmd(cmd, dryRun) for test in testsArr: test = test.strip() fqnDQMToolsTestsDirName = os.path.join(options.releaseDir, "src", testDir) dirs = os.listdir(fqnDQMToolsTestsDirName) for dir in dirs: fqnDir = os.path.join(fqnDQMToolsTestsDirName, dir) if os.path.isdir(fqnDir): if dir.isdigit(): fqnTestDirName = os.path.join(fqnDQMToolsTestsDirName, dir, test) testOutputFiles = os.listdir(fqnTestDirName) for testOutputFile in testOutputFiles: if testOutputFile.endswith('.sql'): fqnTestOutputFile = os.path.join(fqnTestDirName, testOutputFile) testOutputSQL3FileName = "OutPutSqliteFileInIgprofFormat.sql3" fqnTestOutputSQL3File = os.path.join(fqnTestDirName, testOutputSQL3FileName) cmd = "sqlite3 < " + fqnTestOutputFile + " " + fqnTestOutputSQL3File doCmd.doCmd(cmd, dryRun) renameAndCopyToIgProf(dryRun, platform, relTag, os, cmd, doCmd, test, fqnTestDirName, testOutputSQL3FileName) elif testOutputFile.endswith('.sql3'): renameAndCopyToIgProf(dryRun, platform, relTag, os, cmd, doCmd, test, fqnTestDirName, testOutputFile) break
def main(): parser = OptionParser() usage = "usage: %prog [option1] arg1" parser = OptionParser(usage=usage) parser.add_option("-r", "--releaseDir", dest="releaseDir", help="release directory") parser.add_option("-d", "--dryRun", action="store_true", dest="dryRun", help="is a dry run?") parser.add_option("-p", "--platform", dest="platform", help="used architecture") (options, args) = parser.parse_args() dryRun = False if not options.dryRun == None: dryRun = options.dryRun if options.releaseDir == None: print "The Release directory is not specified" return errno.EINVAL buildNameFQNPath = options.releaseDir (buildDir, buildName) = os.path.split(options.releaseDir) if buildName == '': #if releaseDir is ending with a separator (buildDir, buildName) = os.path.split(buildDir) if options.platform == None: buildArchitecture = getBuildArchitecture(buildName, buildDir) else: buildArchitecture = options.platform preparePythonPath(buildDir, buildName, buildArchitecture) buildReferenceBuildArchitectureFQNPath = os.path.join( reference_build_path, buildArchitecture) if not os.path.exists(buildReferenceBuildArchitectureFQNPath): print "Creating architecture folderfolder ", buildReferenceBuildArchitectureFQNPath os.makedirs(buildReferenceBuildArchitectureFQNPath) else: print "Using existing architecture folder ", buildReferenceBuildArchitectureFQNPath buildReferenceBuildFQNPath = os.path.join( buildReferenceBuildArchitectureFQNPath, buildName) if not os.path.exists(buildReferenceBuildFQNPath): print "Creating build folder ", buildReferenceBuildFQNPath os.makedirs(buildReferenceBuildFQNPath) else: print "Warning! Using existing build folder ", buildReferenceBuildFQNPath, ". The folder should not exist normally!" workflowFQNParentPath = os.path.join(buildNameFQNPath, workflowDataRelativePath) if not os.path.exists(workflowFQNParentPath): print 'The path with workflows is not found: ' + str(buildNameFQNPath) workflowFQNParentPath = os.path.join(buildNameFQNPath, 'pyRelval') print 'looking for the worklfows in ' + workflowFQNParentPath if not os.path.exists(workflowFQNParentPath): print 'The workflow location cannot be found, terminating ...' return errno.ENOENT dirs = os.listdir(workflowFQNParentPath) for workflowDir in dirs: if 'HARVEST' in workflowDir: workflowDirFQN = os.path.join(workflowFQNParentPath, workflowDir) dqmHarvestedRootFile = getDQMHarvestedRootFile(workflowDirFQN) if dqmHarvestedRootFile is not None: dqmHarvestedRootFileFQN = os.path.join(workflowDirFQN, dqmHarvestedRootFile) buildReferenceWorkflowFQNPath = os.path.join( buildReferenceBuildFQNPath, workflowDir) if not os.path.exists(buildReferenceWorkflowFQNPath): print "Creating build folder ", buildReferenceWorkflowFQNPath os.makedirs(buildReferenceWorkflowFQNPath) else: print "Warning! Using existing build folder ", buildReferenceWorkflowFQNPath, ". The folder should not exist normally!" buildReferenceWorkflowDQMRootFQNFileName = os.path.join( buildReferenceWorkflowFQNPath, dqmHarvestedRootFile) doCmd.doCmd( 'cp ' + dqmHarvestedRootFileFQN + " " + buildReferenceWorkflowDQMRootFQNFileName, dryRun) else: print workflowDir + ' folder doesn\'t contain the DQM harvested root file.' print "Done" return
def __del__(self):
    if 'logger' in self.__dict__:
        if self.sendMail:
            cmd = 'cat ' + self.logger.logPath + ' | mail -s "IB requests state check has failed one or more requests" ' + EMAIL_ADDR
            doCmd.doCmd(cmd, dryRun=self.dryRun)
        self.logger.removeLogFile()
print " got ", str(e) # then copy over the files to AFS: from helpers import getStamp try: cyc, day, stamp = getStamp(self.release) except Exception, e: print "revg> ERROR when trying to get cyc,day,stamp from release for ", self.release print " got :", str(e) cyc, day, stamp = ('none', 'none', 'none') topDir = os.path.join('/afs/cern.ch/cms/sw/ReleaseCandidates/',os.environ['SCRAM_ARCH'], day, cyc+'-'+stamp, self.release, 'qaLogs') # GEN-DIGI2RAW/cpu5/TTbar_Memcheck/TTBAR__GEN-SIM-DIGI-L1-DIGI2RAW_memcheck_vlgd.xml ' cmd = 'cp ' + os.path.join(base,xmlFile) + ' ' + os.path.join(topDir, base.replace('newPerf-1of2/', '').replace('newPerf-2of2/', ''), xmlFile) try: doCmd(cmd, dryRun) except Exception, e: print "revg> Error when copying file to AFS " + os.path.join(base, xmlFile) print " cmd '"+cmd+"'" print " returned ", str(e) # ... and to the web (maybe this can be taken from AFS later): topDir = os.path.join('/data/intBld/incoming/', 'newPerf-'+self.part, os.environ['SCRAM_ARCH'], self.release, 'newPerf-'+self.part) #cmd = 'scp '+os.path.join(base, xmlFile) + ' vocms06:'+os.path.join(topDir, base.replace('newPerf-1of2/', '').replace('newPerf-2of2/', ''), xmlFile) #try: # doCmd(cmd, dryRun) #except Exception, e: # print "revg> Error when scp-ing file ", os.path.join(base, xmlFile) # print " cmd '"+cmd+"'" # print " returned ", str(e) cmd = 'scp '+os.path.join(base, xmlFile) + ' vocms12:'+os.path.join(topDir, base.replace('newPerf-1of2/', '').replace('newPerf-2of2/', ''), xmlFile)
    # continuation of makeComparison: publish the comparison report and record the outcome
    if report_relative_path:
        buildReportRelativePath = report_relative_path
    else:
        buildReportRelativePath = os.path.join(buildArchitecture, buildName)
    if report_path:
        pathToWWW = report_path  # i.e. '/afs/cern.ch/cms/sdt/internal/requests/customIB/slc5_amd64_gcc470/yana/1520'
    else:
        pathToWWW = str(os.path.join(buildReportsAFSBaseFQNPath, buildArchitecture, 'www',
                                     stamp[:3], rel + '-' + stamp))
    pathToDQMComp = str(os.path.join(dqmReportsAFSBaseFQNPath, buildReportRelativePath))

    report = ""
    if ret == 0:
        report = os.path.join(buildReportRelativePath, reportWWWDir, workflow)
        reportFQNPath = os.path.join(pathToDQMComp, reportWWWDir)
        try:
            doCmd.doCmd('ssh %s "mkdir -p %s"' % (frontEndMachine, reportFQNPath), dryRun)
            doCmd.doCmd('scp -r %s %s:%s' % (outputReportDir, frontEndMachine, reportFQNPath), dryRun)
        except:
            print "Error: an error occurred while copying reports to the build machine"
    else:
        report = os.path.join(buildReportRelativePath, reportWWWDir, workflow, logFileName)
        reportFQNPath = os.path.join(pathToDQMComp, reportWWWDir, workflow)
        try:
            doCmd.doCmd('ssh %s "mkdir -p %s"' % (frontEndMachine, reportFQNPath), dryRun)
            doCmd.doCmd('scp %s %s:%s' % (logFileFQNPath, frontEndMachine, reportFQNPath), dryRun)
        except:
            print "Error: an exception occurred while copying reports to the build machine"

    line = '%s Comparison-%s report: %s\r\n' % (sample, comparisonState, report)
    sumlog = os.path.join(os.path.dirname(os.path.abspath(outputReportDir)), 'runall-comparison.log')
    f = open(sumlog, 'a')
    f.write(line)