def toCSVLumiByLS(lumidata,filename,resultlines,scalefactor,isverbose):
    '''
    Dump per-lumisection luminosity to a CSV file (or stdout).

    input:
        lumidata: {run:[[lumilsnum,cmslsnum,timestamp,beamstatus,beamenergy,deliveredlumi,recordedlumi],...]}
                  a run mapped to None means no data for that run
        filename: output csv name; the literal string 'STDOUT' (any case) writes to stdout
        resultlines: pre-existing result rows to merge in
        scalefactor: multiplier applied to delivered/recorded lumi
        isverbose: unused here, kept for interface compatibility
    '''
    result=[]
    fieldnames=['Run','LS','UTCTime','Beam Status','E(GeV)','Delivered(/ub)','Recorded(/ub)']
    for rline in resultlines:
        result.append(rline)
    for run in sorted(lumidata):
        rundata=lumidata[run]
        if rundata is None:
            # fix: pad with 6 'n/a' so the row matches the 7-column header
            # (previously only 5 'n/a' were emitted)
            result.append([run,'n/a','n/a','n/a','n/a','n/a','n/a'])
            continue
        for lsdata in rundata:
            lumilsnum=lsdata[0]
            cmslsnum=lsdata[1]
            ts=lsdata[2]            # python datetime
            bs=lsdata[3]            # beam status
            begev=lsdata[4]         # beam energy (GeV)
            deliveredlumi=lsdata[5]
            recordedlumi=lsdata[6]
            result.append([run,str(lumilsnum)+':'+str(cmslsnum),ts.strftime('%m/%d/%y %H:%M:%S'),bs,begev,deliveredlumi*scalefactor,recordedlumi*scalefactor])
    sortedresult=sorted(result,key=lambda x : int(x[0]))
    assert(filename)
    if filename.upper()=='STDOUT':
        r=sys.stdout
        r.write(','.join(fieldnames)+'\n')
        for l in sortedresult:
            r.write(str(l)+'\n')
    else:
        r=csvReporter.csvReporter(filename)
        r.writeRow(fieldnames)
        r.writeRows(sortedresult)
def createRunList(c,p='.',o='.',dryrun=False):
    '''
    Query the lumi DB for all runs that have lumisummary, trigger and HLT
    data, then write them (one run per row) to <o>/runlist.txt, unless
    dryrun is set, in which case the run list is only printed.

    input:
       c      connect string
       p      authentication path
       o      output directory
       dryrun print the run list instead of writing the file
    '''
    msg=coral.MessageStream('')
    msg.setMsgVerbosity(coral.message_Level_Error)
    os.environ['CORAL_AUTH_PATH']=p
    svc = coral.ConnectionService()
    connectstr=c
    session=svc.connect(connectstr,accessMode=coral.access_ReadOnly)
    session.typeConverter().setCppTypeForSqlType("unsigned int","NUMBER(10)")
    session.typeConverter().setCppTypeForSqlType("unsigned long long","NUMBER(20)")
    session.transaction().start(True)   # read-only transaction
    schema=session.nominalSchema()
    allruns=lumiQueryAPI.allruns(schema,requireLumisummary=True,requireTrg=True,requireHlt=True)
    session.transaction().commit()
    del session
    del svc
    allruns.sort()
    if not dryrun:
        report=csvReporter.csvReporter(os.path.join(o,'runlist.txt'))
        for run in allruns:
            report.writeRow([run])
    else:
        # fix: python2 print statement -> print() function, consistent
        # with the print() calls used elsewhere in this file
        print(allruns)
def toCSVConfHlt(hltconfdata,filename,iresults=[],isverbose=False):
    '''
    Dump the HLT configuration per run to CSV (or stdout).

    input:{runnumber,[(hltpath,l1seedexpr,l1bitname),...]}
    A run mapped to None produces a row of 'n/a'; a falsy l1bitname is
    reported as 'n/a'.
    '''
    fieldnames=['Run','hltpath','l1seedexpr','l1bit']
    rows=list(iresults)
    for runnumber in sorted(hltconfdata):
        pathdata=hltconfdata[runnumber]
        if pathdata is None:
            rows.append([str(runnumber),'n/a','n/a','n/a'])
            continue
        for pathinfo in pathdata:
            seedexpr=pathinfo[1]
            bitname=pathinfo[2] if pathinfo[2] else 'n/a'
            rows.append([str(runnumber),pathinfo[0],seedexpr,bitname])
    assert(filename)
    if filename.upper()=='STDOUT':
        out=sys.stdout
        out.write(','.join(fieldnames)+'\n')
        for row in rows:
            out.write(str(row)+'\n')
    else:
        reporter=csvReporter.csvReporter(filename)
        reporter.writeRow(fieldnames)
        reporter.writeRows(rows)
def dumpData(lumidata,filename):
    """
    Write the given result rows to a CSV file.

    input params:
        lumidata [{'fieldname':value}]
        filename csvname
    """
    reporter=csvReporter.csvReporter(filename)
    reporter.writeRows(lumidata)
def toCSVOverview(lumidata,filename,resultlines,scalefactor,isverbose):
    '''
    Dump a per-run luminosity overview to CSV (or stdout).

    input:
    lumidata {run:[lumilsnum,cmslsnum,timestamp,beamstatus,beamenergy,deliveredlumi,recordedlumi,calibratedlumierror,(bxidx,bxvalues,bxerrs),(bxidx,b1intensities,b2intensities)]}
    resultlines [[resultrow1],[resultrow2],...,] existing result rows
    scalefactor: multiplier applied to the lumi sums
    isverbose: unused here, kept for interface compatibility
    '''
    result=[]
    fieldnames = ['Run', 'DeliveredLS', 'Delivered(/ub)','SelectedLS','Recorded(/ub)']
    # fix: the csv reporter used to be created here unconditionally, which
    # produced a spurious output file (even one literally named 'STDOUT')
    # before the output target was known; it is now created only in the
    # file-output branch below.
    for rline in resultlines:
        result.append(rline)
    for run in lumidata.keys():
        lsdata=lumidata[run]
        if lsdata is None:
            result.append([run,'n/a','n/a','n/a','n/a'])
            continue
        nls=len(lsdata)
        deliveredData=[x[5] for x in lsdata]
        recordedData=[x[6] for x in lsdata if x[6] is not None]
        totdeliveredlumi=0.0
        totrecordedlumi=0.0
        if len(deliveredData)!=0:
            totdeliveredlumi=sum(deliveredData)
        if len(recordedData)!=0:
            totrecordedlumi=sum(recordedData)
        selectedcmsls=[x[1] for x in lsdata if x[1]!=0]  # cmslsnum==0 means LS not selected
        if len(selectedcmsls)==0:
            selectedlsStr='n/a'
        else:
            selectedlsStr = CommonUtil.splitlistToRangeString(selectedcmsls)
        result.append([run,nls,totdeliveredlumi*scalefactor,selectedlsStr,totrecordedlumi*scalefactor])
    sortedresult=sorted(result,key=lambda x : int(x[0]))
    assert(filename)
    if filename.upper()=='STDOUT':
        r=sys.stdout
        r.write(','.join(fieldnames)+'\n')
        for l in sortedresult:
            r.write(str(l)+'\n')
    else:
        r=csvReporter.csvReporter(filename)
        r.writeRow(fieldnames)
        r.writeRows(sortedresult)
def toCSVLSHlt(hltdata,filename,iresults=None,isverbose=False):
    '''
    Dump per-LS HLT path/prescale info to CSV (or stdout).

    input:{runnumber:[(cmslsnum,[(hltpath,hltprescale,l1pass,hltaccept),...]),(cmslsnum,[])]}
    iresults: optional pre-existing result rows.
              fix: the default None no longer crashes -- it is treated as
              an empty list.
    '''
    result=[]
    fieldnames=['Run','LS','hltpath,hltprescale']
    if isverbose:
        fieldnames[-1]+=',l1pass,hltaccept'
    for rline in (iresults or []):
        result.append(rline)
    for run in sorted(hltdata):
        lsdata=hltdata[run]
        if lsdata is None:
            result.append([run,'n/a','n/a'])
            continue
        for thislsdata in lsdata:
            cmslsnum=thislsdata[0]
            bitsdata=thislsdata[1]
            allbitsresult=[]
            for mybit in bitsdata:
                hltpath=mybit[0]
                if not hltpath:
                    continue
                hltprescale=mybit[1]
                if hltprescale is None:
                    hltprescale='n/a'
                else:
                    hltprescale='%d'%hltprescale
                if isverbose:
                    # fix: format l1pass like the other counters ('n/a' for
                    # None, '%d' otherwise) instead of the broken
                    # '******' placeholder format string
                    l1pass=mybit[2]
                    if l1pass is None:
                        l1pass='n/a'
                    else:
                        l1pass='%d'%l1pass
                    hltaccept=mybit[3]
                    if hltaccept is None:
                        hltaccept='n/a'
                    else:
                        hltaccept='%d'%hltaccept
                    mybitStr=','.join([hltpath,hltprescale,l1pass,hltaccept])
                else:
                    mybitStr=','.join([hltpath,hltprescale])
                allbitsresult.append(mybitStr)
            allbitsresult=';'.join(allbitsresult)
            result.append([run,cmslsnum,allbitsresult])
    assert(filename)
    if filename.upper()=='STDOUT':
        r=sys.stdout
        r.write(','.join(fieldnames)+'\n')
        for l in result:
            r.write(str(l)+'\n')
    else:
        r=csvReporter.csvReporter(filename)
        r.writeRow(fieldnames)
        r.writeRows(result)
def toCSVLSBeam(beamdata,filename,resultlines,dumpIntensity=False,isverbose=False):
    '''
    Dump per-LS beam status/energy (and optionally per-bunch intensities)
    to CSV (or stdout).

    input:
    {run:[(lumilsnum(0),cmslsnum(1),beamstatus(2),beamenergy(3),beaminfolist(4)),..]}
    beaminfolist:[(bxidx,b1,b2)]
    '''
    result=[]
    fieldnames=['Run','LS','beamstatus','egev']
    if dumpIntensity:
        fieldnames.append('(bxidx,b1,b2)')
    for rline in resultlines:
        result.append(rline)
    for run in sorted(beamdata):
        perrundata=beamdata[run]
        if perrundata is None:
            # fix: the placeholder row was built but never appended, and
            # ll.extend('n/a') wrongly added the characters 'n','/','a';
            # emit one 'n/a' per header column and append the row
            ll=[str(run),'n/a','n/a','n/a']
            if dumpIntensity:
                ll.append('n/a')
            result.append(ll)
            continue
        for lsdata in perrundata:
            lumilsnum=lsdata[0]
            cmslsnum=lsdata[1]
            beamstatus=lsdata[2]
            beamenergy=lsdata[3]
            if not dumpIntensity:
                result.append([str(run),str(lumilsnum)+':'+str(cmslsnum),beamstatus,'%.2f'%beamenergy])
                continue
            allbxinfo=lsdata[4]
            allbxresult=[]
            for thisbxinfo in allbxinfo:
                bxidx=thisbxinfo[0]
                b1=thisbxinfo[1]
                b2=thisbxinfo[2]
                allbxresult.append(','.join(['%d'%bxidx,'%.3e'%b1,'%.3e'%b2]))
            allbxresultStr=' '.join(allbxresult)
            result.append([str(run),str(lumilsnum)+':'+str(cmslsnum),beamstatus,'%.2f'%beamenergy,allbxresultStr])
    assert(filename)
    if filename.upper()=='STDOUT':
        r=sys.stdout
        r.write(','.join(fieldnames)+'\n')
        for l in result:
            r.write(str(l)+'\n')
    else:
        r=csvReporter.csvReporter(filename)
        r.writeRow(fieldnames)
        r.writeRows(result)
def main(*args):
    '''
    Dump instantaneous luminosity by time for one run to
    instlumibytime-<runnum>.csv.

    args[1] : run number (required)
    returns 0 on success, 1 if the run number is missing, 2 on any other error
    '''
    runnum = 0
    try:
        runnum = args[1]
        report = csvReporter.csvReporter('instlumibytime-' + str(runnum) + '.csv')
        msg = coral.MessageStream('')
        msg.setMsgVerbosity(coral.message_Level_Error)
        os.environ['CORAL_AUTH_PATH'] = '/afs/cern.ch/cms/lumi'
        svc = coral.ConnectionService()
        connectstr = 'oracle://cms_orcoff_prod/cms_lumi_prod'
        session = svc.connect(connectstr, accessMode=coral.access_ReadOnly)
        session.typeConverter().setCppTypeForSqlType("unsigned int", "NUMBER(10)")
        session.typeConverter().setCppTypeForSqlType("unsigned long long", "NUMBER(20)")
        session.transaction().start(True)   # read-only transaction
        schema = session.nominalSchema()
        q = schema.newQuery()
        runsummaryOut = lumiQueryAPI.runsummaryByrun(q, runnum)
        del q
        q = schema.newQuery()
        lumisummaryOut = lumiQueryAPI.lumisummaryByrun(q, runnum, '0001')
        del q
        session.transaction().commit()
        del session
        del svc
        starttimestr = runsummaryOut[3]
        t = lumiTime.lumiTime()
        report.writeRow(['cmslsnum', 'utctime', 'unixtimestamp', 'instlumi'])
        for dataperls in lumisummaryOut:
            cmslsnum = dataperls[0]
            instlumi = dataperls[1]
            startorbit = dataperls[3]
            orbittime = t.OrbitToTime(starttimestr, startorbit)
            orbittimestamp = time.mktime(
                orbittime.timetuple()) + orbittime.microsecond / 1e6
            report.writeRow([cmslsnum, orbittime, orbittimestamp, instlumi])
    except IndexError:
        # fix: python2 print statements -> print() calls, consistent with
        # the other main() in this file
        print('runnumber should be provided')
        return 1
    except Exception as er:
        print(str(er))
        return 2
    else:
        return 0
def toCSVLSEffective(lumidata,filename,resultlines,scalefactor,isverbose):
    '''
    Dump per-LS effective (prescale-corrected) luminosity per HLT path.

    input:
    {run:[lumilsnum(0),cmslsnum(1),timestamp(2),beamstatus(3),beamenergy(4),deliveredlumi(5),recordedlumi(6),calibratedlumierror(7),{hltpath:[l1name,l1prescale,hltprescale,efflumi]},bxdata,beamdata]}
    '''
    #row layout: [run,ls,hltpath,l1bitname,hltpresc,l1presc,recorded,efflumi]
    fieldnames = ['Run','LS','HLTpath','L1bit','HLTpresc','L1presc','Recorded(/ub)','Effective(/ub)']
    rows=list(resultlines)
    for runnumber in sorted(lumidata):  #loop over runs
        perrundata=lumidata[runnumber]
        if perrundata is None:
            rows.append([str(runnumber)]+['n/a']*7)
            continue
        for perlsdata in perrundata:
            effdict=perlsdata[8]
            if not effdict:  # this ls has no path info
                continue
            lsnum=perlsdata[1]
            recordedlumi=perlsdata[6] or 0.0
            for pathname in sorted(effdict):
                pathinfo=effdict[pathname]
                bitname=pathinfo[0]
                bitname='n/a' if bitname is None else bitname.replace('"','')
                l1presc=pathinfo[1]
                hltpresc=pathinfo[2]
                efflumi=pathinfo[3]
                efffield='n/a' if efflumi is None else efflumi*scalefactor
                rows.append([runnumber,lsnum,pathname,bitname,hltpresc,l1presc,recordedlumi*scalefactor,efffield])
    assert(filename)
    if filename.upper()=='STDOUT':
        out=sys.stdout
        out.write(','.join(fieldnames)+'\n')
        for row in rows:
            out.write(str(row)+'\n')
    else:
        reporter=csvReporter.csvReporter(filename)
        reporter.writeRow(fieldnames)
        reporter.writeRows(rows)
def main(*args):
    # Dump instantaneous luminosity by time for one run to
    # instlumibytime-<runnum>.csv.
    #
    # args[1] : run number (required; missing -> return 1)
    # returns 0 on success, 2 on any other error
    runnum=0
    try:
        runnum=args[1]
        report=csvReporter.csvReporter('instlumibytime-'+str(runnum)+'.csv')
        # silence CORAL messages below error level
        msg=coral.MessageStream('')
        msg.setMsgVerbosity(coral.message_Level_Error)
        os.environ['CORAL_AUTH_PATH']='/afs/cern.ch/cms/lumi'
        svc = coral.ConnectionService()
        connectstr='oracle://cms_orcoff_prod/cms_lumi_prod'
        session=svc.connect(connectstr,accessMode=coral.access_ReadOnly)
        session.typeConverter().setCppTypeForSqlType("unsigned int","NUMBER(10)")
        session.typeConverter().setCppTypeForSqlType("unsigned long long","NUMBER(20)")
        session.transaction().start(True)  # read-only transaction
        schema=session.nominalSchema()
        # one fresh query handle per API call; deleted immediately after use
        q=schema.newQuery()
        runsummaryOut=lumiQueryAPI.runsummaryByrun(q,runnum)
        del q
        q=schema.newQuery()
        lumisummaryOut=lumiQueryAPI.lumisummaryByrun(q,runnum,'0001')
        del q
        session.transaction().commit()
        del session
        del svc
        #print runsummaryOut
        # runsummaryOut[3] is used as the run start time string;
        # assumes the runsummary row layout puts it at index 3 -- TODO confirm
        starttimestr=runsummaryOut[3]
        t=lumiTime.lumiTime()
        report.writeRow(['cmslsnum','utctime','unixtimestamp','instlumi'])
        for dataperls in lumisummaryOut:
            cmslsnum=dataperls[0]
            instlumi=dataperls[1]
            startorbit=dataperls[3]
            # convert the LS start orbit to an absolute time, then to a
            # unix timestamp with sub-second precision
            orbittime=t.OrbitToTime(starttimestr,startorbit)
            orbittimestamp=time.mktime(orbittime.timetuple())+orbittime.microsecond/1e6
            report.writeRow([cmslsnum,orbittime,orbittimestamp,instlumi])
    except IndexError:
        print('runnumber should be provided')
        return 1
    except Exception as er:
        print(str(er))
        return 2
    else:
        return 0
def toCSVTotDelivered(lumidata,filename,resultlines,scalefactor,isverbose):
    '''
    Dump per-run total delivered luminosity to CSV (or stdout).

    input:
    {run:[lumilsnum,cmslsnum,timestamp,beamstatus,beamenergy,deliveredlumi,calibratedlumierror,(bxidx,bxvalues,bxerrs),(bxidx,b1intensities,b2intensities)]}
    '''
    result=[]
    fieldnames = ['Run', 'Total LS', 'Delivered(/ub)','UTCTime','E(GeV)']
    if isverbose:
        fieldnames.append('Selected LS')
    for rline in resultlines:
        result.append(rline)
    for run in lumidata.keys():
        lsdata=lumidata[run]
        if lsdata is None:
            # fix: the verbose 'n/a' used to be added via
            # result.extend(['n/a']), creating a stray malformed row;
            # append it to this run's row instead
            row=[run,'n/a','n/a','n/a','n/a']
            if isverbose:
                row.append('n/a')
            result.append(row)
            continue
        nls=len(lsdata)
        totlumival=sum([x[5] for x in lsdata])
        beamenergyPerLS=[float(x[4]) for x in lsdata]
        avgbeamenergy=0.0
        if len(beamenergyPerLS):
            avgbeamenergy=sum(beamenergyPerLS)/len(beamenergyPerLS)
        runstarttime=lsdata[0][2]  # timestamp of the first LS
        if isverbose:
            selectedls=[(x[0],x[1]) for x in lsdata]
            result.append([run,nls,totlumival*scalefactor,runstarttime.strftime("%m/%d/%y %H:%M:%S"),avgbeamenergy,str(selectedls)])
        else:
            result.append([run,nls,totlumival*scalefactor,runstarttime.strftime("%m/%d/%y %H:%M:%S"),avgbeamenergy])
    sortedresult=sorted(result,key=lambda x : int(x[0]))
    assert(filename)
    if filename.upper()=='STDOUT':
        r=sys.stdout
        r.write(','.join(fieldnames)+'\n')
        for l in sortedresult:
            r.write(str(l)+'\n')
    else:
        r=csvReporter.csvReporter(filename)
        r.writeRow(fieldnames)
        r.writeRows(sortedresult)
def toCSVLSTrg(trgdata,filename,iresults=[],isverbose=False):
    '''
    Dump per-LS trigger deadtime (and, verbosely, per-bit counts) to CSV
    (or stdout).

    input:{run:[[cmslsnum,deadfrac,deadtimecount,bitzero_count,bitzero_prescale,[(name,count,presc),]],..]}
    '''
    result=[]
    # fix: the bit column is only written in verbose mode, so only include
    # it in the header then (previously the header always had 5 names while
    # non-verbose rows had 4 columns); also fixed the 'cout' typo
    fieldnames=['Run','LS','dfrac','dcount']
    if isverbose:
        fieldnames.append('bit,count,presc')
    for rline in iresults:
        result.append(rline)
    for run in sorted(trgdata):
        rundata=trgdata[run]
        if rundata is None:
            ll=[run,'n/a','n/a','n/a']
            if isverbose:
                ll.append('n/a')
            result.append(ll)
            continue
        for lsdata in rundata:
            cmslsnum=lsdata[0]
            deadfrac=lsdata[1]
            dcount=lsdata[2]
            if isverbose:
                # flatten [(name,count,presc),...] to 'name,count,presc;...',
                # skipping bits literally named 'False'
                bitdata=lsdata[5]
                flatbitdata=[x[0]+',%d'%x[1]+',%d'%x[2] for x in bitdata if x[0]!='False']
                bitdataStr=';'.join(flatbitdata)
                result.append([run,cmslsnum,deadfrac,dcount,bitdataStr])
            else:
                result.append([run,cmslsnum,deadfrac,dcount])
    assert(filename)
    if filename.upper()=='STDOUT':
        r=sys.stdout
        r.write(','.join(fieldnames)+'\n')
        for l in result:
            r.write(str(l)+'\n')
    else:
        r=csvReporter.csvReporter(filename)
        r.writeRow(fieldnames)
        r.writeRows(result)
def toCSVLumiByLSXing(lumidata,scalefactor,filename):
    '''
    Dump per-LS, per-bunch-crossing luminosity to CSV (or stdout).

    input:{run:[lumilsnum(0),cmslsnum(1),timestamp(2),beamstatus(3),beamenergy(4),deliveredlumi(5),recordedlumi(6),calibratedlumierror(7),{hltpath:[l1name,l1prescale,hltprescale,efflumi]},bxdata,beamdata]}
    output:
    fieldnames=['Run','CMSLS','Delivered(/ub)','Recorded(/ub)','BX']
    '''
    assert(filename)
    header=['run','ls','delivered(/ub)','recorded(/ub)','bx']
    rows=[]
    for runnumber in sorted(lumidata):
        perrundata=lumidata[runnumber]
        if perrundata is None:
            rows.append([runnumber,'n/a','n/a','n/a','n/a'])
            continue
        for perlsdata in perrundata:
            lsnum=perlsdata[1]
            if lsnum==0:  # LS not seen by cms daq
                continue
            dellumi=perlsdata[5]*scalefactor
            reclumi=perlsdata[6]*scalefactor
            (bxidx,bxvalues,bxerrs)=perlsdata[8]
            if bxidx and bxvalues:
                pairs=CommonUtil.transposed([bxidx,bxvalues])
                rows.append(CommonUtil.flatten([runnumber,lsnum,dellumi,reclumi,pairs]))
            else:
                rows.append([runnumber,lsnum,dellumi,reclumi])
    if filename.upper()=='STDOUT':
        out=sys.stdout
        out.write(','.join(header)+'\n')
        for row in rows:
            out.write(str(row)+'\n')
    else:
        reporter=csvReporter.csvReporter(filename)
        reporter.writeRow(header)
        reporter.writeRows(rows)
def toCSVConfTrg(trgconfdata,filename,iresults=[],isverbose=False):
    '''
    Dump the trigger configuration per run to CSV (or stdout).

    input {run:[datasource,normbitname,[allbits]]}
    '''
    header=['Run','source','bitnames']
    if isverbose:
        header.append('normbit')
    rows=list(iresults)
    for runnumber in sorted(trgconfdata):
        confdata=trgconfdata[runnumber]
        if confdata is None:
            narow=[runnumber,'n/a','n/a']
            if isverbose:
                narow.append('n/a')
            rows.append(narow)
            continue
        source=confdata[0]
        if source:
            source=source.split('/')[-1]  # keep only the basename
        normbit=confdata[1]
        bitnames=','.join(confdata[2])
        if isverbose:
            rows.append([runnumber,source,bitnames,normbit])
        else:
            rows.append([runnumber,source,bitnames])
    assert(filename)
    if filename.upper()=='STDOUT':
        out=sys.stdout
        out.write(','.join(header)+'\n')
        for row in rows:
            out.write(str(row)+'\n')
    else:
        reporter=csvReporter.csvReporter(filename)
        reporter.writeRow(header)
        reporter.writeRows(rows)
def plotPeakPerday_Time(self, rawdata={}, resultlines=[], minTime=None, maxTime=None, nticks=6, withannotation=False, yscale='linear', referenceLabel='Delivered', labels=['Delivered'], textoutput=None):
    '''
    THIS PLOT IS DELIVERED ONLY
    Input:
    rawdata={'Delivered':[(day,run,ls,instlumi)]}
    resultlines=[[day,run,ls,maxinstlum],[]]
    minTime (python DateTime) : min *begin* time to draw: format %m/%d/%y %H:%M:%S
    maxTime (python DateTime): max *begin* time to draw %m/%d/%y %H:%M:%S
    withannotation: wheather the boundary points should be annotated
    referenceLabel: the one variable that decides the total unit and the plot x-axis range
    labels: labels of the variables to plot

    NOTE(review): rawdata/resultlines default to mutable objects and rawdata
    is mutated below via setdefault -- callers relying on the defaults share
    state across calls; confirm before changing.
    '''
    xpoints = []
    ypoints = {}
    legendlist = []
    maxinfo = ''
    ymax = {}
    lut = lumiTime.lumiTime()
    # normalize the time window: strings are parsed, a missing maxTime
    # defaults to "now" (UTC)
    if not minTime:
        minTime = '03/01/10 00:00:00'
    minTime = lut.StrToDatetime(minTime, customfm='%m/%d/%y %H:%M:%S')
    if not maxTime:
        maxTime = datetime.datetime.utcnow()
    else:
        maxTime = lut.StrToDatetime(maxTime, customfm='%m/%d/%y %H:%M:%S')
    # merge pre-existing text rows into rawdata, skipping days already
    # present in the freshly-queried data or outside the time window
    for r in resultlines:
        day = int(r[0])
        runnumber = int(r[1])
        lsnum = int(r[2].split('.')[0])
        if rawdata and day in [int(t[0]) for t in rawdata[referenceLabel]]:
            continue
        if day < minTime.date().toordinal():
            continue
        if day > maxTime.date().toordinal():
            continue
        for i, lab in enumerate(labels):
            # the values to plot are the last len(labels) fields before the
            # trailing date column
            v = float(r[-(len(labels) - i) - 1])
            rawdata.setdefault(lab, []).append((day, runnumber, lsnum, v))
    if not rawdata:
        print('[WARNING]: no data, do nothing')
        return
    maxlum = max([t[3] for t in rawdata[referenceLabel]])
    minlum = min([t[3] for t in rawdata[referenceLabel] if t[3] > 0])  #used only for log scale, fin the non-zero bottom
    (unitstring, denomitor) = guessInstLumiUnit(maxlum)
    csvreport = None
    rows = []
    flat = []
    # plot over the full day range; days without data get 0.0
    MinDay = minTime.date().toordinal()
    MaxDay = maxTime.date().toordinal()
    fulldays = range(MinDay, MaxDay + 1)
    for label in rawdata.keys():
        yvalues = sorted(rawdata[label])
        alldays = [t[0] for t in yvalues]
        alldates = [str(datetime.date.fromordinal(t)) for t in alldays]
        ypoints[label] = []
        lumivals = [t[3] for t in yvalues]
        flat.append(lumivals)
        for d in fulldays:
            if not d in alldays:
                ypoints[label].append(0.0)
            else:
                thisdaylumi = [t[3] for t in yvalues if t[0] == d][0]
                # on a log axis clamp zero/tiny values to the smallest
                # positive lumi so they remain drawable
                if yscale == 'log':
                    if thisdaylumi < minlum:
                        thisdaylumi = minlum / denomitor
                    else:
                        thisdaylumi = thisdaylumi / denomitor
                else:
                    thisdaylumi = thisdaylumi / denomitor
                ypoints[label].append(thisdaylumi)
        ymax[label] = max(lumivals) / denomitor
        # NOTE(review): the next line is a no-op tuple expression --
        # presumably a mangled debug print; left untouched
        'ymax ', max(lumivals)
    xpoints = fulldays
    if textoutput:
        # NOTE(review): alldays/alldates here are leftovers from the last
        # iteration of the label loop above -- only correct when labels has
        # a single entry; confirm before relying on multi-label output
        csvreport = csvReporter.csvReporter(textoutput)
        head = ['#day', 'run', 'lsnum', 'maxinstlumi', 'date']
        csvreport.writeRow(head)
        flat.insert(0, alldays)
        allruns = [t[1] for t in rawdata[referenceLabel]]
        allls = [t[2] for t in rawdata[referenceLabel]]
        flat.insert(1, allruns)
        flat.insert(2, allls)
        flat.append(alldates)
        rows = list(zip(*flat))
        csvreport.writeRows([list(t) for t in rows])
    # date tick format: drop the year when the window is within one year
    yearStrMin = minTime.strftime('%Y')
    yearStrMax = maxTime.strftime('%Y')
    if yearStrMin == yearStrMax:
        dateFmt = matplotlib.dates.DateFormatter('%d/%m')
    else:
        dateFmt = matplotlib.dates.DateFormatter('%d/%m/%y')
    ax = self.__fig.add_subplot(111)
    if yscale == 'linear':
        ax.set_yscale('linear')
    elif yscale == 'log':
        ax.set_yscale('log')
    else:
        # NOTE(review): python2-style string raise -- SyntaxError on
        # python3 and a TypeError on modern python2; should become
        # raise ValueError(...); left untouched here
        raise 'unsupported yscale ', yscale
    majorLoc = matplotlib.ticker.LinearLocator(numticks=nticks)
    minorLoc = matplotlib.ticker.LinearLocator(numticks=nticks * 4)
    ax.xaxis.set_major_formatter(dateFmt)
    ax.set_xlabel(r'Date', position=(0.84, 0))
    ax.set_ylabel(r'L ' + unitstring, position=(0, 0.9))
    ax.xaxis.set_major_locator(majorLoc)
    ax.xaxis.set_minor_locator(minorLoc)
    xticklabels = ax.get_xticklabels()
    for tx in xticklabels:
        tx.set_horizontalalignment('right')
    ax.grid(True)
    # default color; overridden per label below when present in colormap
    cl = self.colormap['Max Inst']
    textsummaryhead = ['#TotalRunningDays']
    textsummaryline = ['#' + str(len(alldays))]
    for ylabel in labels:
        cl = 'k'
        if ylabel in self.colormap:
            cl = self.colormap[ylabel]
        ax.plot(xpoints, ypoints[ylabel], label='Max Inst', color=cl, drawstyle='steps')
        legendlist.append('Max Inst %.3f' % (ymax[ylabel]) + ' ' + unitstring)
        textsummaryhead.append('Max Inst' + ylabel)
        textsummaryline.append('%.3f' % (ymax[ylabel]) + ' ' + unitstring)
    if textoutput:
        csvreport.writeRow(textsummaryhead)
        csvreport.writeRow(textsummaryline)
    ax.legend(tuple(legendlist), loc='upper left')
    ax.set_xbound(lower=matplotlib.dates.date2num(minTime), upper=matplotlib.dates.date2num(maxTime))
    if withannotation:
        #annotations
        # NOTE(review): beginfo/endinfo/xmax are not defined anywhere in
        # this method, and ymax here is the dict above -- this branch would
        # raise NameError/TypeError if taken; confirm intended values
        trans = matplotlib.transforms.BlendedGenericTransform(ax.transData, ax.transAxes)
        ax.text(xpoints[0], 1.025, beginfo, transform=trans, horizontalalignment='left', size='x-small', color='green', bbox=dict(facecolor='white'))
        ax.text(xpoints[-1], 1.025, endinfo, transform=trans, horizontalalignment='left', size='x-small', color='green', bbox=dict(facecolor='white'))
        ax.annotate(maxinfo, xy=(xmax, ymax), xycoords='data', xytext=(0, 13), textcoords='offset points', arrowprops=dict(facecolor='green', shrink=0.05), size='x-small', horizontalalignment='center', color='green', bbox=dict(facecolor='white'))
    # title shows the actual first/last day present in the reference data
    firstday = datetime.date.fromordinal(rawdata[referenceLabel][0][0])
    lastday = datetime.date.fromordinal(rawdata[referenceLabel][-1][0])
    firstdayStr = firstday.strftime('%Y %b %d')
    lastdayStr = lastday.strftime('%Y %b %d')
    ax.set_title('CMS Peak Luminosity/Day (' + firstdayStr + ' - ' + lastdayStr + ')', size='small')
    #ax.autoscale(tight=True)
    ax.autoscale_view(tight=True, scalex=True, scaley=False)
    #ax.set_xmargin(0.015)
    self.__fig.autofmt_xdate(bottom=0.18, rotation=15, ha='right')
    self.__fig.subplots_adjust(bottom=0.2, left=0.15)
def plotSumX_Time(self, rawdata={}, resultlines=[], minTime=None, maxTime=None, nticks=6, yscale='linear', withannotation=False, referenceLabel='Delivered', labels=['Delivered', 'Recorded'], textoutput=None):
    '''
    input:
    rawdata = {'Delivered':[(runnumber,starttimestamp,stoptimestamp,lumiperrun)],'Recorded':[(runnumber,starttimestamp,stoptimestamp,lumiperrun)]}
    resultlines = [[runnumber,starttimestampStr,stoptimestampStr,dellumiperrun,reclumiperrun],[runnumber,starttimestampStr,stoptimestampStr,dellumiperrun,reclumiperrun],]
    minTime (python DateTime) : min *begin* time to draw: format %m/%d/%y %H:%M:%S
    maxTime (python DateTime): max *begin* time to draw %m/%d/%y %H:%M:%S
    yscale: linear,log or both
    withannotation: wheather the boundary points should be annotated
    referenceLabel: the one variable that decides the total unit and the plot x-axis range
    labels: labels of the variables to plot

    NOTE(review): rawdata/resultlines default to mutable objects and rawdata
    is mutated below via setdefault -- shared across calls that rely on the
    defaults; confirm before changing.
    '''
    xpoints = []
    ypoints = {}
    ytotal = {}
    lut = lumiTime.lumiTime()
    # normalize the time window: strings are parsed, missing maxTime
    # defaults to "now" (UTC)
    if not minTime:
        minTime = '03/01/10 00:00:00'
    minTime = lut.StrToDatetime(minTime, customfm='%m/%d/%y %H:%M:%S')
    if not maxTime:
        maxTime = datetime.datetime.utcnow()
    else:
        maxTime = lut.StrToDatetime(maxTime, customfm='%m/%d/%y %H:%M:%S')
    # merge pre-existing text rows, skipping runs already in rawdata or
    # whose start time is outside the window
    for r in resultlines:
        runnumber = int(r[0])
        starttimeStr = r[1].split('.')[0]   # drop fractional seconds
        starttime = lut.StrToDatetime(starttimeStr, customfm='%Y-%m-%d %H:%M:%S')
        stoptimeStr = r[2].split('.')[0]
        stoptime = lut.StrToDatetime(stoptimeStr, customfm='%Y-%m-%d %H:%M:%S')
        if rawdata and runnumber in [t[0] for t in rawdata[referenceLabel]]:
            continue
        if starttime < minTime:
            continue
        if starttime > maxTime:
            continue
        for i, lab in enumerate(labels):
            # the values to plot are always the last len(labels) fields
            v = float(r[-(len(labels) - i)])
            rawdata.setdefault(lab, []).append((runnumber, starttime, stoptime, v))
    if not rawdata:
        print('[WARNING]: no data, do nothing')
        return
    tot = sum([t[3] for t in rawdata[referenceLabel]])
    (unitstring, denomitor) = guessLumiUnit(tot)
    csvreport = None
    rows = []
    flat = []
    # build cumulative (integrated) lumi curves per label
    for label, yvalues in rawdata.items():
        yvalues.sort()
        flat.append([t[3] for t in yvalues])
        if label == referenceLabel:
            # narrow the displayed window to the actual data range
            minTime = yvalues[0][1]
            maxTime = yvalues[-1][1]
        ypoints[label] = []
        lumivals = [t[3] for t in yvalues]
        for i, val in enumerate(lumivals):
            # NOTE(review): running sum recomputed per point -- O(n^2);
            # harmless for typical run counts, left untouched
            ypoints[label].append(sum(lumivals[0:i + 1]) / denomitor)
        ytotal[label] = sum(lumivals) / denomitor
    xpoints = [matplotlib.dates.date2num(t[1]) for t in rawdata[referenceLabel]]
    ax = self.__fig.add_subplot(111)
    ax.set_yscale(yscale)
    # date tick format: drop the year when the window is within one year
    yearStrMin = minTime.strftime('%Y')
    yearStrMax = maxTime.strftime('%Y')
    if yearStrMin == yearStrMax:
        dateFmt = matplotlib.dates.DateFormatter('%d/%m')
    else:
        dateFmt = matplotlib.dates.DateFormatter('%d/%m/%y')
    majorLoc = matplotlib.ticker.LinearLocator(numticks=nticks)
    ax.xaxis.set_major_locator(majorLoc)
    minorLoc = matplotlib.ticker.LinearLocator(numticks=nticks * 4)
    ax.xaxis.set_major_formatter(dateFmt)
    ax.set_xlabel(r'Date', position=(0.84, 0))
    ax.set_ylabel(r'L ' + unitstring, position=(0, 0.9))
    ax.xaxis.set_minor_locator(minorLoc)
    ax.set_xbound(lower=xpoints[0], upper=xpoints[-1])
    xticklabels = ax.get_xticklabels()
    for tx in xticklabels:
        tx.set_horizontalalignment('left')
    ax.grid(True)
    keylist = sorted(ypoints.keys())
    keylist.insert(0, keylist.pop(keylist.index(referenceLabel)))  #move refereceLabel to front from now on
    legendlist = []
    head = ['#Run', 'StartTime', 'StopTime']
    textsummaryhead = ['#TotalRun']
    textsummaryline = ['#' + str(len(xpoints))]
    for ylabel in keylist:
        cl = 'k'  # default color when the label is not in the colormap
        if ylabel in self.colormap:
            cl = self.colormap[ylabel]
        ax.plot(xpoints, ypoints[ylabel], label=ylabel, color=cl, drawstyle='steps')
        legendlist.append(ylabel + ' ' + '%.3f' % (ytotal[ylabel]) + ' ' + unitstring)
        textsummaryhead.append('Total' + ylabel)
        textsummaryline.append('%.3f' % (ytotal[ylabel]) + ' ' + unitstring)
        head.append(ylabel)
    if textoutput:
        # dump the plotted numbers plus a summary header/footer to csv
        csvreport = csvReporter.csvReporter(textoutput)
        csvreport.writeRow(head)
        allruns = [int(t[0]) for t in rawdata[referenceLabel]]
        allstarts = [lut.DatetimeToStr(t[1], customfm='%Y-%m-%d %H:%M:%S') for t in rawdata[referenceLabel]]
        allstops = [lut.DatetimeToStr(t[2], customfm='%Y-%m-%d %H:%M:%S') for t in rawdata[referenceLabel]]
        flat.insert(0, allruns)
        flat.insert(1, allstarts)
        flat.insert(2, allstops)
        rows = list(zip(*flat))
        csvreport.writeRows([list(t) for t in rows])
        csvreport.writeRow(textsummaryhead)
        csvreport.writeRow(textsummaryline)
    #annotations
    trans = matplotlib.transforms.BlendedGenericTransform(ax.transData, ax.transAxes)
    #print 'run boundary ',runs[0],runs[-1]
    #print 'xpoints boundary ',xpoints[0],xpoints[-1]
    #annotation
    if withannotation:
        # annotate the first and last run numbers at the plot boundaries
        runs = [t[0] for t in rawdata[referenceLabel]]
        ax.text(xpoints[0], 1.025, str(runs[0]), transform=trans, horizontalalignment='left', size='x-small', color='green', bbox=dict(facecolor='white'))
        ax.text(xpoints[-1], 1.025, str(runs[-1]), transform=trans, horizontalalignment='left', size='x-small', color='green', bbox=dict(facecolor='white'))
    if yearStrMin == yearStrMax:
        # NOTE(review): index [1][1] looks like an off-by-one -- the comment
        # says "first run beg time", which would be [0][1]; confirm before
        # changing
        firsttimeStr = rawdata[referenceLabel][1][1].strftime('%b %d %H:%M')  #time range(start) in the title is the first run beg time
        lasttimeStr = rawdata[referenceLabel][-1][2].strftime('%b %d %H:%M')  #time range(stop) in the tile is the last run stop time
        #firstimeStr=minTime.strftime('%b %d %H:%M')
        #lasttimeStr=maxTime.strftime('%b %d %H:%M')
        #ax.set_title('CMS Total Integrated Luminosity '+yearStrMin+' ('+firstimeStr+' - '+lasttimeStr+' UTC)',size='small',family='fantasy')
        ax.set_title('CMS Total Integrated Luminosity ' + yearStrMin + ' (' + firsttimeStr + ' - ' + lasttimeStr + ' UTC)', size='small')
    else:
        #ax.set_title('CMS Total Integrated Luminosity '+yearStrMin+'-'+yearStrMax,size='small',family='fantasy')
        ax.set_title('CMS Total Integrated Luminosity ' + yearStrMin + '-' + yearStrMax, size='small')
    ax.legend(tuple(legendlist), loc='upper left')
    ax.autoscale_view(tight=True, scalex=True, scaley=False)
    self.__fig.autofmt_xdate(bottom=0.18, rotation=15, ha='right')
    self.__fig.subplots_adjust(bottom=0.2, left=0.15)
def plotSumX_Fill(self, rawdata={}, resultlines=[], minFill=None, maxFill=None, nticks=6, yscale='linear', withannotation=False, referenceLabel='Delivered', labels=['Delivered', 'Recorded'], textoutput=None):
    '''
    input:
    rawdata = {'Delivered':[(fill,runnumber,lumiperrun)],'Recorded':[(fill,runnumber,lumiperrun)]}
    resultlines = [[fillnumber,runnumber,dellumiperrun,reclumiperrun],[fillnumber,runnumber,dellumiperrun,reclumiperrun],]
    minFill : min fill to draw
    maxFill : max fill to draw
    yscale: linear,log or both
    withannotation: wheather the boundary points should be annotated
    textoutput: text output file name.

    NOTE(review): rawdata/resultlines default to mutable objects and rawdata
    is mutated below via setdefault -- shared across calls that rely on the
    defaults; confirm before changing.
    '''
    ytotal = {}
    ypoints = {}
    for r in resultlines:  #parse old text data
        fillnum = int(r[0])
        runnum = int(r[1])
        # skip (fill,run) pairs already present in the queried data or
        # outside the requested fill range
        if rawdata and (fillnum, runnum) in [(t[0], t[1]) for t in rawdata[referenceLabel]]:
            continue
        if minFill and fillnum < minFill:
            continue
        if maxFill and fillnum > maxFill:
            continue
        for i, lab in enumerate(labels):
            v = float(r[-(len(labels) - i)])  #the values to plot are always the last n fields
            rawdata.setdefault(lab, []).append((fillnum, runnum, v))
    #print 'fillrunDict ',fillrunDict
    if not rawdata:
        print('[WARNING]: no data, do nothing')
        return
    tot = sum([t[2] for t in rawdata[referenceLabel]])
    # NOTE(review): beginfo/endinfo stay empty, so the annotation branch
    # below draws empty text boxes; presumably placeholders -- confirm
    beginfo = ''
    endinfo = ''
    (unitstring, denomitor) = guessLumiUnit(tot)
    csvreport = None
    rows = []
    flat = []
    # build cumulative (integrated) lumi curves per label
    for label, yvalues in rawdata.items():
        yvalues.sort()
        flat.append([t[2] for t in yvalues])
        ypoints[label] = []
        ytotal[label] = 0.0
        lumivals = [t[2] for t in yvalues]
        for i, val in enumerate(lumivals):
            # NOTE(review): running sum recomputed per point -- O(n^2);
            # harmless for typical fill counts, left untouched
            ypoints[label].append(sum(lumivals[0:i + 1]) / denomitor)
        ytotal[label] = sum(lumivals) / denomitor
    xpoints = [t[0] for t in rawdata[referenceLabel]]  #after sort
    ax = self.__fig.add_subplot(111)
    ax.set_xlabel(r'LHC Fill Number', position=(0.84, 0))
    ax.set_ylabel(r'L ' + unitstring, position=(0, 0.9))
    ax.set_xbound(lower=xpoints[0], upper=xpoints[-1])
    if yscale == 'linear':
        ax.set_yscale('linear')
    elif yscale == 'log':
        ax.set_yscale('log')
    else:
        # NOTE(review): python2-style string raise -- SyntaxError on
        # python3 and a TypeError on modern python2; should become
        # raise ValueError(...); left untouched here
        raise 'unsupported yscale ', yscale
    xticklabels = ax.get_xticklabels()
    majorLocator = matplotlib.ticker.LinearLocator(nticks)
    majorFormatter = matplotlib.ticker.FormatStrFormatter('%d')
    #minorLocator=matplotlib.ticker.MultipleLocator(sampleinterval)
    ax.xaxis.set_major_locator(majorLocator)
    ax.xaxis.set_major_formatter(majorFormatter)
    #ax.xaxis.set_minor_locator(minorLocator)
    ax.grid(True)
    keylist = sorted(ypoints.keys())
    keylist.insert(0, keylist.pop(keylist.index(referenceLabel)))  #move refereceLabel to front from now on
    legendlist = []
    head = ['#fill', 'run']
    textsummaryhead = ['#TotalFill']
    textsummaryline = ['#' + str(len(xpoints))]
    for ylabel in keylist:
        cl = 'k'  # default color when the label is not in the colormap
        if ylabel in self.colormap:
            cl = self.colormap[ylabel]
        ax.plot(xpoints, ypoints[ylabel], label=ylabel, color=cl, drawstyle='steps')
        legendlist.append(ylabel + ' ' + '%.3f' % (ytotal[ylabel]) + ' ' + unitstring)
        textsummaryhead.append('Total' + ylabel)
        textsummaryline.append('%.3f' % (ytotal[ylabel]) + ' ' + unitstring)
        head.append(ylabel)
    if textoutput:
        # dump the plotted numbers plus a summary header/footer to csv
        csvreport = csvReporter.csvReporter(textoutput)
        allfills = [int(t[0]) for t in rawdata[referenceLabel]]
        allruns = [int(t[1]) for t in rawdata[referenceLabel]]
        flat.insert(0, allfills)
        flat.insert(1, allruns)
        rows = list(zip(*flat))
        csvreport.writeRow(head)
        csvreport.writeRows([list(t) for t in rows])
        csvreport.writeRow(textsummaryhead)
        csvreport.writeRow(textsummaryline)
    #font=FontProperties(size='medium',weight='demibold')
    #annotations
    if withannotation:
        trans = matplotlib.transforms.BlendedGenericTransform(ax.transData, ax.transAxes)
        ax.text(xpoints[0], 1.025, beginfo, transform=trans, horizontalalignment='left', size='x-small', color='green', bbox=dict(facecolor='white'))
        ax.text(xpoints[-1], 1.025, endinfo, transform=trans, horizontalalignment='left', size='x-small', color='green', bbox=dict(facecolor='white'))
    #legend
    ax.legend(tuple(legendlist), loc='upper left')
    #adjust
    self.__fig.subplots_adjust(bottom=0.1, left=0.1)
def plotSumX_Run(self, rawdata=None, resultlines=[], minRun=None,
                 maxRun=None, nticks=6, yscale='linear',
                 withannotation=False, referenceLabel='Delivered',
                 labels=['Delivered', 'Recorded'], textoutput=None):
    '''
    Plot cumulative luminosity as a function of run number.

    input:
      rawdata = {'Delivered':[(runnumber,lumiperrun),..],'Recorded':[(runnumber,lumiperrun),..]}
      resultlines = [[runnumber,dellumiperrun,reclumiperrun],...] pre-parsed text rows
      minRun : minimal runnumber required
      maxRun : max runnumber required
      yscale: 'linear' or 'log'
      withannotation: whether the boundary points should be annotated
      referenceLabel: the one variable that decides the total unit and the plot x-axis range
      labels: labels of the variables to plot
      textoutput: text output file name.

    raises:
      ValueError if yscale is neither 'linear' nor 'log'
    '''
    # rawdata is mutated via setdefault below; a None default avoids the
    # shared-mutable-default pitfall of the previous rawdata={} signature
    if rawdata is None:
        rawdata = {}
    ypoints = {}
    ytotal = {}
    for r in resultlines:  # parse old text data
        runnumber = int(r[0])
        # use text input only if not in selected data
        if rawdata and runnumber in [t[0] for t in rawdata[referenceLabel]]:
            continue
        if minRun and runnumber < minRun:
            continue
        if maxRun and runnumber > maxRun:
            continue
        for i, lab in enumerate(labels):
            # NOTE(review): the extra -1 skips the row's final field;
            # presumably run-mode text rows carry a trailing column --
            # confirm against the text file format
            v = float(r[-(len(labels) - i) - 1])
            rawdata.setdefault(lab, []).append((runnumber, v))
    if not rawdata:
        print('[WARNING]: no data to plot , exit')
        return
    tot = sum([t[1] for t in rawdata[referenceLabel]])
    (unitstring, denomitor) = guessLumiUnit(tot)
    csvreport = None
    rows = []
    flat = []
    for label, yvalues in rawdata.items():
        yvalues.sort()
        flat.append([t[1] for t in yvalues])
        ypoints[label] = []
        ytotal[label] = 0.0
        lumivals = [t[1] for t in yvalues]
        for i, val in enumerate(lumivals):
            # integrated lumi up to and including this run
            ypoints[label].append(sum(lumivals[0:i + 1]) / denomitor)
        ytotal[label] = sum(lumivals) / denomitor
    xpoints = [t[0] for t in rawdata[referenceLabel]]
    ax = self.__fig.add_subplot(111)
    if yscale == 'linear':
        ax.set_yscale('linear')
    elif yscale == 'log':
        ax.set_yscale('log')
    else:
        # was: raise 'unsupported yscale ', yscale -- raising a string is
        # illegal; raise a proper exception type instead
        raise ValueError('unsupported yscale ' + str(yscale))
    ax.set_xlabel(r'Run', position=(0.95, 0))
    ax.set_ylabel(r'L ' + unitstring, position=(0, 0.9))
    xticklabels = ax.get_xticklabels()
    for tx in xticklabels:
        tx.set_rotation(30)
    majorLocator = matplotlib.ticker.LinearLocator(nticks)
    majorFormatter = matplotlib.ticker.FormatStrFormatter('%d')
    minorLocator = matplotlib.ticker.LinearLocator(numticks=4 * nticks)
    ax.xaxis.set_major_locator(majorLocator)
    ax.xaxis.set_major_formatter(majorFormatter)
    ax.xaxis.set_minor_locator(minorLocator)
    ax.set_xbound(lower=xpoints[0], upper=xpoints[-1])
    ax.grid(True)
    keylist = sorted(ypoints.keys())
    keylist.insert(0, keylist.pop(keylist.index(
        referenceLabel)))  # move referenceLabel to front from now on
    legendlist = []
    head = ['#Run']
    textsummaryhead = ['#TotalRun']
    textsummaryline = ['#' + str(len(xpoints))]
    for ylabel in keylist:
        cl = 'k'
        if ylabel in self.colormap:
            cl = self.colormap[ylabel]
        ax.plot(xpoints, ypoints[ylabel], label=ylabel, color=cl,
                drawstyle='steps')
        legendlist.append(ylabel + ' ' + '%.3f' % (ytotal[ylabel]) + ' ' + unitstring)
        textsummaryhead.append('Total' + ylabel)
        textsummaryline.append('%.3f' % (ytotal[ylabel]) + ' ' + unitstring)
        head.append(ylabel)
    if textoutput:
        csvreport = csvReporter.csvReporter(textoutput)
        csvreport.writeRow(head)
        allruns = [int(t[0]) for t in rawdata[referenceLabel]]
        flat.insert(0, allruns)
        rows = list(zip(*flat))
        csvreport.writeRows([list(t) for t in rows])
        csvreport.writeRow(textsummaryhead)
        csvreport.writeRow(textsummaryline)
    # legend
    ax.legend(tuple(legendlist), loc='upper left')
    # adjust
    self.__fig.subplots_adjust(bottom=0.18, left=0.1)
    # annotate first/last run numbers above the axes boundary
    if withannotation:
        trans = matplotlib.transforms.BlendedGenericTransform(
            ax.transData, ax.transAxes)
        ax.text(xpoints[0], 1.025, str(xpoints[0]), transform=trans,
                horizontalalignment='left', size='x-small', color='green',
                bbox=dict(facecolor='white'))
        ax.text(xpoints[-1], 1.025, str(xpoints[-1]), transform=trans,
                horizontalalignment='left', size='x-small', color='green',
                bbox=dict(facecolor='white'))
def main():
    '''Command-line entry point of the lumi validation tool.

    Actions:
      batchupdate -- load run/ls validation flags from a csv file (-i required)
      update      -- set one flag for a whole run (-r) or a run/ls selection (-runls)
      dump        -- print, or export to csv (-o), the stored validation data

    Connects to lumiDB via CORAL in update mode; -c (connect string) and
    -P (authentication path) are mandatory.
    '''
    parser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]),description = "Lumi Validation",formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    allowedActions = ['batchupdate','update','dump']
    allowedFlags = ['UNKNOWN','GOOD','BAD','SUSPECT']
    # parse arguments
    parser.add_argument('action',choices=allowedActions,help='command actions')
    parser.add_argument('-c',dest='connect',action='store',required=True,help='connect string to lumiDB')
    parser.add_argument('-P',dest='authpath',action='store',required=True,help='path to authentication file')
    parser.add_argument('-i',dest='inputfile',action='store',help='lumi range selection file,required for batchupdate action')
    parser.add_argument('-o',dest='outputfile',action='store',help='output to csv file')
    parser.add_argument('-r',dest='runnumber',action='store',type=int,help='run number,optional')
    parser.add_argument('-runls',dest='runls',action='store',help='selection string,optional. Example [1234:[],4569:[1,1],[2,100]]')
    parser.add_argument('-flag',dest='flag',action='store',default='UNKNOWN',help='flag string,optional')
    parser.add_argument('--verbose',dest='verbose',action='store_true',help='verbose mode for printing' )
    parser.add_argument('--debug',dest='debug',action='store_true',help='debug')
    options=parser.parse_args()
    if options.flag.upper() not in allowedFlags:
        print 'unrecognised flag ',options.flag.upper()
        # NOTE(review): bare 'raise' with no active exception is itself an
        # error; presumably meant to abort after the message -- consider
        # raising ValueError explicitly
        raise
    os.environ['CORAL_AUTH_PATH'] = options.authpath
    connectstring=options.connect
    svc = coral.ConnectionService()
    msg=coral.MessageStream('')
    if options.debug:
        msg.setMsgVerbosity(coral.message_Level_Debug)
    else:
        msg.setMsgVerbosity(coral.message_Level_Error)
    # open an update-mode CORAL session; unsigned DB columns are mapped to
    # wide enough C++ types to avoid overflow on run/ls numbers
    session=svc.connect(connectstring,accessMode=coral.access_Update)
    session.typeConverter().setCppTypeForSqlType("unsigned int","NUMBER(10)")
    session.typeConverter().setCppTypeForSqlType("unsigned long long","NUMBER(20)")
    result={}#parsing result {run:[[ls,status,comment]]}
    if options.debug :
        # re-created on purpose? duplicates the MessageStream set up above
        msg=coral.MessageStream('')
        msg.setMsgVerbosity(coral.message_Level_Debug)
    if options.action=='batchupdate':
        #populate from csv file, require -i argument
        if not options.inputfile:
            print 'inputfile -i option is required for batchupdate'
            raise
        # expected csv columns: run, ls, status, comment
        csvReader=csv.reader(open(options.inputfile),delimiter=',')
        for row in csvReader:
            if len(row)==0:
                continue
            fieldrun=str(row[0]).strip()
            fieldls=str(row[1]).strip()
            fieldstatus=row[2]
            fieldcomment=row[3]
            if not result.has_key(int(fieldrun)):
                result[int(fieldrun)]=[]
            result[int(fieldrun)].append([int(fieldls),fieldstatus,fieldcomment])
        insertupdateValidationData(session,result)
    if options.action=='update':
        #update flag interactively, require -runls argument
        #runls={run:[]} populate all CMSLSNUM found in LUMISUMMARY
        #runls={run:[[1,1],[2,5]],run:[[1,1],[2,5]]}
        #default value
        if not options.runls and not options.runnumber:
            print 'option -runls or -r is required for update'
            raise
        if not options.flag:
            print 'option -flag is required for update'
            raise
        if options.flag.upper() not in allowedFlags:
            print 'unrecognised flag ',options.flag
            raise
        # -r selects the whole run (empty ls list); -runls selects specific
        # lumisection ranges via a JSON-like selection string
        if options.runnumber:
            runlsjson='{"'+str(options.runnumber)+'":[]}'
        elif options.runls:
            runlsjson=CommonUtil.tolegalJSON(options.runls)
        sparser=selectionParser.selectionParser(runlsjson)
        runsandls=sparser.runsandls()
        commentStr='NA'
        statusStr=options.flag
        for run,lslist in runsandls.items():
            if not result.has_key(run):
                result[run]=[]
            for ls in lslist:
                result[run].append([ls,statusStr,commentStr])
        insertupdateValidationData(session,result)
    if options.action=='dump':
        if options.runls or options.inputfile:
            # restrict the dump to a run/ls selection given either inline
            # (-runls) or via a selection file (-i); -i wins if both given
            if options.runls:
                runlsjson=CommonUtil.tolegalJSON(options.runls)
                sparser=selectionParser.selectionParser(runlsjson)
                runsandls=sparser.runsandls()
            if options.inputfile:
                p=inputFilesetParser.inputFilesetParser(options.inputfile)
                runsandls=p.runsandls()
            for runnum,lslist in runsandls.items():
                dataperrun=getValidationData(session,run=runnum,cmsls=lslist)
                if dataperrun.has_key(runnum):
                    result[runnum]=dataperrun[runnum]
                else:
                    result[runnum]=[]
        else:
            # no selection: dump one run (-r) or, if None, everything the
            # getValidationData default allows
            result=getValidationData(session,run=options.runnumber)
        runs=result.keys()
        runs.sort()
        if options.outputfile:
            # csv export: one row per (run, ls, status, comment)
            r=csvReporter.csvReporter(options.outputfile)
            for run in runs:
                for perrundata in result[run]:
                    r.writeRow([str(run),str(perrundata[0]),perrundata[1],perrundata[2]])
        else:
            # screen dump, one section per run
            for run in runs:
                print '== ='
                if len(result[run])==0:
                    print str(run),'no validation data'
                    continue
                for lsdata in result[run]:
                    print str(run)+','+str(lsdata[0])+','+lsdata[1]+','+lsdata[2]
    # release the CORAL session and service explicitly
    del session
    del svc
def toCSVTotEffective(lumidata,filename,resultlines,scalefactor,isverbose):
    '''
    Write per-run total effective luminosity per HLT path to csv or stdout.

    input:
      lumidata: {run:[lumilsnum(0),triggeredls(1),timestamp(2),beamstatus(3),beamenergy(4),deliveredlumi(5),recordedlumi(6),calibratedlumierror(7),{hltpath:[l1name,l1prescale,hltprescale,efflumi]}(8),bxdata,beamdata]}
      filename: output csv file name, or 'STDOUT' to print to screen
      resultlines: pre-parsed rows prepended verbatim to the output
      scalefactor: multiplicative correction applied to all lumi values
      isverbose: unused here, kept for interface compatibility
    output columns: Run,SelectedLS,Recorded,HLTpath,L1bit,Effective(/ub)
    '''
    result=[]#[run,selectedlsStr,recorded,hltpath,l1bitname,efflumi]
    selectedcmsls=[]
    for rline in resultlines:
        result.append(rline)
    for run in sorted(lumidata):#loop over runs
        hprescdict={}#{hltpathname:[hltprescale,...]}
        lprescdict={}#{l1name(with quotes):[l1prescale,...]}
        rundata=lumidata[run]
        if rundata is None:
            result.append([str(run),'n/a','n/a','n/a','n/a','n/a'])
            continue
        selectedcmsls=[x[1] for x in rundata if x[1]!=0]
        totefflumiDict={}
        totrecorded=0.0
        pathmap={}#{hltpathname:l1name}
        for lsdata in rundata:
            cmslsnum=lsdata[1]
            efflumiDict=lsdata[8]# this ls has no such path?
            if not efflumiDict:
                if cmslsnum in selectedcmsls:
                    selectedcmsls.remove(cmslsnum)
                continue
            for hltpathname,pathdata in efflumiDict.items():
                if hltpathname not in totefflumiDict:
                    totefflumiDict[hltpathname]=0.0
                    pathmap[hltpathname]='n/a'
                l1name=pathdata[0]
                l1presc=pathdata[1]
                hltpresc=pathdata[2]
                lumival=pathdata[3]
                if l1presc is None or hltpresc is None:#if found all null prescales and if it is in the selectedcmsls, remove it because incomplete
                    if cmslsnum in selectedcmsls:
                        selectedcmsls.remove(cmslsnum)
                else:
                    # NOTE(review): recordedlumi is accumulated once per hlt
                    # path within this ls, so the Recorded column scales with
                    # the number of paths -- confirm this is intended
                    recordedlumi=lsdata[6]
                    totrecorded+=recordedlumi
                    if hltpathname not in hprescdict:
                        hprescdict[hltpathname]=[]
                    hprescdict[hltpathname].append(hltpresc)
                    if l1name not in lprescdict:
                        lprescdict[l1name]=[]
                    lprescdict[l1name].append(l1presc)
                    if lumival:
                        totefflumiDict[hltpathname]+=lumival
                    pathmap[hltpathname]=l1name.replace('\"','')
        if len(selectedcmsls)==0:
            selectedlsStr='n/a'
        else:
            selectedlsStr=CommonUtil.splitlistToRangeString(selectedcmsls)
        for name in sorted(totefflumiDict):
            lname=pathmap[name]
            if lname=='n/a':
                continue
            # bugfix: look up the prescales of the path being reported
            # ('name'); the old code used the stale loop variable
            # 'hltpathname', giving every row the last path's prescales
            hprescs=list(set(hprescdict[name]))
            lprescs=list(set(lprescdict['"'+lname+'"']))
            hprescStr='('+','.join(['%d'%(x) for x in hprescs])+')'
            lprescStr='('+','.join(['%d'%(x) for x in lprescs])+')'
            result.append([run,selectedlsStr,totrecorded*scalefactor,name+hprescStr,lname+lprescStr,totefflumiDict[name]*scalefactor])
    fieldnames=['Run','SelectedLS','Recorded','HLTpath','L1bit','Effective(/ub)']
    assert(filename)
    if filename.upper()=='STDOUT':
        r=sys.stdout
        r.write(','.join(fieldnames)+'\n')
        for l in result:
            r.write(str(l)+'\n')
    else:
        r=csvReporter.csvReporter(filename)
        r.writeRow(fieldnames)
        r.writeRows(result)
def plotSumX_Time(self,rawdata=None,resultlines=[],minTime=None,maxTime=None,nticks=6,yscale='linear',withannotation=False,referenceLabel='Delivered',labels=['Delivered','Recorded'],textoutput=None):
    '''
    Plot cumulative luminosity as a function of (run start) time.

    input:
      rawdata = {'Delivered':[(runnumber,starttimestamp,stoptimestamp,lumiperrun)],'Recorded':[(runnumber,starttimestamp,stoptimestamp,lumiperrun)]}
      resultlines = [[runnumber,starttimestampStr,stoptimestampStr,dellumiperrun,reclumiperrun],...]
      minTime : min *begin* time to draw, string of format %m/%d/%y %H:%M:%S
      maxTime : max *begin* time to draw, same format; defaults to utcnow
      yscale: passed straight to Axes.set_yscale ('linear' or 'log')
      withannotation: whether the boundary points should be annotated
      referenceLabel: the one variable that decides the total unit and the plot x-axis range
      labels: labels of the variables to plot
      textoutput: text output file name.
    '''
    # rawdata is mutated via setdefault below; a None default avoids the
    # shared-mutable-default pitfall of the previous rawdata={} signature
    if rawdata is None:
        rawdata={}
    xpoints=[]
    ypoints={}
    ytotal={}
    lut=lumiTime.lumiTime()
    if not minTime:
        minTime='03/01/10 00:00:00'
    minTime=lut.StrToDatetime(minTime,customfm='%m/%d/%y %H:%M:%S')
    if not maxTime:
        maxTime=datetime.datetime.utcnow()
    else:
        maxTime=lut.StrToDatetime(maxTime,customfm='%m/%d/%y %H:%M:%S')
    for r in resultlines:  # parse old text data
        runnumber=int(r[0])
        # timestamps may carry a fractional part; drop it before parsing
        starttimeStr=r[1].split('.')[0]
        starttime=lut.StrToDatetime(starttimeStr,customfm='%Y-%m-%d %H:%M:%S')
        stoptimeStr=r[2].split('.')[0]
        stoptime=lut.StrToDatetime(stoptimeStr,customfm='%Y-%m-%d %H:%M:%S')
        # use text input only if not already in the freshly queried data
        if rawdata and runnumber in [t[0] for t in rawdata[referenceLabel]]:
            continue
        if starttime<minTime:
            continue
        if starttime>maxTime:
            continue
        for i,lab in enumerate(labels):
            # the values to plot are always the last n fields
            v=float(r[-(len(labels)-i)])
            rawdata.setdefault(lab,[]).append((runnumber,starttime,stoptime,v))
    if not rawdata:
        print('[WARNING]: no data, do nothing')
        return
    tot=sum([t[3] for t in rawdata[referenceLabel]])
    (unitstring,denomitor)=guessLumiUnit(tot)
    csvreport=None
    rows=[]
    flat=[]
    for label,yvalues in rawdata.items():
        yvalues.sort()
        flat.append([t[3] for t in yvalues])
        if label==referenceLabel:
            # reference sample defines the plotted time window
            minTime=yvalues[0][1]
            maxTime=yvalues[-1][1]
        ypoints[label]=[]
        lumivals=[t[3] for t in yvalues]
        for i,val in enumerate(lumivals):
            # integrated lumi up to and including this run
            ypoints[label].append(sum(lumivals[0:i+1])/denomitor)
        ytotal[label]=sum(lumivals)/denomitor
    xpoints=[matplotlib.dates.date2num(t[1]) for t in rawdata[referenceLabel]]
    ax=self.__fig.add_subplot(111)
    ax.set_yscale(yscale)
    yearStrMin=minTime.strftime('%Y')
    yearStrMax=maxTime.strftime('%Y')
    # show the year in tick labels only when the range spans years
    if yearStrMin==yearStrMax:
        dateFmt=matplotlib.dates.DateFormatter('%d/%m')
    else:
        dateFmt=matplotlib.dates.DateFormatter('%d/%m/%y')
    majorLoc=matplotlib.ticker.LinearLocator(numticks=nticks)
    ax.xaxis.set_major_locator(majorLoc)
    minorLoc=matplotlib.ticker.LinearLocator(numticks=nticks*4)
    ax.xaxis.set_major_formatter(dateFmt)
    ax.set_xlabel(r'Date',position=(0.84,0))
    ax.set_ylabel(r'L '+unitstring,position=(0,0.9))
    ax.xaxis.set_minor_locator(minorLoc)
    ax.set_xbound(lower=xpoints[0],upper=xpoints[-1])
    xticklabels=ax.get_xticklabels()
    for tx in xticklabels:
        tx.set_horizontalalignment('left')
    ax.grid(True)
    keylist=sorted(ypoints.keys())
    keylist.insert(0,keylist.pop(keylist.index(referenceLabel)))#move referenceLabel to front from now on
    legendlist=[]
    head=['#Run','StartTime','StopTime']
    textsummaryhead=['#TotalRun']
    textsummaryline=['#'+str(len(xpoints))]
    for ylabel in keylist:
        cl='k'
        if ylabel in self.colormap:
            cl=self.colormap[ylabel]
        ax.plot(xpoints,ypoints[ylabel],label=ylabel,color=cl,drawstyle='steps')
        legendlist.append(ylabel+' '+'%.3f'%(ytotal[ylabel])+' '+unitstring)
        textsummaryhead.append('Total'+ylabel)
        textsummaryline.append('%.3f'%(ytotal[ylabel])+' '+unitstring)
        head.append(ylabel)
    if textoutput:
        csvreport=csvReporter.csvReporter(textoutput)
        csvreport.writeRow(head)
        allruns=[int(t[0]) for t in rawdata[referenceLabel]]
        allstarts=[ lut.DatetimeToStr(t[1],customfm='%Y-%m-%d %H:%M:%S') for t in rawdata[referenceLabel] ]
        allstops=[ lut.DatetimeToStr(t[2],customfm='%Y-%m-%d %H:%M:%S') for t in rawdata[referenceLabel] ]
        flat.insert(0,allruns)
        flat.insert(1,allstarts)
        flat.insert(2,allstops)
        rows=list(zip(*flat))
        csvreport.writeRows([list(t) for t in rows])
        csvreport.writeRow(textsummaryhead)
        csvreport.writeRow(textsummaryline)
    #annotations
    trans=matplotlib.transforms.BlendedGenericTransform(ax.transData,ax.transAxes)
    if withannotation:
        runs=[t[0] for t in rawdata[referenceLabel]]
        ax.text(xpoints[0],1.025,str(runs[0]),transform=trans,horizontalalignment='left',size='x-small',color='green',bbox=dict(facecolor='white'))
        ax.text(xpoints[-1],1.025,str(runs[-1]),transform=trans,horizontalalignment='left',size='x-small',color='green',bbox=dict(facecolor='white'))
    if yearStrMin==yearStrMax:
        # NOTE(review): indexes entry [1], but the original comment says the
        # *first* run's begin time -- possibly should be [0][1]; left
        # unchanged to preserve behavior
        firsttimeStr=rawdata[referenceLabel][1][1].strftime('%b %d %H:%M') #time range(start) in the title is the first run beg time
        lasttimeStr=rawdata[referenceLabel][-1][2].strftime('%b %d %H:%M') #time range(stop) in the tile is the last run stop time
        ax.set_title('CMS Total Integrated Luminosity '+yearStrMin+' ('+firsttimeStr+' - '+lasttimeStr+' UTC)',size='small')
    else:
        ax.set_title('CMS Total Integrated Luminosity '+yearStrMin+'-'+yearStrMax,size='small')
    ax.legend(tuple(legendlist),loc='upper left')
    ax.autoscale_view(tight=True,scalex=True,scaley=False)
    self.__fig.autofmt_xdate(bottom=0.18,rotation=15,ha='right')
    self.__fig.subplots_adjust(bottom=0.2,left=0.15)
def plotSumX_Run(self,rawdata=None,resultlines=[],minRun=None,maxRun=None,nticks=6,yscale='linear',withannotation=False,referenceLabel='Delivered',labels=['Delivered','Recorded'],textoutput=None):
    '''
    Plot cumulative luminosity as a function of run number.

    input:
      rawdata = {'Delivered':[(runnumber,lumiperrun),..],'Recorded':[(runnumber,lumiperrun),..]}
      resultlines = [[runnumber,dellumiperrun,reclumiperrun],...]
      minRun : minimal runnumber required
      maxRun : max runnumber required
      yscale: 'linear' or 'log'
      withannotation: whether the boundary points should be annotated
      referenceLabel: the one variable that decides the total unit and the plot x-axis range
      labels: labels of the variables to plot
      textoutput: text output file name.

    raises:
      ValueError if yscale is neither 'linear' nor 'log'
    '''
    # rawdata is mutated via setdefault below; a None default avoids the
    # shared-mutable-default pitfall of the previous rawdata={} signature
    if rawdata is None:
        rawdata={}
    ypoints={}
    ytotal={}
    for r in resultlines:#parse old text data
        runnumber=int(r[0])
        if rawdata and runnumber in [t[0] for t in rawdata[referenceLabel]]:
            continue#use text input only if not in selected data
        if minRun and runnumber<minRun:
            continue
        if maxRun and runnumber>maxRun:
            continue
        for i,lab in enumerate(labels):
            # NOTE(review): the extra -1 skips the row's final field;
            # presumably run-mode text rows carry a trailing column -- confirm
            v=float(r[-(len(labels)-i)-1])
            rawdata.setdefault(lab,[]).append((runnumber,v))
    if not rawdata:
        print('[WARNING]: no data to plot , exit')
        return
    tot=sum([t[1] for t in rawdata[referenceLabel]])
    (unitstring,denomitor)=guessLumiUnit(tot)
    csvreport=None
    rows=[]
    flat=[]
    for label,yvalues in rawdata.items():
        yvalues.sort()
        flat.append([t[1] for t in yvalues])
        ypoints[label]=[]
        ytotal[label]=0.0
        lumivals=[t[1] for t in yvalues]
        for i,val in enumerate(lumivals):
            ypoints[label].append(sum(lumivals[0:i+1])/denomitor)#integrated lumi
        ytotal[label]=sum(lumivals)/denomitor
    xpoints=[t[0] for t in rawdata[referenceLabel]]
    ax=self.__fig.add_subplot(111)
    if yscale=='linear':
        ax.set_yscale('linear')
    elif yscale=='log':
        ax.set_yscale('log')
    else:
        # was: raise 'unsupported yscale ',yscale -- raising a string is
        # illegal; raise a proper exception type instead
        raise ValueError('unsupported yscale '+str(yscale))
    ax.set_xlabel(r'Run',position=(0.95,0))
    ax.set_ylabel(r'L '+unitstring,position=(0,0.9))
    xticklabels=ax.get_xticklabels()
    for tx in xticklabels:
        tx.set_rotation(30)
    majorLocator=matplotlib.ticker.LinearLocator( nticks )
    majorFormatter=matplotlib.ticker.FormatStrFormatter('%d')
    minorLocator=matplotlib.ticker.LinearLocator(numticks=4*nticks)
    ax.xaxis.set_major_locator(majorLocator)
    ax.xaxis.set_major_formatter(majorFormatter)
    ax.xaxis.set_minor_locator(minorLocator)
    ax.set_xbound(lower=xpoints[0],upper=xpoints[-1])
    ax.grid(True)
    keylist=sorted(ypoints.keys())
    keylist.insert(0,keylist.pop(keylist.index(referenceLabel)))#move referenceLabel to front from now on
    legendlist=[]
    head=['#Run']
    textsummaryhead=['#TotalRun']
    textsummaryline=['#'+str(len(xpoints))]
    for ylabel in keylist:
        cl='k'
        if ylabel in self.colormap:
            cl=self.colormap[ylabel]
        ax.plot(xpoints,ypoints[ylabel],label=ylabel,color=cl,drawstyle='steps')
        legendlist.append(ylabel+' '+'%.3f'%(ytotal[ylabel])+' '+unitstring)
        textsummaryhead.append('Total'+ylabel)
        textsummaryline.append('%.3f'%(ytotal[ylabel])+' '+unitstring)
        head.append(ylabel)
    if textoutput:
        csvreport=csvReporter.csvReporter(textoutput)
        csvreport.writeRow(head)
        allruns=[int(t[0]) for t in rawdata[referenceLabel]]
        flat.insert(0,allruns)
        rows=list(zip(*flat))
        csvreport.writeRows([list(t) for t in rows])
        csvreport.writeRow(textsummaryhead)
        csvreport.writeRow(textsummaryline)
    #legend
    ax.legend(tuple(legendlist),loc='upper left')
    #adjust
    self.__fig.subplots_adjust(bottom=0.18,left=0.1)
    #annotations
    if withannotation:
        trans=matplotlib.transforms.BlendedGenericTransform(ax.transData,ax.transAxes)
        ax.text(xpoints[0],1.025,str(xpoints[0]),transform=trans,horizontalalignment='left',size='x-small',color='green',bbox=dict(facecolor='white'))
        ax.text(xpoints[-1],1.025,str(xpoints[-1]),transform=trans,horizontalalignment='left',size='x-small',color='green',bbox=dict(facecolor='white'))
def plotSumX_Fill(self,rawdata=None,resultlines=[],minFill=None,maxFill=None,nticks=6,yscale='linear',withannotation=False,referenceLabel='Delivered',labels=['Delivered','Recorded'],textoutput=None):
    '''
    Plot cumulative luminosity as a function of LHC fill number.

    input:
      rawdata = {'Delivered':[(fill,runnumber,lumiperrun)],'Recorded':[(fill,runnumber,lumiperrun)]}
      resultlines = [[fillnumber,runnumber,dellumiperrun,reclumiperrun],...]
      minFill : min fill to draw
      maxFill : max fill to draw
      yscale: 'linear' or 'log'
      withannotation: whether the boundary points should be annotated
      referenceLabel: the variable that decides the total unit and the x-axis range
      labels: labels of the variables to plot
      textoutput: text output file name.

    raises:
      ValueError if yscale is neither 'linear' nor 'log'
    '''
    # rawdata is mutated via setdefault below; a None default avoids the
    # shared-mutable-default pitfall of the previous rawdata={} signature
    if rawdata is None:
        rawdata={}
    ytotal={}
    ypoints={}
    for r in resultlines: #parse old text data
        fillnum=int(r[0])
        runnum=int(r[1])
        # skip fills/runs already present in the freshly queried data
        if rawdata and (fillnum,runnum) in [(t[0],t[1]) for t in rawdata[referenceLabel]]:
            continue
        if minFill and fillnum<minFill:
            continue
        if maxFill and fillnum>maxFill:
            continue
        for i,lab in enumerate(labels):
            v=float(r[-(len(labels)-i)])#the values to plot are always the last n fields
            rawdata.setdefault(lab,[]).append((fillnum,runnum,v))
    if not rawdata:
        print('[WARNING]: no data, do nothing')
        return
    tot=sum([t[2] for t in rawdata[referenceLabel]])
    beginfo=''
    endinfo=''
    (unitstring,denomitor)=guessLumiUnit(tot)
    csvreport=None
    rows=[]
    flat=[]
    for label,yvalues in rawdata.items():
        yvalues.sort()
        flat.append([t[2] for t in yvalues])
        ypoints[label]=[]
        ytotal[label]=0.0
        lumivals=[t[2] for t in yvalues]
        for i,val in enumerate(lumivals):
            # integrated lumi up to and including this entry
            ypoints[label].append(sum(lumivals[0:i+1])/denomitor)
        ytotal[label]=sum(lumivals)/denomitor
    xpoints=[t[0] for t in rawdata[referenceLabel]]#after sort
    ax=self.__fig.add_subplot(111)
    ax.set_xlabel(r'LHC Fill Number',position=(0.84,0))
    ax.set_ylabel(r'L '+unitstring,position=(0,0.9))
    ax.set_xbound(lower=xpoints[0],upper=xpoints[-1])
    if yscale=='linear':
        ax.set_yscale('linear')
    elif yscale=='log':
        ax.set_yscale('log')
    else:
        # was: raise 'unsupported yscale ',yscale -- raising a string is
        # illegal; raise a proper exception type instead
        raise ValueError('unsupported yscale '+str(yscale))
    xticklabels=ax.get_xticklabels()
    majorLocator=matplotlib.ticker.LinearLocator( nticks )
    majorFormatter=matplotlib.ticker.FormatStrFormatter('%d')
    ax.xaxis.set_major_locator(majorLocator)
    ax.xaxis.set_major_formatter(majorFormatter)
    ax.grid(True)
    keylist=sorted(ypoints.keys())
    keylist.insert(0,keylist.pop(keylist.index(referenceLabel)))#move referenceLabel to front from now on
    legendlist=[]
    head=['#fill','run']
    textsummaryhead=['#TotalFill']
    textsummaryline=['#'+str(len(xpoints))]
    for ylabel in keylist:
        cl='k'
        if ylabel in self.colormap:
            cl=self.colormap[ylabel]
        ax.plot(xpoints,ypoints[ylabel],label=ylabel,color=cl,drawstyle='steps')
        legendlist.append(ylabel+' '+'%.3f'%(ytotal[ylabel])+' '+unitstring)
        textsummaryhead.append('Total'+ylabel)
        textsummaryline.append('%.3f'%(ytotal[ylabel])+' '+unitstring)
        head.append(ylabel)
    if textoutput:
        csvreport=csvReporter.csvReporter(textoutput)
        allfills=[int(t[0]) for t in rawdata[referenceLabel]]
        allruns=[int(t[1]) for t in rawdata[referenceLabel]]
        flat.insert(0,allfills)
        flat.insert(1,allruns)
        rows=list(zip(*flat))
        csvreport.writeRow(head)
        csvreport.writeRows([list(t) for t in rows])
        csvreport.writeRow(textsummaryhead)
        csvreport.writeRow(textsummaryline)
    #annotations
    if withannotation:
        trans=matplotlib.transforms.BlendedGenericTransform(ax.transData,ax.transAxes)
        ax.text(xpoints[0],1.025,beginfo,transform=trans,horizontalalignment='left',size='x-small',color='green',bbox=dict(facecolor='white'))
        ax.text(xpoints[-1],1.025,endinfo,transform=trans,horizontalalignment='left',size='x-small',color='green',bbox=dict(facecolor='white'))
    #legend
    ax.legend(tuple(legendlist),loc='upper left')
    #adjust
    self.__fig.subplots_adjust(bottom=0.1,left=0.1)
def main(): allowedscales=['linear','log','both'] c=constants() parser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]),description="Plot integrated luminosity as function of the time variable of choice",formatter_class=argparse.ArgumentDefaultsHelpFormatter) # add required arguments parser.add_argument('-c',dest='connect',action='store',required=True,help='connect string to lumiDB') # add optional arguments parser.add_argument('-P',dest='authpath',action='store',help='path to authentication file') parser.add_argument('-n',dest='normfactor',action='store',help='normalization factor (optional, default to 1.0)') parser.add_argument('-i',dest='inputfile',action='store',help='lumi range selection file (optional)') parser.add_argument('-o',dest='outputfile',action='store',help='csv outputfile name (optional)') parser.add_argument('-lumiversion',dest='lumiversion',default='0001',action='store',required=False,help='lumi data version') parser.add_argument('-begin',dest='begin',action='store',help='begin value of x-axi (required)') parser.add_argument('-end',dest='end',action='store',help='end value of x-axi (optional). Default to the maximum exists DB') parser.add_argument('-beamenergy',dest='beamenergy',action='store',type=float,required=False,help='beamenergy (in GeV) selection criteria,e.g. 3.5e3') parser.add_argument('-beamfluctuation',dest='beamfluctuation',action='store',type=float,required=False,help='allowed fraction of beamenergy to fluctuate, e.g. 0.1') parser.add_argument('-beamstatus',dest='beamstatus',action='store',required=False,help='selection criteria beam status,e.g. STABLE BEAMS') parser.add_argument('-yscale',dest='yscale',action='store',required=False,default='linear',help='y_scale') parser.add_argument('-hltpath',dest='hltpath',action='store',help='specific hltpath to calculate the recorded luminosity. 
If specified aoverlays the recorded luminosity for the hltpath on the plot') parser.add_argument('-batch',dest='batch',action='store',help='graphical mode to produce PNG file. Specify graphical file here. Default to lumiSum.png') parser.add_argument('--annotateboundary',dest='annotateboundary',action='store_true',help='annotate boundary run numbers') parser.add_argument('--interactive',dest='interactive',action='store_true',help='graphical mode to draw plot in a TK pannel.') parser.add_argument('-timeformat',dest='timeformat',action='store',help='specific python timeformat string (optional). Default mm/dd/yy hh:min:ss.00') parser.add_argument('-siteconfpath',dest='siteconfpath',action='store',help='specific path to site-local-config.xml file, default to $CMS_PATH/SITECONF/local/JobConfig, if path undefined, fallback to cern proxy&server') parser.add_argument('action',choices=['run','fill','time','perday'],help='x-axis data type of choice') #graphical mode options parser.add_argument('--with-correction',dest='withFineCorrection',action='store_true',help='with fine correction') parser.add_argument('--verbose',dest='verbose',action='store_true',help='verbose mode, print result also to screen') parser.add_argument('--debug',dest='debug',action='store_true',help='debug') # parse arguments batchmode=True args=parser.parse_args() connectstring=args.connect begvalue=args.begin endvalue=args.end beamstatus=args.beamstatus beamenergy=args.beamenergy beamfluctuation=args.beamfluctuation xaxitype='run' connectparser=connectstrParser.connectstrParser(connectstring) connectparser.parse() usedefaultfrontierconfig=False cacheconfigpath='' if connectparser.needsitelocalinfo(): if not args.siteconfpath: cacheconfigpath=os.environ['CMS_PATH'] if cacheconfigpath: cacheconfigpath=os.path.join(cacheconfigpath,'SITECONF','local','JobConfig','site-local-config.xml') else: usedefaultfrontierconfig=True else: cacheconfigpath=args.siteconfpath 
cacheconfigpath=os.path.join(cacheconfigpath,'site-local-config.xml') p=cacheconfigParser.cacheconfigParser() if usedefaultfrontierconfig: p.parseString(c.defaultfrontierConfigString) else: p.parse(cacheconfigpath) connectstring=connectparser.fullfrontierStr(connectparser.schemaname(),p.parameterdict()) #print 'connectstring',connectstring runnumber=0 svc = coral.ConnectionService() hltpath='' if args.hltpath: hltpath=args.hltpath if args.debug : msg=coral.MessageStream('') msg.setMsgVerbosity(coral.message_Level_Debug) ifilename='' ofilename='integratedlumi.png' timeformat='' if args.authpath and len(args.authpath)!=0: os.environ['CORAL_AUTH_PATH']=args.authpath if args.normfactor: c.NORM=float(args.normfactor) if args.lumiversion: c.LUMIVERSION=args.lumiversion if args.verbose: c.VERBOSE=True if args.inputfile: ifilename=args.inputfile if args.batch: opicname=args.batch if args.outputfile: ofilename=args.outputfile if args.timeformat: timeformat=args.timeformat session=svc.connect(connectstring,accessMode=coral.access_Update) session.typeConverter().setCppTypeForSqlType("unsigned int","NUMBER(10)") session.typeConverter().setCppTypeForSqlType("unsigned long long","NUMBER(20)") inputfilecontent='' fileparsingResult='' runList=[] runDict={} fillDict={} selectionDict={} minTime='' maxTime='' startRunTime='' stopRunTime='' #if len(ifilename)!=0 : # f=open(ifilename,'r') # inputfilecontent=f.read() # sparser=selectionParser.selectionParser(inputfilecontent) # runsandls=sparser.runsandls() # keylist=runsandls.keys() # keylist.sort() # for run in keylist: # if selectionDict.has_key(run): # lslist=runsandls[run] # lslist.sort() # selectionDict[run]=lslist if len(ifilename)!=0: ifparser=inputFilesetParser.inputFilesetParser(ifilename) runsandls=ifparser.runsandls() keylist=runsandls.keys() keylist.sort() for run in keylist: if not selectionDict.has_key(run): lslist=runsandls[run] lslist.sort() selectionDict[run]=lslist if args.action == 'run': if not args.end: 
session.transaction().start(True) schema=session.nominalSchema() lastrun=max(lumiQueryAPI.allruns(schema,requireRunsummary=True,requireLumisummary=True,requireTrg=True,requireHlt=False)) session.transaction().commit() else: lastrun=int(args.end) for r in range(int(args.begin),lastrun+1): runList.append(r) runList.sort() elif args.action == 'fill': session.transaction().start(True) maxfill=None if not args.end: qHandle=session.nominalSchema().newQuery() maxfill=max(lumiQueryAPI.allfills(qHandle,filtercrazy=True)) del qHandle else: maxfill=int(args.end) qHandle=session.nominalSchema().newQuery() fillDict=lumiQueryAPI.runsByfillrange(qHandle,int(args.begin),maxfill) del qHandle session.transaction().commit() #print 'fillDict ',fillDict for fill in range(int(args.begin),maxfill+1): if fillDict.has_key(fill): #fill exists for run in fillDict[fill]: runList.append(run) runList.sort() elif args.action == 'time' or args.action == 'perday': session.transaction().start(True) t=lumiTime.lumiTime() minTime=t.StrToDatetime(args.begin,timeformat) if not args.end: maxTime=datetime.datetime.utcnow() #to now else: maxTime=t.StrToDatetime(args.end,timeformat) #print minTime,maxTime qHandle=session.nominalSchema().newQuery() runDict=lumiQueryAPI.runsByTimerange(qHandle,minTime,maxTime)#xrawdata session.transaction().commit() runList=runDict.keys() del qHandle runList.sort() if len(runList)!=0: runmin=min(runList) runmax=max(runList) startRunTime=runDict[runmin][0] stopRunTime=runDict[runmax][1] #print 'run list: ',runDict else: print 'unsupported action ',args.action exit finecorrections=None if args.withFineCorrection: schema=session.nominalSchema() session.transaction().start(True) finecorrections=lumiCorrections.correctionsForRange(schema,runList) session.transaction().commit() #print 'runList ',runList #print 'runDict ', runDict fig=Figure(figsize=(7.2,5.4),dpi=120) m=matplotRender.matplotRender(fig) logfig=Figure(figsize=(7.5,5.7),dpi=135) 
mlog=matplotRender.matplotRender(logfig) if args.action == 'run': result={} result=getLumiInfoForRuns(session,c,runList,selectionDict,hltpath,beamstatus=beamstatus,beamenergy=beamenergy,beamfluctuation=beamfluctuation,finecorrections=finecorrections) xdata=[] ydata={} ydata['Delivered']=[] ydata['Recorded']=[] keylist=result.keys() keylist.sort() #must be sorted in order if args.outputfile: reporter=csvReporter.csvReporter(ofilename) fieldnames=['run','delivered','recorded'] reporter.writeRow(fieldnames) for run in keylist: xdata.append(run) delivered=result[run][0] recorded=result[run][1] ydata['Delivered'].append(delivered) ydata['Recorded'].append(recorded) if args.outputfile and (delivered!=0 or recorded!=0): reporter.writeRow([run,result[run][0],result[run][1]]) m.plotSumX_Run(xdata,ydata,yscale='linear') mlog.plotSumX_Run(xdata,ydata,yscale='log') elif args.action == 'fill': lumiDict={} lumiDict=getLumiInfoForRuns(session,c,runList,selectionDict,hltpath,beamstatus=beamstatus,beamenergy=beamenergy,beamfluctuation=beamfluctuation,finecorrections=finecorrections) xdata=[] ydata={} ydata['Delivered']=[] ydata['Recorded']=[] #keylist=lumiDict.keys() #keylist.sort() if args.outputfile: reporter=csvReporter.csvReporter(ofilename) fieldnames=['fill','run','delivered','recorded'] reporter.writeRow(fieldnames) fills=fillDict.keys() fills.sort() for fill in fills: runs=fillDict[fill] runs.sort() for run in runs: xdata.append(run) ydata['Delivered'].append(lumiDict[run][0]) ydata['Recorded'].append(lumiDict[run][1]) if args.outputfile : reporter.writeRow([fill,run,lumiDict[run][0],lumiDict[run][1]]) #print 'input fillDict ',len(fillDict.keys()),fillDict m.plotSumX_Fill(xdata,ydata,fillDict,yscale='linear') mlog.plotSumX_Fill(xdata,ydata,fillDict,yscale='log') elif args.action == 'time' : lumiDict={} 
lumiDict=getLumiInfoForRuns(session,c,runList,selectionDict,hltpath,beamstatus=beamstatus,beamenergy=beamenergy,beamfluctuation=beamfluctuation,finecorrections=finecorrections) #lumiDict=getLumiInfoForRuns(session,c,runList,selectionDict,hltpath,beamstatus='STABLE BEAMS') xdata={}#{run:[starttime,stoptime]} ydata={} ydata['Delivered']=[] ydata['Recorded']=[] keylist=lumiDict.keys() keylist.sort() if args.outputfile: reporter=csvReporter.csvReporter(ofilename) fieldnames=['run','starttime','stoptime','delivered','recorded'] reporter.writeRow(fieldnames) for run in keylist: ydata['Delivered'].append(lumiDict[run][0]) ydata['Recorded'].append(lumiDict[run][1]) starttime=runDict[run][0] stoptime=runDict[run][1] xdata[run]=[starttime,stoptime] if args.outputfile : reporter.writeRow([run,starttime,stoptime,lumiDict[run][0],lumiDict[run][1]]) m.plotSumX_Time(xdata,ydata,startRunTime,stopRunTime,hltpath=hltpath,annotateBoundaryRunnum=args.annotateboundary,yscale='linear') mlog.plotSumX_Time(xdata,ydata,startRunTime,stopRunTime,hltpath=hltpath,annotateBoundaryRunnum=args.annotateboundary,yscale='log') elif args.action == 'perday': daydict={}#{day:[[run,cmslsnum,lsstarttime,delivered,recorded]]} lumibyls=getLumiOrderByLS(session,c,runList,selectionDict,hltpath,beamstatus=beamstatus,beamenergy=beamenergy,beamfluctuation=beamfluctuation,finecorrections=finecorrections) #lumibyls [[runnumber,runstarttime,lsnum,lsstarttime,delivered,recorded,recordedinpath]] if args.outputfile: reporter=csvReporter.csvReporter(ofilename) fieldnames=['day','begrunls','endrunls','delivered','recorded'] reporter.writeRow(fieldnames) beginfo=[lumibyls[0][3],str(lumibyls[0][0])+':'+str(lumibyls[0][2])] endinfo=[lumibyls[-1][3],str(lumibyls[-1][0])+':'+str(lumibyls[-1][2])] for perlsdata in lumibyls: lsstarttime=perlsdata[3] delivered=perlsdata[4] recorded=perlsdata[5] day=lsstarttime.toordinal() if not daydict.has_key(day): daydict[day]=[] daydict[day].append([delivered,recorded]) days=daydict.keys() 
days.sort() daymin=days[0] daymax=days[-1] #alldays=range(daymin,daymax+1) resultbyday={} resultbyday['Delivered']=[] resultbyday['Recorded']=[] #for day in days: #print 'day min ',daymin #print 'day max ',daymax for day in range(daymin,daymax+1): if not daydict.has_key(day): delivered=0.0 recorded=0.0 else: daydata=daydict[day] mytransposed=CommonUtil.transposed(daydata,defaultval=0.0) delivered=sum(mytransposed[0]) recorded=sum(mytransposed[1]) resultbyday['Delivered'].append(delivered) resultbyday['Recorded'].append(recorded) if args.outputfile: reporter.writeRow([day,beginfo[1],endinfo[1],delivered,recorded]) #print 'beginfo ',beginfo #print 'endinfo ',endinfo #print resultbyday m.plotPerdayX_Time( range(daymin,daymax+1) ,resultbyday,startRunTime,stopRunTime,boundaryInfo=[beginfo,endinfo],annotateBoundaryRunnum=args.annotateboundary,yscale='linear') mlog.plotPerdayX_Time( range(daymin,daymax+1),resultbyday,startRunTime,stopRunTime,boundaryInfo=[beginfo,endinfo],annotateBoundaryRunnum=args.annotateboundary,yscale='log') else: raise Exception,'must specify the type of x-axi' del session del svc if args.batch and args.yscale=='linear': m.drawPNG(args.batch) elif args.batch and args.yscale=='log': mlog.drawPNG(args.batch) elif args.batch and args.yscale=='both': m.drawPNG(args.batch) basename,extension=os.path.splitext(args.batch) logfilename=basename+'_log'+extension mlog.drawPNG(logfilename) else: if not args.interactive: return if args.interactive is True and args.yscale=='linear': m.drawInteractive() elif args.interactive is True and args.yscale=='log': mlog.drawInteractive() else: raise Exception('unsupported yscale for interactive mode : '+args.yscale)
def plotPeakPerday_Time(self,rawdata={},resultlines=[],minTime=None,maxTime=None,nticks=6,withannotation=False,yscale='linear',referenceLabel='Delivered',labels=['Delivered'],textoutput=None): ''' THIS PLOT IS DELIVERED ONLY Input: rawdata={'Delivered':[(day,run,ls,instlumi)]} resultlines=[[day,run,ls,maxinstlum],[]] minTime (python DateTime) : min *begin* time to draw: format %m/%d/%y %H:%M:%S maxTime (python DateTime): max *begin* time to draw %m/%d/%y %H:%M:%S withannotation: wheather the boundary points should be annotated referenceLabel: the one variable that decides the total unit and the plot x-axis range labels: labels of the variables to plot ''' xpoints=[] ypoints={} legendlist=[] maxinfo='' ymax={} lut=lumiTime.lumiTime() if not minTime: minTime='03/01/10 00:00:00' minTime=lut.StrToDatetime(minTime,customfm='%m/%d/%y %H:%M:%S') if not maxTime: maxTime=datetime.datetime.utcnow() else: maxTime=lut.StrToDatetime(maxTime,customfm='%m/%d/%y %H:%M:%S') for r in resultlines: day=int(r[0]) runnumber=int(r[1]) lsnum=int(r[2].split('.')[0]) if rawdata and day in [int(t[0]) for t in rawdata[referenceLabel]]:continue if day < minTime.date().toordinal():continue if day > maxTime.date().toordinal():continue for i,lab in enumerate(labels): v=float(r[-(len(labels)-i)-1]) rawdata.setdefault(lab,[]).append((day,runnumber,lsnum,v)) if not rawdata: print '[WARNING]: no data, do nothing' return maxlum=max([t[3] for t in rawdata[referenceLabel]]) minlum=min([t[3] for t in rawdata[referenceLabel] if t[3]>0]) #used only for log scale, fin the non-zero bottom (unitstring,denomitor)=guessInstLumiUnit(maxlum) csvreport=None rows=[] flat=[] MinDay=minTime.date().toordinal() MaxDay=maxTime.date().toordinal() fulldays=range(MinDay,MaxDay+1) for label in rawdata.keys(): yvalues=rawdata[label] yvalues.sort()#sort by day alldays=[t[0] for t in yvalues] alldates=[str(datetime.date.fromordinal(t)) for t in alldays] ypoints[label]=[] lumivals=[t[3] for t in yvalues] flat.append(lumivals) 
for d in fulldays: if not d in alldays: ypoints[label].append(0.0) else: thisdaylumi=[t[3] for t in yvalues if t[0]==d][0] if yscale=='log': if thisdaylumi<minlum: thisdaylumi=minlum/denomitor else: thisdaylumi=thisdaylumi/denomitor else: thisdaylumi=thisdaylumi/denomitor ypoints[label].append(thisdaylumi) ymax[label]=max(lumivals)/denomitor 'ymax ',max(lumivals) xpoints=fulldays if textoutput: csvreport=csvReporter.csvReporter(textoutput) head=['#day','run','lsnum','maxinstlumi','date'] csvreport.writeRow(head) flat.insert(0,alldays) allruns=[ t[1] for t in rawdata[referenceLabel]] allls=[ t[2] for t in rawdata[referenceLabel]] flat.insert(1,allruns) flat.insert(2,allls) flat.append(alldates) rows=zip(*flat) csvreport.writeRows([list(t) for t in rows]) yearStrMin=minTime.strftime('%Y') yearStrMax=maxTime.strftime('%Y') if yearStrMin==yearStrMax: dateFmt=matplotlib.dates.DateFormatter('%d/%m') else: dateFmt=matplotlib.dates.DateFormatter('%d/%m/%y') ax=self.__fig.add_subplot(111) if yscale=='linear': ax.set_yscale('linear') elif yscale=='log': ax.set_yscale('log') else: raise 'unsupported yscale ',yscale majorLoc=matplotlib.ticker.LinearLocator(numticks=nticks) minorLoc=matplotlib.ticker.LinearLocator(numticks=nticks*4) ax.xaxis.set_major_formatter(dateFmt) ax.set_xlabel(r'Date',position=(0.84,0)) ax.set_ylabel(r'L '+unitstring,position=(0,0.9)) ax.xaxis.set_major_locator(majorLoc) ax.xaxis.set_minor_locator(minorLoc) xticklabels=ax.get_xticklabels() for tx in xticklabels: tx.set_horizontalalignment('right') ax.grid(True) cl=self.colormap['Max Inst'] textsummaryhead=['#TotalRunningDays'] textsummaryline=['#'+str(len(alldays))] for ylabel in labels: cl='k' if self.colormap.has_key(ylabel): cl=self.colormap[ylabel] ax.plot(xpoints,ypoints[ylabel],label='Max Inst',color=cl,drawstyle='steps') legendlist.append('Max Inst %.3f'%(ymax[ylabel])+' '+unitstring) textsummaryhead.append('Max Inst'+ylabel) textsummaryline.append('%.3f'%(ymax[ylabel])+' '+unitstring) if 
textoutput: csvreport.writeRow(textsummaryhead) csvreport.writeRow(textsummaryline) ax.legend(tuple(legendlist),loc='upper left') ax.set_xbound(lower=matplotlib.dates.date2num(minTime),upper=matplotlib.dates.date2num(maxTime)) if withannotation: #annotations trans=matplotlib.transforms.BlendedGenericTransform(ax.transData,ax.transAxes) ax.text(xpoints[0],1.025,beginfo,transform=trans,horizontalalignment='left',size='x-small',color='green',bbox=dict(facecolor='white')) ax.text(xpoints[-1],1.025,endinfo,transform=trans,horizontalalignment='left',size='x-small',color='green',bbox=dict(facecolor='white')) ax.annotate(maxinfo,xy=(xmax,ymax),xycoords='data',xytext=(0,13),textcoords='offset points',arrowprops=dict(facecolor='green',shrink=0.05),size='x-small',horizontalalignment='center',color='green',bbox=dict(facecolor='white')) firstday=datetime.date.fromordinal(rawdata[referenceLabel][0][0]) lastday=datetime.date.fromordinal(rawdata[referenceLabel][-1][0]) firstdayStr=firstday.strftime('%Y %b %d') lastdayStr=lastday.strftime('%Y %b %d') ax.set_title('CMS Peak Luminosity/Day ('+firstdayStr+' - '+lastdayStr+')',size='small') #ax.autoscale(tight=True) ax.autoscale_view(tight=True,scalex=True,scaley=False) #ax.set_xmargin(0.015) self.__fig.autofmt_xdate(bottom=0.18,rotation=15,ha='right') self.__fig.subplots_adjust(bottom=0.2,left=0.15)
def main(): allowedscales=['linear','log','both'] c=constants() parser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]),description="Plot integrated luminosity as function of the time variable of choice") # add required arguments parser.add_argument('-c',dest='connect',action='store',required=True,help='connect string to lumiDB') # add optional arguments parser.add_argument('-P',dest='authpath',action='store',help='path to authentication file') parser.add_argument('-n',dest='normfactor',action='store',help='normalization factor (optional, default to 1.0)') parser.add_argument('-i',dest='inputfile',action='store',help='lumi range selection file (optional)') parser.add_argument('-o',dest='outputfile',action='store',help='csv outputfile name (optional)') parser.add_argument('-b',dest='beammode',action='store',help='beam mode, optional, no default') parser.add_argument('-lumiversion',dest='lumiversion',action='store',help='lumi data version, optional for all, default 0001') parser.add_argument('-begin',dest='begin',action='store',help='begin xvalue (required)') parser.add_argument('-end',dest='end',action='store',help='end xvalue(optional). Default to the maximum exists DB') parser.add_argument('-batch',dest='batch',action='store',help='graphical mode to produce PNG file. Specify graphical file here. Default to lumiSum.png') parser.add_argument('-yscale',dest='yscale',action='store',required=False,default='linear',help='y_scale') parser.add_argument('--interactive',dest='interactive',action='store_true',help='graphical mode to draw plot in a TK pannel.') parser.add_argument('-timeformat',dest='timeformat',action='store',help='specific python timeformat string (optional). 
Default mm/dd/yy hh:min:ss.00') parser.add_argument('-siteconfpath',dest='siteconfpath',action='store',help='specific path to site-local-config.xml file, default to $CMS_PATH/SITECONF/local/JobConfig, if path undefined, fallback to cern proxy&server') parser.add_argument('action',choices=['peakperday','run'],help='plot type of choice') #graphical mode options parser.add_argument('--annotateboundary',dest='annotateboundary',action='store_true',help='annotate boundary run numbers') parser.add_argument('--verbose',dest='verbose',action='store_true',help='verbose mode, print result also to screen') parser.add_argument('--with-correction',dest='withFineCorrection',action='store_true',help='with fine correction') parser.add_argument('--debug',dest='debug',action='store_true',help='debug') # parse arguments args=parser.parse_args() connectstring=args.connect connectparser=connectstrParser.connectstrParser(connectstring) connectparser.parse() usedefaultfrontierconfig=False cacheconfigpath='' if connectparser.needsitelocalinfo(): if not args.siteconfpath: cacheconfigpath=os.environ['CMS_PATH'] if cacheconfigpath: cacheconfigpath=os.path.join(cacheconfigpath,'SITECONF','local','JobConfig','site-local-config.xml') else: usedefaultfrontierconfig=True else: cacheconfigpath=args.siteconfpath cacheconfigpath=os.path.join(cacheconfigpath,'site-local-config.xml') p=cacheconfigParser.cacheconfigParser() if usedefaultfrontierconfig: p.parseString(c.defaultfrontierConfigString) else: p.parse(cacheconfigpath) connectstring=connectparser.fullfrontierStr(connectparser.schemaname(),p.parameterdict()) runnumber=0 svc = coral.ConnectionService() if args.debug : msg=coral.MessageStream('') msg.setMsgVerbosity(coral.message_Level_Debug) ifilename='' ofilename='instlumi.csv' beammode='stable' timeformat='' selectionDict={} if args.authpath and len(args.authpath)!=0: os.environ['CORAL_AUTH_PATH']=args.authpath if args.normfactor: c.NORM=float(args.normfactor) if args.lumiversion: 
c.LUMIVERSION=args.lumiversion if args.beammode: c.BEAMMODE=args.beammode if args.verbose: c.VERBOSE=True if args.inputfile: ifilename=args.inputfile if args.batch: opicname=args.batch if args.outputfile: ofilename=args.outputfile if args.timeformat: timeformat=args.timeformat session=svc.connect(connectstring,accessMode=coral.access_Update) session.typeConverter().setCppTypeForSqlType("unsigned int","NUMBER(10)") session.typeConverter().setCppTypeForSqlType("unsigned long long","NUMBER(20)") startRunTime='' stopRunTime='' if ifilename: ifparser=inputFilesetParser(ifilename) runsandls=ifparser.runsandls() keylist=runsandls.keys() keylist.sort() for run in keylist: if selectionDict.has_key(run): lslist=runsandls[run] lslist.sort() selectionDict[run]=lslist if args.action == 'run': minRun=int(args.begin) if not args.end: maxRun=minRun else: maxRun=int(args.end) runList=range(minRun,maxRun+1) elif args.action == 'peakperday': session.transaction().start(True) t=lumiTime.lumiTime() minTime=t.StrToDatetime(args.begin,timeformat) if not args.end: maxTime=datetime.datetime.utcnow() #to now else: maxTime=t.StrToDatetime(args.end,timeformat) #print minTime,maxTime qHandle=session.nominalSchema().newQuery() runDict=lumiQueryAPI.runsByTimerange(qHandle,minTime,maxTime)#xrawdata session.transaction().commit() runList=runDict.keys() del qHandle runList.sort() if len(runList)!=0: runmin=min(runList) runmax=max(runList) startRunTime=runDict[runmin][0] stopRunTime=runDict[runmax][1] else: print 'unsupported action ',args.action exit #print 'runList ',runList #print 'runDict ', runDict finecorrections=None if args.withFineCorrection: schema=session.nominalSchema() session.transaction().start(True) finecorrections=lumiCorrections.correctionsForRange(schema,runList) session.transaction().commit() fig=Figure(figsize=(6,4.5),dpi=100) m=matplotRender.matplotRender(fig) logfig=Figure(figsize=(6,4.5),dpi=100) mlog=matplotRender.matplotRender(logfig) if args.action == 'peakperday': 
l=lumiTime.lumiTime() lumiperls=getInstLumiPerLS(session,c,runList,selectionDict,finecorrections=finecorrections) if args.outputfile: reporter=csvReporter.csvReporter(ofilename) fieldnames=['day','run','lsnum','maxinstlumi'] reporter.writeRow(fieldnames) #minDay=minTime.toordinal() #maxDay=maxTime.toordinal() daydict={}#{day:[[run,lsnum,instlumi]]} result={}#{day:[maxrun,maxlsnum,maxinstlumi]} for lsdata in lumiperls: runnumber=lsdata[0] lsnum=lsdata[1] runstarttimeStr=lsdata[-2]#note: it is a string!! startorbit=lsdata[5] deliveredInst=lsdata[2] lsstarttime=l.OrbitToTime(runstarttimeStr,startorbit) day=lsstarttime.toordinal() if not daydict.has_key(day): daydict[day]=[] daydict[day].append([runnumber,lsnum,deliveredInst]) days=daydict.keys() days.sort() for day in days: daydata=daydict[day] transposeddata=CommonUtil.transposed(daydata,defaultval=0.0) todaysmaxinst=max(transposeddata[2]) todaysmaxidx=transposeddata[2].index(todaysmaxinst) todaysmaxrun=transposeddata[0][todaysmaxidx] todaysmaxls=transposeddata[1][todaysmaxidx] result[day]=[todaysmaxrun,todaysmaxls,todaysmaxinst] if args.outputfile : reporter.writeRow([day,todaysmaxrun,todaysmaxls,todaysmaxinst]) m.plotPeakPerday_Time(result,startRunTime,stopRunTime,annotateBoundaryRunnum=args.annotateboundary,yscale='linear') mlog.plotPeakPerday_Time(result,startRunTime,stopRunTime,annotateBoundaryRunnum=args.annotateboundary,yscale='log') if args.action == 'run': runnumber=runList[0] if finecorrections and finecorrections[runnumber]: lumiperrun=getLumiPerRun(session,c,runnumber,finecorrections=finecorrections[runnumber])#[[lsnumber,deliveredInst,recordedInst,norbit,startorbit,fillnum,runstarttime,runstoptime]] else: lumiperrun=getLumiPerRun(session,c,runnumber) #print 'lumiperrun ',lumiperrun xdata=[]#[runnumber,fillnum,norbit,stattime,stoptime,totalls,ncmsls] ydata={}#{label:[instlumi]} ydata['Delivered']=[] ydata['Recorded']=[] norbit=lumiperrun[0][3] fillnum=lumiperrun[0][-3] starttime=lumiperrun[0][-2] 
stoptime=lumiperrun[0][-1] ncmsls=0 totalls=len(lumiperrun) for lsdata in lumiperrun: lsnumber=lsdata[0] if lsnumber!=0: ncmsls+=1 deliveredInst=lsdata[1] recordedInst=lsdata[2] ydata['Delivered'].append(deliveredInst) ydata['Recorded'].append(recordedInst) xdata=[runnumber,fillnum,norbit,starttime,stoptime,totalls,ncmsls] m.plotInst_RunLS(xdata,ydata) del session del svc if args.batch and args.yscale=='linear': m.drawPNG(args.batch) elif args.batch and args.yscale=='log': mlog.drawPNG(args.batch) elif args.batch and args.yscale=='both': m.drawPNG(args.batch) basename,extension=os.path.splitext(args.batch) logfilename=basename+'_log'+extension mlog.drawPNG(logfilename) else: raise Exception('unsupported yscale for batch mode : '+args.yscale) if not args.interactive: return if args.interactive is True and args.yscale=='linear': m.drawInteractive() elif args.interactive is True and args.yscale=='log': mlog.drawInteractive() else: raise Exception('unsupported yscale for interactive mode : '+args.yscale)