Example n. 1
def main():
    c=constants()
    connectstring='frontier://LumiProd/CMS_LUMI_PROD'
    connectparser=connectstrParser.connectstrParser(connectstring)
    connectparser.parse()
    usedefaultfrontierconfig=False
    cacheconfigpath=''
    if connectparser.needsitelocalinfo():
        cacheconfigpath=os.environ['CMS_PATH']
        if cacheconfigpath:
            cacheconfigpath=os.path.join(cacheconfigpath,'SITECONF','local','JobConfig','site-local-config.xml')
        else:
            usedefaultfrontierconfig=True
        p=cacheconfigParser.cacheconfigParser()
        if usedefaultfrontierconfig:
            p.parseString(c.defaultfrontierConfigString)
        else:
            p.parse(cacheconfigpath)
        connectstring=connectparser.fullfrontierStr(connectparser.schemaname(),p.parameterdict())
    svc = coral.ConnectionService()
    session=svc.connect(connectstring,accessMode=coral.access_Update)
    session.typeConverter().setCppTypeForSqlType("unsigned int","NUMBER(10)")
    session.typeConverter().setCppTypeForSqlType("unsigned long long","NUMBER(20)")
    deliveredLumiForRunSection(session,c,140182,2)
    del session
    del svc
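
Note: every example in this listing repeats the same frontier connect-string resolution boilerplate. Below is a minimal sketch of that pattern factored into one helper; the helper name, the defaultfrontierconfig parameter, and the import path are assumptions (the snippets never show their import statements), while the parser calls are exactly those used in the examples.

# Sketch only, not part of the original examples: resolve a frontier
# connect string via site-local-config.xml, mirroring the boilerplate
# repeated in every example of this listing.
import os
# import path assumed; the snippets above do not show their imports
from RecoLuminosity.LumiDB import connectstrParser, cacheconfigParser

def resolve_connect_string(connectstring, siteconfpath=None,
                           defaultfrontierconfig=None):
    parser = connectstrParser.connectstrParser(connectstring)
    parser.parse()
    if not parser.needsitelocalinfo():
        return connectstring
    p = cacheconfigParser.cacheconfigParser()
    if siteconfpath:
        p.parse(os.path.join(siteconfpath, 'site-local-config.xml'))
    else:
        cmspath = os.environ.get('CMS_PATH', '')
        if cmspath:
            p.parse(os.path.join(cmspath, 'SITECONF', 'local',
                                 'JobConfig', 'site-local-config.xml'))
        else:
            # caller-supplied fallback frontier configuration string
            p.parseString(defaultfrontierconfig)
    return parser.fullfrontierStr(parser.schemaname(), p.parameterdict())
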
Example n. 2
def main():


    # First of all we retrieve the arguments
    
    c=constants()
    parser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]),description="Dump Run info")
    parser.add_argument('-c',dest='connect',action='store',required=True,help='connect string to trigger DB (required)')
    parser.add_argument('-r',dest='runnumber',action='store',required=True,help='run number')

    args      = parser.parse_args()

    connectstring=args.connect
    connectparser=connectstrParser.connectstrParser(connectstring)
    connectparser.parse()
    usedefaultfrontierconfig=False
    cacheconfigpath=''
    if connectparser.needsitelocalinfo():
        
        cacheconfigpath=os.environ['CMS_PATH']
        cacheconfigpath=os.path.join(cacheconfigpath,'SITECONF','local','JobConfig','site-local-config.xml')

        p=cacheconfigParser.cacheconfigParser()
        p.parse(cacheconfigpath)
        connectstring=connectparser.fullfrontierStr(connectparser.schemaname(),p.parameterdict())


    svc=coral.ConnectionService()
    session=svc.connect(connectstring,accessMode=coral.access_ReadOnly)

    getRunInfo(session,c,args.runnumber,0)
    
    del session
    del svc
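
The examples open a CORAL session and rely on 'del session; del svc' for cleanup, which is skipped if the query code raises. A small sketch (not from the original code) of the same open/use/release pattern wrapped in try/finally; the helper name and the 'work' callable are hypothetical, the coral calls are those used above.

import coral  # CORAL Python bindings, as used throughout the examples

def with_readonly_session(connectstring, work):
    # 'work' is any callable taking the open session, e.g.
    # lambda session: getRunInfo(session, c, runnumber, 0)
    svc = coral.ConnectionService()
    session = svc.connect(connectstring, accessMode=coral.access_ReadOnly)
    try:
        return work(session)
    finally:
        # release the session and the service even if 'work' raised
        del session
        del svc
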
Example n. 3
    def __init__(self,**kwargs):
        self._c = constants()

        connectstring='frontier://LumiProd/CMS_LUMI_PROD' #default value
        siteconfpath = None

        for arg in kwargs:
            if arg == 'connectstring':
                connectstring = kwargs[arg]
            if arg == 'siteconfpath':
                siteconfpath = kwargs[arg]
            if arg == 'verbose':
                self._c.VERBOSE = True
            if arg == 'debug':
                msg=coral.MessageStream('')
                msg.setMsgVerbosity(coral.message_Level_Debug)
                self._c.VERBOSE = True
            if arg == 'nowarning':
                self._c.NOWARNING = True
            if arg == 'normfactor':
                self._c.NORM = kwargs[arg]
            if arg == 'lumiversion':
                self._c.LUMIVERSION = kwargs[arg]
            if arg == "beammode":
                self._c.BEAMMODE = kwargs[arg]
            
        connectparser = connectstrParser.connectstrParser(connectstring)
        connectparser.parse()
        usedefaultfrontierconfig=False
        cacheconfigpath=''
        if connectparser.needsitelocalinfo():
            if siteconfpath is None:
                cacheconfigpath=os.environ['CMS_PATH']
                if cacheconfigpath:
                    cacheconfigpath=os.path.join(cacheconfigpath,'SITECONF','local','JobConfig','site-local-config.xml')
                else:
                    usedefaultfrontierconfig = True
            else:
                cacheconfigpath = siteconfpath
                cacheconfigpath = os.path.join(cacheconfigpath,'site-local-config.xml')
            p=cacheconfigParser.cacheconfigParser()
            if usedefaultfrontierconfig:
                p.parseString(self._c.defaultfrontierConfigString)
            else:
                p.parse(cacheconfigpath)
            connectstring=connectparser.fullfrontierStr(connectparser.schemaname(),p.parameterdict())
            
        self._dbsvc = coral.ConnectionService()
        self._session = self._dbsvc.connect(connectstring,
                                            accessMode=coral.access_Update)
        self._session.typeConverter().setCppTypeForSqlType("unsigned int","NUMBER(10)")
        self._session.typeConverter().setCppTypeForSqlType("unsigned long long","Number(20)")
Example n. 4
 def __init__(self,
              connectString,
              authpath=None,
              siteconfpath=None,
              debugON=False):
     self.__connectString = connectString
     self.__svc = None
     self.__connectparser = connectstrParser.connectstrParser(
         self.__connectString)
     usedefaultfrontierconfig = False
     cacheconfigpath = ''
     try:
         self.__connectparser.parse()
         if self.__connectparser.needsitelocalinfo():
             if not siteconfpath:
                 cacheconfigpath = os.environ['CMS_PATH']
                 if cacheconfigpath:
                     cacheconfigpath = os.path.join(
                         cacheconfigpath, 'SITECONF', 'local', 'JobConfig',
                         'site-local-config.xml')
                 else:
                     usedefaultfrontierconfig = True
             else:
                 cacheconfigpath = siteconfpath
                 cacheconfigpath = os.path.join(cacheconfigpath,
                                                'site-local-config.xml')
             ccp = cacheconfigParser.cacheconfigParser()
             if usedefaultfrontierconfig:
                 ccp.parseString(self.defaultfrontierConfigString())
             else:
                 ccp.parse(cacheconfigpath)
             self.__connectString = self.__connectparser.fullfrontierStr(
                 self.__connectparser.schemaname(), ccp.parameterdict())
         if self.__connectparser.protocol() == 'oracle':
             if authpath:
                 os.environ['CORAL_AUTH_PATH'] = authpath
             else:
                 os.environ['CORAL_AUTH_PATH'] = '.'
         if debugON:
             msg = coral.MessageStream('')
             msg.setMsgVerbosity(coral.message_Level_Debug)
         self.__svc = coral.ConnectionService()
     except:
         if self.__svc: del self.__svc
         raise
Example n. 5
 def __init__(self,connectString,authpath=None,siteconfpath=None,debugON = False):
     self.__connectString=connectString
     self.__svc=None
     self.__connectparser=connectstrParser.connectstrParser(self.__connectString)
     usedefaultfrontierconfig = False
     cacheconfigpath = ''
     try:
         self.__connectparser.parse()
         if self.__connectparser.needsitelocalinfo():
             if not siteconfpath:
                 cacheconfigpath = os.environ['CMS_PATH']
                 if cacheconfigpath:
                     cacheconfigpath = os.path.join (cacheconfigpath, 'SITECONF', 'local', 'JobConfig', 'site-local-config.xml')
                 else:
                     usedefaultfrontierconfig = True
             else:
                 cacheconfigpath = siteconfpath
                 cacheconfigpath = os.path.join (cacheconfigpath, 'site-local-config.xml')
             ccp = cacheconfigParser.cacheconfigParser()
             if usedefaultfrontierconfig:
                 ccp.parseString ( self.defaultfrontierConfigString() )
             else:
                 ccp.parse (cacheconfigpath)
             self.__connectString = self.__connectparser.fullfrontierStr(self.__connectparser.schemaname(), ccp.parameterdict())
         if self.__connectparser.protocol()=='oracle':
             if authpath:
                 os.environ['CORAL_AUTH_PATH']=authpath
             else:
                 os.environ['CORAL_AUTH_PATH']='.'
         if debugON :
             msg = coral.MessageStream ('')
             msg.setMsgVerbosity (coral.message_Level_Debug)            
         self.__svc = coral.ConnectionService()
     except:
         if self.__svc: del self.__svc
         raise
Example n. 6
def main():


    # First of all we retrieve the arguments
    
    c=constants()
    parser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]),description="Dump Run info")
    parser.add_argument('-c',dest='connect',action='store',required=True,help='connect string to trigger DB (required)')
    parser.add_argument('-r',dest='runnumber',action='store',required=False,help='run number')

    args      = parser.parse_args()
    #datenow   = time.time()

    connectstring=args.connect
    connectparser=connectstrParser.connectstrParser(connectstring)
    connectparser.parse()
    usedefaultfrontierconfig=False
    cacheconfigpath=''
    if connectparser.needsitelocalinfo():       
        cacheconfigpath=os.environ['CMS_PATH']
        cacheconfigpath=os.path.join(cacheconfigpath,'SITECONF','local','JobConfig','site-local-config.xml')            
        p=cacheconfigParser.cacheconfigParser()
        p.parse(cacheconfigpath)
        connectstring=connectparser.fullfrontierStr(connectparser.schemaname(),p.parameterdict())

        
    lastfillrun1  = args.runnumber

    run_list  = [] # The list of runs we will get
    fill_list = [] # The list of fills we will get
 
    f=open('the_list.txt','r+') # The list of runs we start from 

    for line in f:
        if '1' in line:
            split = line.split(' ')[0:]
            lastfill        = split[0]
            lastfillrunlist = split[1].split('_')  
            lastfilllastrun = lastfillrunlist[len(lastfillrunlist)-2] 

            #print lastfillrunlist


    print 'Last fill treated is ',lastfill,' with first run ', lastfilllastrun
    print '...we start from there...'

    svc=coral.ConnectionService()
    session=svc.connect(connectstring,accessMode=coral.access_ReadOnly)

    is_ok = True
    firstrun = int(lastfilllastrun)

    run_list=getRunList(session,c,firstrun)
    run_list.sort()

    #print run_list

    for run in run_list:        

        fillnum = FillNum_ForRun(session,c,run)

        if fillnum == '0':
            continue
        
        if fillnum == 'None':
            continue

        if fillnum not in fill_list:
            fill_list.append(FillNum_ForRun(session,c,run))

    fill_list.sort()

    for fill in fill_list:  

        if fill == lastfill:
            for run in run_list:        
                if FillNum_ForRun(session,c,run) == fill:
                    f.write("%d_"%(run))

        else:
            f.write("\n%s "%(fill))

            for run in run_list:        
                if FillNum_ForRun(session,c,run) == fill:
                    f.write("%d_"%(run))

    
    del session
    del svc
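
The file written above holds one fill per line in the form '<fill> <run1>_<run2>_..._' (trailing underscore included). A minimal sketch, not part of the original, of reading one such line back; the function name is hypothetical.

def parse_fill_line(line):
    # e.g. '2355 187000_187001_' -> (2355, [187000, 187001])
    fill, runfield = line.split(' ', 1)
    runs = [int(r) for r in runfield.strip().split('_') if r]
    return int(fill), runs
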
Example n. 7
def main():


    # First of all we retrieve the arguments
    
    c=constants()
    parser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]),description="Dump Run info")
    parser.add_argument('-c',dest='connect',action='store',required=True,help='connect string to trigger DB (required)')
    parser.add_argument('-f',dest='fillnumber',action='store',required=True,help='initial fill number')

    args      = parser.parse_args()

    connectstring=args.connect
    connectparser=connectstrParser.connectstrParser(connectstring)
    connectparser.parse()
    usedefaultfrontierconfig=False
    cacheconfigpath=''
    if connectparser.needsitelocalinfo():       
        cacheconfigpath=os.environ['CMS_PATH']
        cacheconfigpath=os.path.join(cacheconfigpath,'SITECONF','local','JobConfig','site-local-config.xml')            
        p=cacheconfigParser.cacheconfigParser()
        p.parse(cacheconfigpath)
        connectstring=connectparser.fullfrontierStr(connectparser.schemaname(),p.parameterdict())

        
    lastfillrun1  = args.fillnumber

    fill_list_coll     = [] # The list of collision fills we will get
    fill_list_inter    = [] # The list of interfill fills we will get
    run_list_coll      = [] # The list of collision runs we will get
    run_list_inter     = [] # The list of interfill runs we will get
    run_list_coll_new  = [] # The new collision runs found in this pass
    run_list_inter_new = [] # The new interfill runs found in this pass
    
    f=open('the_collision_list.txt','r+') # The list of runs we start from 
    f2=open('the_collision_list_new.txt','w') # The list of runs we will write
    g=open('the_interfill_list.txt','r+') # The list of runs we start from 
    g2=open('the_interfill_list_new.txt','w') # The list of runs we will write

    for line in f:
        if '1' in line:
            split = line.split(' ')[0:]
            fill_list_coll.append(int(split[0]))
            lastfillrunlist = split[1].split('_')

            for i in range(len(lastfillrunlist)-1):
                run_list_coll.append(int(lastfillrunlist[i]))

    for line in g:
        if '1' in line:
            split = line.split(' ')[0:]
            fill_list_inter.append(int(split[0]))
            lastfillrunlist = split[1].split('_')

            for i in range(len(lastfillrunlist)-1):
                run_list_inter.append(int(lastfillrunlist[i]))
                    
            #print lastfillrunlist

    fill_list_coll.sort()
    fill_list_inter.sort()
    run_list_coll.sort()
    run_list_inter.sort()
    
    print "Collision run list contains ",len(run_list_coll)," runs..."
    print "Interfill run list contains ",len(run_list_inter)," runs..."

    print "...Now look for new runs..."

    if len(run_list_coll)==0:
        fill_list_coll.append(2355)
        run_list_coll.append(187000)
        
    if len(run_list_inter)==0:
        fill_list_inter.append(2355)
        run_list_inter.append(187000)


    # Connect to the database
    svc=coral.ConnectionService()
    session=svc.connect(connectstring,accessMode=coral.access_ReadOnly)

    is_ok    = True
    firstrun_coll  = int(run_list_coll[len(run_list_coll)-1]-200)
    firstrun_inter = int(run_list_inter[len(run_list_inter)-1]-200)

    # Create the run lists corresponding to what we are looking for
    run_list_coll_new=getRunList(session,c,firstrun_coll,'collisions','physics')
    run_list_inter_new=getRunList(session,c,firstrun_inter,'circulating','Interfill')

    # Sort the lists
    run_list_coll_new.sort()
    run_list_inter_new.sort()
    
    for run in run_list_coll_new:        

        if run in run_list_coll:
            continue

        run_list_coll.append(run)
        fillnum = FillNum_ForRun(session,c,run)
        
        if fillnum == '0':
            continue
        
        if fillnum == 'None':
            continue

        if int(fillnum) not in fill_list_coll: # We got a new fill
            fill_list_coll.append(int(fillnum))

    for run in run_list_inter_new:        

        if run in run_list_inter:
            continue

        run_list_inter.append(run)
        fillnum = FillNum_ForRun(session,c,run)
        
        if fillnum == '0':
            continue
        
        if fillnum == 'None':
            continue

        if int(fillnum) not in fill_list_inter: # We got a new fill
            fill_list_inter.append(int(fillnum))

    fill_list_coll.sort()
    fill_list_inter.sort()
    run_list_coll.sort()
    run_list_inter.sort()
    
    #print "Here ",fill_list_coll
    #print "Here ",fill_list_inter

    for fill in fill_list_coll:  

        f2.write("\n%d "%(fill))
        
        for run in run_list_coll:
            if int(FillNum_ForRun(session,c,run)) == fill:
                f2.write("%d_"%(run))
                

    for fill in fill_list_inter:  

        g2.write("\n%d "%(fill))
        
        for run in run_list_inter:
            if int(FillNum_ForRun(session,c,run)) == fill:
                g2.write("%d_"%(run))
        
    
    del session
    del svc
Example n. 8
def main():
    c=constants() # referenced below for the VERBOSE flag and the default frontier config
    parser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]),description="Lumi Data operations")
    # add required arguments
    parser.add_argument('-c',dest='connect',action='store',required=True,help='connect string to lumiDB')
    # add optional arguments
    parser.add_argument('-P',dest='authpath',action='store',help='path to authentication file')
    parser.add_argument('-lumiversion',dest='lumiversion',action='store',help='lumi data version, optional')
    parser.add_argument('-siteconfpath',dest='siteconfpath',action='store',help='specific path to the site-local-config.xml file; defaults to $CMS_PATH/SITECONF/local/JobConfig; if the path is undefined, falls back to the CERN proxy & server')
    parser.add_argument('action',choices=['listrun'],help='command actions')
    parser.add_argument('--raw',dest='printraw',action='store_true',help='print raw data' )
    parser.add_argument('--verbose',dest='verbose',action='store_true',help='verbose mode for printing' )
    parser.add_argument('--debug',dest='debug',action='store_true',help='debug')
    # parse arguments
    args=parser.parse_args()
    connectstring=args.connect
    connectparser=connectstrParser.connectstrParser(connectstring)
    connectparser.parse()
    usedefaultfrontierconfig=False
    cacheconfigpath=''
    if connectparser.needsitelocalinfo():
        if not args.siteconfpath:
            cacheconfigpath=os.environ['CMS_PATH']
            if cacheconfigpath:
                cacheconfigpath=os.path.join(cacheconfigpath,'SITECONF','local','JobConfig','site-local-config.xml')
            else:
                usedefaultfrontierconfig=True
        else:
            cacheconfigpath=args.siteconfpath
            cacheconfigpath=os.path.join(cacheconfigpath,'site-local-config.xml')
        p=cacheconfigParser.cacheconfigParser()
        if usedefaultfrontierconfig:
            p.parseString(c.defaultfrontierConfigString)
        else:
            p.parse(cacheconfigpath)
        connectstring=connectparser.fullfrontierStr(connectparser.schemaname(),p.parameterdict())
    #print 'connectstring',connectstring
    runnumber=0
    svc = coral.ConnectionService()
    isverbose=False
    if args.debug :
        msg=coral.MessageStream('')
        msg.setMsgVerbosity(coral.message_Level_Debug)
        c.VERBOSE=True

    if args.verbose :
        c.VERBOSE=True
    if args.authpath and len(args.authpath)!=0:
        os.environ['CORAL_AUTH_PATH']=args.authpath

    session=svc.connect(connectstring,accessMode=coral.access_Update)
    session.typeConverter().setCppTypeForSqlType("unsigned int","NUMBER(10)")

    if args.action == 'listrun':
        lumiversion=''
        if args.lumiversion:
            lumiversion=args.lumiversion
        runlist=runListInDB(session,lumiversion)
        if args.printraw:
            print runlist
        else:
            printRunList(runlist)
    
    del session
    del svc
Example n. 9
def main():
    allowedscales=['linear','log','both']
    c=constants()
    parser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]),description="Plot integrated luminosity as function of the time variable of choice",formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # add required arguments
    parser.add_argument('-c',dest='connect',action='store',required=True,help='connect string to lumiDB')
    # add optional arguments
    parser.add_argument('-P',dest='authpath',action='store',help='path to authentication file')
    parser.add_argument('-n',dest='normfactor',action='store',help='normalization factor (optional, default to 1.0)')
    parser.add_argument('-i',dest='inputfile',action='store',help='lumi range selection file (optional)')
    parser.add_argument('-o',dest='outputfile',action='store',help='csv outputfile name (optional)')
    parser.add_argument('-lumiversion',dest='lumiversion',default='0001',action='store',required=False,help='lumi data version')
    parser.add_argument('-begin',dest='begin',action='store',help='begin value of x-axis (required)')
    parser.add_argument('-end',dest='end',action='store',help='end value of x-axis (optional). Defaults to the maximum existing in DB')
    parser.add_argument('-beamenergy',dest='beamenergy',action='store',type=float,required=False,help='beamenergy (in GeV) selection criteria,e.g. 3.5e3')
    parser.add_argument('-beamfluctuation',dest='beamfluctuation',action='store',type=float,required=False,help='allowed fraction of beamenergy to fluctuate, e.g. 0.1')
    parser.add_argument('-beamstatus',dest='beamstatus',action='store',required=False,help='selection criteria beam status,e.g. STABLE BEAMS')
    parser.add_argument('-yscale',dest='yscale',action='store',required=False,default='linear',help='y_scale')
    parser.add_argument('-hltpath',dest='hltpath',action='store',help='specific hltpath for which to calculate the recorded luminosity. If specified, overlays the recorded luminosity for that hltpath on the plot')
    parser.add_argument('-batch',dest='batch',action='store',help='graphical mode to produce PNG file. Specify graphical file here. Default to lumiSum.png')
    parser.add_argument('--annotateboundary',dest='annotateboundary',action='store_true',help='annotate boundary run numbers')
    parser.add_argument('--interactive',dest='interactive',action='store_true',help='graphical mode to draw the plot in a Tk panel.')
    parser.add_argument('-timeformat',dest='timeformat',action='store',help='specific python timeformat string (optional).  Default mm/dd/yy hh:min:ss.00')
    parser.add_argument('-siteconfpath',dest='siteconfpath',action='store',help='specific path to the site-local-config.xml file; defaults to $CMS_PATH/SITECONF/local/JobConfig; if the path is undefined, falls back to the CERN proxy & server')
    parser.add_argument('action',choices=['run','fill','time','perday'],help='x-axis data type of choice')
    #graphical mode options
    parser.add_argument('--with-correction',dest='withFineCorrection',action='store_true',help='with fine correction')
    parser.add_argument('--verbose',dest='verbose',action='store_true',help='verbose mode, print result also to screen')
    parser.add_argument('--debug',dest='debug',action='store_true',help='debug')
    # parse arguments
    batchmode=True
    args=parser.parse_args()
    connectstring=args.connect
    begvalue=args.begin
    endvalue=args.end
    beamstatus=args.beamstatus
    beamenergy=args.beamenergy
    beamfluctuation=args.beamfluctuation
    xaxitype='run'
    connectparser=connectstrParser.connectstrParser(connectstring)
    connectparser.parse()
    usedefaultfrontierconfig=False
    cacheconfigpath=''
    if connectparser.needsitelocalinfo():
        if not args.siteconfpath:
            cacheconfigpath=os.environ['CMS_PATH']
            if cacheconfigpath:
                cacheconfigpath=os.path.join(cacheconfigpath,'SITECONF','local','JobConfig','site-local-config.xml')
            else:
                usedefaultfrontierconfig=True
        else:
            cacheconfigpath=args.siteconfpath
            cacheconfigpath=os.path.join(cacheconfigpath,'site-local-config.xml')
        p=cacheconfigParser.cacheconfigParser()
        if usedefaultfrontierconfig:
            p.parseString(c.defaultfrontierConfigString)
        else:
            p.parse(cacheconfigpath)
        connectstring=connectparser.fullfrontierStr(connectparser.schemaname(),p.parameterdict())
    #print 'connectstring',connectstring
    runnumber=0
    svc = coral.ConnectionService()
    hltpath=''
    if args.hltpath:
        hltpath=args.hltpath
    if args.debug :
        msg=coral.MessageStream('')
        msg.setMsgVerbosity(coral.message_Level_Debug)
    ifilename=''
    ofilename='integratedlumi.png'
    timeformat=''
    if args.authpath and len(args.authpath)!=0:
        os.environ['CORAL_AUTH_PATH']=args.authpath
    if args.normfactor:
        c.NORM=float(args.normfactor)
    if args.lumiversion:
        c.LUMIVERSION=args.lumiversion
    if args.verbose:
        c.VERBOSE=True
    if args.inputfile:
        ifilename=args.inputfile
    if args.batch:
        opicname=args.batch
    if args.outputfile:
        ofilename=args.outputfile
    if args.timeformat:
        timeformat=args.timeformat
    session=svc.connect(connectstring,accessMode=coral.access_Update)
    session.typeConverter().setCppTypeForSqlType("unsigned int","NUMBER(10)")
    session.typeConverter().setCppTypeForSqlType("unsigned long long","NUMBER(20)")
    inputfilecontent=''
    fileparsingResult=''
    runList=[]
    runDict={}
    fillDict={}
    selectionDict={}
    minTime=''
    maxTime=''
    startRunTime=''
    stopRunTime=''
    #if len(ifilename)!=0 :
    #    f=open(ifilename,'r')
    #    inputfilecontent=f.read()
    #    sparser=selectionParser.selectionParser(inputfilecontent)
    #    runsandls=sparser.runsandls()
    #    keylist=runsandls.keys()
    #    keylist.sort()
    #    for run in keylist:
    #        if selectionDict.has_key(run):
    #            lslist=runsandls[run]
    #            lslist.sort()
    #            selectionDict[run]=lslist
    if len(ifilename)!=0:
        ifparser=inputFilesetParser.inputFilesetParser(ifilename)
        runsandls=ifparser.runsandls()
        keylist=runsandls.keys()
        keylist.sort()
        for run in keylist:
            if not selectionDict.has_key(run):
                lslist=runsandls[run]
                lslist.sort()
                selectionDict[run]=lslist
    if args.action == 'run':
        if not args.end:
            session.transaction().start(True)
            schema=session.nominalSchema()
            lastrun=max(lumiQueryAPI.allruns(schema,requireRunsummary=True,requireLumisummary=True,requireTrg=True,requireHlt=False))
            session.transaction().commit()
        else:
            lastrun=int(args.end)
        for r in range(int(args.begin),lastrun+1):
            runList.append(r)
        runList.sort()
    elif args.action == 'fill':
        session.transaction().start(True)
        maxfill=None
        if not args.end:
            qHandle=session.nominalSchema().newQuery()
            maxfill=max(lumiQueryAPI.allfills(qHandle,filtercrazy=True))
            del qHandle
        else:
            maxfill=int(args.end)
        qHandle=session.nominalSchema().newQuery()
        fillDict=lumiQueryAPI.runsByfillrange(qHandle,int(args.begin),maxfill)
        del qHandle
        session.transaction().commit()
        #print 'fillDict ',fillDict
        for fill in range(int(args.begin),maxfill+1):
            if fillDict.has_key(fill): #fill exists
                for run in fillDict[fill]:
                    runList.append(run)
        runList.sort()
        
    elif args.action == 'time' or args.action == 'perday':
        session.transaction().start(True)
        t=lumiTime.lumiTime()
        minTime=t.StrToDatetime(args.begin,timeformat)
        if not args.end:
            maxTime=datetime.datetime.utcnow() #to now
        else:
            maxTime=t.StrToDatetime(args.end,timeformat)
        #print minTime,maxTime
        qHandle=session.nominalSchema().newQuery()
        runDict=lumiQueryAPI.runsByTimerange(qHandle,minTime,maxTime)#xrawdata
        session.transaction().commit()
        runList=runDict.keys()        
        del qHandle
        runList.sort()
        if len(runList)!=0:
            runmin=min(runList)
            runmax=max(runList)       
            startRunTime=runDict[runmin][0]
            stopRunTime=runDict[runmax][1]
        #print 'run list: ',runDict
    else:
        print 'unsupported action ',args.action
        return
    finecorrections=None
    if args.withFineCorrection:
        schema=session.nominalSchema()
        session.transaction().start(True)
        finecorrections=lumiCorrections.correctionsForRange(schema,runList)
        session.transaction().commit()      
        
    #print 'runList ',runList
    #print 'runDict ', runDict
    
    fig=Figure(figsize=(7.2,5.4),dpi=120)
    m=matplotRender.matplotRender(fig)
    
    logfig=Figure(figsize=(7.5,5.7),dpi=135)
    mlog=matplotRender.matplotRender(logfig)
    
    if args.action == 'run':
        result={}        
        result=getLumiInfoForRuns(session,c,runList,selectionDict,hltpath,beamstatus=beamstatus,beamenergy=beamenergy,beamfluctuation=beamfluctuation,finecorrections=finecorrections)
        xdata=[]
        ydata={}
        ydata['Delivered']=[]
        ydata['Recorded']=[]
        keylist=result.keys()
        keylist.sort() #must be sorted in order
        if args.outputfile:
            reporter=csvReporter.csvReporter(ofilename)
            fieldnames=['run','delivered','recorded']
            reporter.writeRow(fieldnames)
        for run in keylist:
            xdata.append(run)
            delivered=result[run][0]
            recorded=result[run][1]
            ydata['Delivered'].append(delivered)
            ydata['Recorded'].append(recorded)
            if args.outputfile and (delivered!=0 or recorded!=0):
                reporter.writeRow([run,result[run][0],result[run][1]])                
        m.plotSumX_Run(xdata,ydata,yscale='linear')
        mlog.plotSumX_Run(xdata,ydata,yscale='log')
    elif args.action == 'fill':        
        lumiDict={}
        lumiDict=getLumiInfoForRuns(session,c,runList,selectionDict,hltpath,beamstatus=beamstatus,beamenergy=beamenergy,beamfluctuation=beamfluctuation,finecorrections=finecorrections)
        xdata=[]
        ydata={}
        ydata['Delivered']=[]
        ydata['Recorded']=[]
        #keylist=lumiDict.keys()
        #keylist.sort()
        if args.outputfile:
            reporter=csvReporter.csvReporter(ofilename)
            fieldnames=['fill','run','delivered','recorded']
            reporter.writeRow(fieldnames)
        fills=fillDict.keys()
        fills.sort()
        for fill in fills:
            runs=fillDict[fill]
            runs.sort()
            for run in runs:
                xdata.append(run)
                ydata['Delivered'].append(lumiDict[run][0])
                ydata['Recorded'].append(lumiDict[run][1])
                if args.outputfile :
                    reporter.writeRow([fill,run,lumiDict[run][0],lumiDict[run][1]])   
        #print 'input fillDict ',len(fillDict.keys()),fillDict
        m.plotSumX_Fill(xdata,ydata,fillDict,yscale='linear')
        mlog.plotSumX_Fill(xdata,ydata,fillDict,yscale='log')
    elif args.action == 'time' : 
        lumiDict={}
        lumiDict=getLumiInfoForRuns(session,c,runList,selectionDict,hltpath,beamstatus=beamstatus,beamenergy=beamenergy,beamfluctuation=beamfluctuation,finecorrections=finecorrections)
        #lumiDict=getLumiInfoForRuns(session,c,runList,selectionDict,hltpath,beamstatus='STABLE BEAMS')
        xdata={}#{run:[starttime,stoptime]}
        ydata={}
        ydata['Delivered']=[]
        ydata['Recorded']=[]
        keylist=lumiDict.keys()
        keylist.sort()
        if args.outputfile:
            reporter=csvReporter.csvReporter(ofilename)
            fieldnames=['run','starttime','stoptime','delivered','recorded']
            reporter.writeRow(fieldnames)
        for run in keylist:
            ydata['Delivered'].append(lumiDict[run][0])
            ydata['Recorded'].append(lumiDict[run][1])
            starttime=runDict[run][0]
            stoptime=runDict[run][1]
            xdata[run]=[starttime,stoptime]
            if args.outputfile :
                reporter.writeRow([run,starttime,stoptime,lumiDict[run][0],lumiDict[run][1]])
        m.plotSumX_Time(xdata,ydata,startRunTime,stopRunTime,hltpath=hltpath,annotateBoundaryRunnum=args.annotateboundary,yscale='linear')
        mlog.plotSumX_Time(xdata,ydata,startRunTime,stopRunTime,hltpath=hltpath,annotateBoundaryRunnum=args.annotateboundary,yscale='log')
    elif args.action == 'perday':
        daydict={}#{day:[[run,cmslsnum,lsstarttime,delivered,recorded]]}
        lumibyls=getLumiOrderByLS(session,c,runList,selectionDict,hltpath,beamstatus=beamstatus,beamenergy=beamenergy,beamfluctuation=beamfluctuation,finecorrections=finecorrections)
        #lumibyls [[runnumber,runstarttime,lsnum,lsstarttime,delivered,recorded,recordedinpath]]
        if args.outputfile:
            reporter=csvReporter.csvReporter(ofilename)
            fieldnames=['day','begrunls','endrunls','delivered','recorded']
            reporter.writeRow(fieldnames)
        beginfo=[lumibyls[0][3],str(lumibyls[0][0])+':'+str(lumibyls[0][2])]
        endinfo=[lumibyls[-1][3],str(lumibyls[-1][0])+':'+str(lumibyls[-1][2])]
        for perlsdata in lumibyls:
            lsstarttime=perlsdata[3]
            delivered=perlsdata[4]
            recorded=perlsdata[5]
            day=lsstarttime.toordinal()
            if not daydict.has_key(day):
                daydict[day]=[]
            daydict[day].append([delivered,recorded])
        days=daydict.keys()
        days.sort()
        daymin=days[0]
        daymax=days[-1]
        #alldays=range(daymin,daymax+1)
        resultbyday={}
        resultbyday['Delivered']=[]
        resultbyday['Recorded']=[]
        #for day in days:
        #print 'day min ',daymin
        #print 'day max ',daymax
        for day in range(daymin,daymax+1):
            if not daydict.has_key(day):
                delivered=0.0
                recorded=0.0
            else:
                daydata=daydict[day]
                mytransposed=CommonUtil.transposed(daydata,defaultval=0.0)
                delivered=sum(mytransposed[0])
                recorded=sum(mytransposed[1])
            resultbyday['Delivered'].append(delivered)
            resultbyday['Recorded'].append(recorded)
            if args.outputfile:
                reporter.writeRow([day,beginfo[1],endinfo[1],delivered,recorded])
        #print 'beginfo ',beginfo
        #print 'endinfo ',endinfo
        #print resultbyday
        m.plotPerdayX_Time( range(daymin,daymax+1) ,resultbyday,startRunTime,stopRunTime,boundaryInfo=[beginfo,endinfo],annotateBoundaryRunnum=args.annotateboundary,yscale='linear')
        mlog.plotPerdayX_Time( range(daymin,daymax+1),resultbyday,startRunTime,stopRunTime,boundaryInfo=[beginfo,endinfo],annotateBoundaryRunnum=args.annotateboundary,yscale='log')
    else:
        raise Exception('must specify the type of x-axis')

    del session
    del svc

    if args.batch and args.yscale=='linear':
        m.drawPNG(args.batch)
    elif args.batch and args.yscale=='log':
        mlog.drawPNG(args.batch)
    elif args.batch and args.yscale=='both':
        m.drawPNG(args.batch)
        basename,extension=os.path.splitext(args.batch)
        logfilename=basename+'_log'+extension        
        mlog.drawPNG(logfilename)
    else:
        if not args.interactive:
            return
        if args.interactive is True and args.yscale=='linear':
            m.drawInteractive()
        elif args.interactive is True and args.yscale=='log':
            mlog.drawInteractive()
        else:
            raise Exception('unsupported yscale for interactive mode : '+args.yscale)
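
In the 'perday' branch above, per-lumisection delivered/recorded values are grouped by calendar day (datetime.toordinal) and summed, with days that have no data filled with zero. A standalone sketch of that aggregation follows; the function name and the (day_ordinal, delivered, recorded) input format are assumptions, the logic mirrors the daydict loop above.

def sums_per_day(perls):
    # perls: iterable of (day_ordinal, delivered, recorded) tuples
    daydict = {}
    for day, delivered, recorded in perls:
        totals = daydict.setdefault(day, [0.0, 0.0])
        totals[0] += delivered
        totals[1] += recorded
    result = {'Delivered': [], 'Recorded': []}
    if not daydict:
        return result
    # fill every day in the covered range, defaulting empty days to 0.0
    for day in range(min(daydict), max(daydict) + 1):
        delivered, recorded = daydict.get(day, (0.0, 0.0))
        result['Delivered'].append(delivered)
        result['Recorded'].append(recorded)
    return result
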
Example n. 10
def main():
    allowedscales=['linear','log','both']
    c=constants()
    parser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]),description="Plot integrated luminosity as function of the time variable of choice")
    # add required arguments
    parser.add_argument('-c',dest='connect',action='store',required=True,help='connect string to lumiDB')
    # add optional arguments
    parser.add_argument('-P',dest='authpath',action='store',help='path to authentication file')
    parser.add_argument('-n',dest='normfactor',action='store',help='normalization factor (optional, default to 1.0)')
    parser.add_argument('-i',dest='inputfile',action='store',help='lumi range selection file (optional)')
    parser.add_argument('-o',dest='outputfile',action='store',help='csv outputfile name (optional)')
    parser.add_argument('-b',dest='beammode',action='store',help='beam mode, optional, no default')
    parser.add_argument('-lumiversion',dest='lumiversion',action='store',help='lumi data version, optional for all, default 0001')
    parser.add_argument('-begin',dest='begin',action='store',help='begin x-value (required)')
    parser.add_argument('-end',dest='end',action='store',help='end x-value (optional). Defaults to the maximum existing in DB')
    parser.add_argument('-batch',dest='batch',action='store',help='graphical mode to produce PNG file. Specify graphical file here. Default to lumiSum.png')
    parser.add_argument('-yscale',dest='yscale',action='store',required=False,default='linear',help='y_scale')
    parser.add_argument('--interactive',dest='interactive',action='store_true',help='graphical mode to draw the plot in a Tk panel.')
    parser.add_argument('-timeformat',dest='timeformat',action='store',help='specific python timeformat string (optional).  Default mm/dd/yy hh:min:ss.00')
    parser.add_argument('-siteconfpath',dest='siteconfpath',action='store',help='specific path to the site-local-config.xml file; defaults to $CMS_PATH/SITECONF/local/JobConfig; if the path is undefined, falls back to the CERN proxy & server')
    parser.add_argument('action',choices=['peakperday','run'],help='plot type of choice')
    #graphical mode options
    parser.add_argument('--annotateboundary',dest='annotateboundary',action='store_true',help='annotate boundary run numbers')
    parser.add_argument('--verbose',dest='verbose',action='store_true',help='verbose mode, print result also to screen')
    parser.add_argument('--with-correction',dest='withFineCorrection',action='store_true',help='with fine correction')
    parser.add_argument('--debug',dest='debug',action='store_true',help='debug')
    # parse arguments
    args=parser.parse_args()
    connectstring=args.connect
    connectparser=connectstrParser.connectstrParser(connectstring)
    connectparser.parse()
    usedefaultfrontierconfig=False
    cacheconfigpath=''
    if connectparser.needsitelocalinfo():
        if not args.siteconfpath:
            cacheconfigpath=os.environ['CMS_PATH']
            if cacheconfigpath:
                cacheconfigpath=os.path.join(cacheconfigpath,'SITECONF','local','JobConfig','site-local-config.xml')
            else:
                usedefaultfrontierconfig=True
        else:
            cacheconfigpath=args.siteconfpath
            cacheconfigpath=os.path.join(cacheconfigpath,'site-local-config.xml')
        p=cacheconfigParser.cacheconfigParser()
        if usedefaultfrontierconfig:
            p.parseString(c.defaultfrontierConfigString)
        else:
            p.parse(cacheconfigpath)
        connectstring=connectparser.fullfrontierStr(connectparser.schemaname(),p.parameterdict())
    runnumber=0
    svc = coral.ConnectionService()
    if args.debug :
        msg=coral.MessageStream('')
        msg.setMsgVerbosity(coral.message_Level_Debug)
    ifilename=''
    ofilename='instlumi.csv'
    beammode='stable'
    timeformat=''
    selectionDict={}
    if args.authpath and len(args.authpath)!=0:
        os.environ['CORAL_AUTH_PATH']=args.authpath
    if args.normfactor:
        c.NORM=float(args.normfactor)
    if args.lumiversion:
        c.LUMIVERSION=args.lumiversion
    if args.beammode:
        c.BEAMMODE=args.beammode
    if args.verbose:
        c.VERBOSE=True
    if args.inputfile:
        ifilename=args.inputfile
    if args.batch:
        opicname=args.batch
    if args.outputfile:
        ofilename=args.outputfile
    if args.timeformat:
        timeformat=args.timeformat
    session=svc.connect(connectstring,accessMode=coral.access_Update)
    session.typeConverter().setCppTypeForSqlType("unsigned int","NUMBER(10)")
    session.typeConverter().setCppTypeForSqlType("unsigned long long","NUMBER(20)")
    startRunTime=''
    stopRunTime=''
    if ifilename:
        ifparser=inputFilesetParser(ifilename)
        runsandls=ifparser.runsandls()
        keylist=runsandls.keys()
        keylist.sort()
        for run in keylist:
            if not selectionDict.has_key(run):
                lslist=runsandls[run]
                lslist.sort()
                selectionDict[run]=lslist
    if args.action == 'run':
        minRun=int(args.begin)
        if not args.end:
            maxRun=minRun 
        else:
            maxRun=int(args.end)            
        runList=range(minRun,maxRun+1)
    elif args.action == 'peakperday':
        session.transaction().start(True)
        t=lumiTime.lumiTime()
        minTime=t.StrToDatetime(args.begin,timeformat)
        if not args.end:
            maxTime=datetime.datetime.utcnow() #to now
        else:
            maxTime=t.StrToDatetime(args.end,timeformat)
        #print minTime,maxTime
        qHandle=session.nominalSchema().newQuery()
        runDict=lumiQueryAPI.runsByTimerange(qHandle,minTime,maxTime)#xrawdata
        session.transaction().commit()
        runList=runDict.keys()
        del qHandle
        runList.sort()
        if len(runList)!=0:
            runmin=min(runList)
            runmax=max(runList)
            startRunTime=runDict[runmin][0]
            stopRunTime=runDict[runmax][1]
    else:
        print 'unsupported action ',args.action
        return
    #print 'runList ',runList
    #print 'runDict ', runDict
    finecorrections=None
    if args.withFineCorrection:
        schema=session.nominalSchema()
        session.transaction().start(True)
        finecorrections=lumiCorrections.correctionsForRange(schema,runList)
        session.transaction().commit()      
    fig=Figure(figsize=(6,4.5),dpi=100)
    m=matplotRender.matplotRender(fig)

    logfig=Figure(figsize=(6,4.5),dpi=100)
    mlog=matplotRender.matplotRender(logfig)
    
    if args.action == 'peakperday':
        l=lumiTime.lumiTime()
        lumiperls=getInstLumiPerLS(session,c,runList,selectionDict,finecorrections=finecorrections)
        if args.outputfile:
            reporter=csvReporter.csvReporter(ofilename)
            fieldnames=['day','run','lsnum','maxinstlumi']
            reporter.writeRow(fieldnames)
        #minDay=minTime.toordinal()
        #maxDay=maxTime.toordinal()
        daydict={}#{day:[[run,lsnum,instlumi]]}
        result={}#{day:[maxrun,maxlsnum,maxinstlumi]}
        for lsdata in lumiperls:
            runnumber=lsdata[0]
            lsnum=lsdata[1]
            runstarttimeStr=lsdata[-2]#note: it is a string!!
            startorbit=lsdata[5]
            deliveredInst=lsdata[2]
            lsstarttime=l.OrbitToTime(runstarttimeStr,startorbit)
            day=lsstarttime.toordinal()
            if not daydict.has_key(day):
                daydict[day]=[]
            daydict[day].append([runnumber,lsnum,deliveredInst])
        days=daydict.keys()
        days.sort()
        for day in days:
            daydata=daydict[day]
            transposeddata=CommonUtil.transposed(daydata,defaultval=0.0)
            todaysmaxinst=max(transposeddata[2])
            todaysmaxidx=transposeddata[2].index(todaysmaxinst)
            todaysmaxrun=transposeddata[0][todaysmaxidx]
            todaysmaxls=transposeddata[1][todaysmaxidx]
            result[day]=[todaysmaxrun,todaysmaxls,todaysmaxinst]
            if args.outputfile :
                reporter.writeRow([day,todaysmaxrun,todaysmaxls,todaysmaxinst])
        m.plotPeakPerday_Time(result,startRunTime,stopRunTime,annotateBoundaryRunnum=args.annotateboundary,yscale='linear')
        mlog.plotPeakPerday_Time(result,startRunTime,stopRunTime,annotateBoundaryRunnum=args.annotateboundary,yscale='log')
        
    if args.action == 'run':
        runnumber=runList[0]
        if finecorrections and finecorrections[runnumber]:
            lumiperrun=getLumiPerRun(session,c,runnumber,finecorrections=finecorrections[runnumber])#[[lsnumber,deliveredInst,recordedInst,norbit,startorbit,fillnum,runstarttime,runstoptime]]
        else:
            lumiperrun=getLumiPerRun(session,c,runnumber)
        #print 'lumiperrun ',lumiperrun
        xdata=[]#[runnumber,fillnum,norbit,stattime,stoptime,totalls,ncmsls]
        ydata={}#{label:[instlumi]}
        ydata['Delivered']=[]
        ydata['Recorded']=[]
        norbit=lumiperrun[0][3]
        fillnum=lumiperrun[0][-3]
        starttime=lumiperrun[0][-2]
        stoptime=lumiperrun[0][-1]
        ncmsls=0
        totalls=len(lumiperrun)
        for lsdata in lumiperrun:
            lsnumber=lsdata[0]
            if lsnumber!=0:
                ncmsls+=1
            deliveredInst=lsdata[1]
            recordedInst=lsdata[2]
            ydata['Delivered'].append(deliveredInst)
            ydata['Recorded'].append(recordedInst)
        xdata=[runnumber,fillnum,norbit,starttime,stoptime,totalls,ncmsls]
        m.plotInst_RunLS(xdata,ydata)
    del session
    del svc
    if args.batch and args.yscale=='linear':
        m.drawPNG(args.batch)
    elif  args.batch and args.yscale=='log':
        mlog.drawPNG(args.batch)
    elif args.batch and args.yscale=='both':
        m.drawPNG(args.batch)
        basename,extension=os.path.splitext(args.batch)
        logfilename=basename+'_log'+extension        
        mlog.drawPNG(logfilename)
    elif args.batch:
        raise Exception('unsupported yscale for batch mode : '+args.yscale)
    
    if not args.interactive:
        return
    if args.interactive is True and args.yscale=='linear':
        m.drawInteractive()
    elif args.interactive is True and args.yscale=='log':
        mlog.drawInteractive()
    else:
        raise Exception('unsupported yscale for interactive mode : '+args.yscale)
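
The 'peakperday' branch above groups per-lumisection instantaneous luminosity by day and keeps, for each day, the run and lumisection with the highest value. The same selection in isolation, as a sketch; the function name and the (day, run, lsnum, instlumi) input format are assumptions, the per-day maximum logic matches the daydict/result loop above.

def peak_per_day(lsrecords):
    # lsrecords: iterable of (day_ordinal, run, lsnum, instlumi) tuples
    result = {}  # {day: [run, lsnum, maxinstlumi]}, as in the example above
    for day, run, lsnum, instlumi in lsrecords:
        best = result.get(day)
        if best is None or instlumi > best[2]:
            result[day] = [run, lsnum, instlumi]
    return result
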