def crab_submit_remotely(rel, work_area):
    """
    Submit a crab job remotely on lxplus (direct submission is not
    possible from Mac OSX).  Tars up the current working area, ships
    it to /tmp/<user> on lxplus and runs 'crab -submit' there.

    :param rel: CMSSW release name used to build the remote script
    :param work_area: local CMSSW work area where the tarball is created
    """
    msg = 'You cannot directly submit job from Mac OSX, '
    msg += 'but we will attempt to execute it on lxplus'
    print_warning(msg)
    # create tarball of local area
    tar_filename = os.path.join(work_area, 'cmssh.tar.gz')
    tar = tarfile.open(tar_filename, "w:gz")
    try:
        for name in os.listdir(os.getcwd()):
            # skip the tarball itself; compare resolved paths since `name`
            # is a bare file name while tar_filename may be a full path
            if os.path.realpath(name) == os.path.realpath(tar_filename):
                continue
            tar.add(name)
    finally:
        # close even if tar.add fails, so a partial archive is flushed
        tar.close()
    # hostname = 'lxplus424.cern.ch'
    hostname = 'lxplus.cern.ch'
    # send first hostname command to know which lxplus we will talk too
    if hostname not in CLIENTS:
        CLIENTS.setdefault(hostname, SSHClient(hostname))
    client = CLIENTS.get(hostname)
    username = client.username
    # create remote area
    remote_dir = '/tmp/%s' % username
    cmd = 'mkdir -p %s && uname -n && echo "Create %s"' \
            % (remote_dir, remote_dir)
    res, err = client.execute(cmd)
    print_res_err(res, err)
    # transfer local files
    remote_file = '/tmp/%s/%s' % (username, tar_filename.split('/')[-1])
    client.put(tar_filename, remote_file)
    # execute remote command
    crab_cmd = 'crab -submit'
    cmd = remote_script(username, rel, crab_cmd)
    res, err = client.execute(cmd)
    print_res_err(res, err)
def get_data(url, kwargs=None, headers=None, verbose=None, decoder='json', post=False):
    """
    Retrieve data from given URL.  Primary transport is pycurl
    (RequestHandler); on any failure we fall back to urllib via
    get_data_helper.  Returns decoded JSON when decoder == 'json',
    otherwise the raw response payload.

    :param url: service URL
    :param kwargs: query parameters passed to the request
    :param headers: optional HTTP headers
    :param verbose: print errors/warnings when truthy
    :param decoder: 'json' to json-decode the response, anything else for raw
    :param post: use POST instead of GET
    """
    if not headers and 'DBSReader' in url:
        headers = {'Accept': 'application/json'}  # DBS3 always needs that
    cert = os.path.join(os.environ['HOME'], '.globus/usercert.pem')
    try:
        # pycurl data look-up, primary way to get the data
        mgr = RequestHandler()
        with working_pem(PEMMGR.pem) as ckey:
            res = mgr.get_data(url, kwargs, headers, post, ckey, cert,
                               verbose=verbose)
            if decoder == 'json':
                data = json.load(res)
            else:
                data = res.read()
            return data
    except Exception as exc:
        if verbose:
            print_error(exc)
            msg = 'Fall back to urllib'
            print_warning(msg)
    # urllib data look-up, fallback mechanism
    with working_pem(PEMMGR.pem) as ckey:
        return get_data_helper(url, kwargs, headers, verbose, decoder,
                               post, ckey, cert)
def print_res_err(res, err): "Print res/err from remote command execution" if isinstance(res, list): print "\n", '\n'.join(res) else: print "\n", res if err: if isinstance(err, list): msg = '\n'.join(err) else: msg = err print_warning(msg)
def check_voms_proxy():
    """
    Check status of user VOMS proxy and warn when it expires in less
    than one hour.  Relies on the `voms-proxy-info -timeleft` command.
    """
    cmd = 'voms-proxy-info -timeleft'
    out, err = execmd(cmd)
    if err:
        print_error('Fail to check user proxy info')
        return
    try:
        # voms-proxy-info prints the remaining lifetime in seconds
        timeleft = int(out)
    except (TypeError, ValueError):
        # empty or non-numeric output (e.g. no proxy at all) used to
        # crash with an uncaught ValueError; report it instead
        print_error('Unable to parse proxy time-left output: %r' % (out,))
        return
    if timeleft < 3600:  # time left is less then 1 hour
        msg = 'Your VOMS proxy will expire in %s sec (< 1 hour). ' % out
        msg += 'Please run ' + msg_green('vomsinit') + ' command to renew it'
        print_warning(msg)
def run_lumi_golden_json():
    """
    Get run lumi dict from the golden JSON file pointed to by the
    CMS_JSON environment variable.

    :returns: (fname, dict) on success; (fname, {}) when the file cannot
        be decoded; (None, {}) when CMS_JSON is unset or not a file
    """
    fname = os.environ.get('CMS_JSON', None)
    # guard fname: os.path.isfile(None) raises TypeError when CMS_JSON unset
    if fname and os.path.isfile(fname):
        with open(fname, 'r') as json_file:
            try:
                cms_json = json.load(json_file)
                return fname, cms_json
            except ValueError:
                # json.load raises ValueError on malformed JSON
                print_error('Unable to decode CMS JSON: %s' % fname)
        return fname, {}
    else:
        msg = 'Unable to locate CMS JSON file'
        print_warning(msg)
        return None, {}
def run_lumi_golden_json():
    """
    Get run lumi dict from the golden JSON file pointed to by the
    CMS_JSON environment variable.

    :returns: (fname, dict) on success; (fname, {}) when the file cannot
        be decoded; (None, {}) when CMS_JSON is unset or not a file
    """
    fname = os.environ.get("CMS_JSON", None)
    # guard fname: os.path.isfile(None) raises TypeError when CMS_JSON unset
    if fname and os.path.isfile(fname):
        with open(fname, "r") as json_file:
            try:
                cms_json = json.load(json_file)
                return fname, cms_json
            except ValueError:
                # json.load raises ValueError on malformed JSON
                print_error("Unable to decode CMS JSON: %s" % fname)
        return fname, {}
    else:
        msg = "Unable to locate CMS JSON file"
        print_warning(msg)
        return None, {}
def test_key_cert(): """Test user key/cert file and their permissions""" kfile = os.path.join(os.environ['HOME'], '.globus/userkey.pem') cfile = os.path.join(os.environ['HOME'], '.globus/usercert.pem') if os.path.isfile(kfile): if not (check_0600(kfile) or check_0400(kfile)): msg = "File %s has weak permission settings, try" % kfile print_warning(msg) print "chmod 0400 %s" % kfile else: print_error("File %s does not exists, grid/cp commands will not work" % kfile) if os.path.isfile(cfile): if not (check_0600(cfile) or check_0400(cfile)): msg = "File %s has weak permission settings, try" % cfile print_warning(msg) print "chmod 0600 %s" % cfile else: msg = "File %s does not exists, grid/cp commands will not work" % cfile print_error(msg)
def copy_lfn(lfn, dst, verbose=0, background=False, overwrite=False):
    """
    Copy lfn to destination.  Tries the configured transfer method
    (CMSSH_TRANSFER_METHOD, default xrdcp) first, then falls back to
    lcgcp/srmcp.  Returns the transfer status string ('fail' on clash).
    """
    fname = None  # pre-existing local file clashing with dst, if any
    if overwrite:
        if os.path.isfile(dst):
            os.remove(dst)
        if lfn_exists(lfn, dst):
            if os.path.isdir(dst):
                fname = lfn.split('/')[-1]
                if os.path.exists(os.path.join(dst, fname)):
                    os.remove(os.path.join(dst, fname))
    else:
        if lfn_exists(lfn, dst):
            if os.path.isdir(dst):
                fname = os.path.join(dst, lfn.split('/')[-1])
                if not os.path.exists(fname):
                    fname = None
            elif os.path.isfile(dst) and os.path.exists(dst):
                fname = dst
            else:
                fname = None
                # fixed: original message dropped the "% dst" argument,
                # printing the literal '%s' placeholder
                print_warning('Destination %s is not local disk' % dst)
            if fname:
                print_warning('File %s already exists' % fname)
                return 'fail'
    method = os.environ.get('CMSSH_TRANSFER_METHOD', 'xrdcp')
    status = FM_SINGLETON.copy(lfn, dst, method, verbose, background)
    if status == 'fail':
        print_warning('xrdcp fails to copy file, fallback to GRID middleware mechanism')
        if os.environ.get('LCG_CP', ''):
            status = FM_SINGLETON.copy(lfn, dst, 'lcgcp', verbose, background)
        else:
            status = FM_SINGLETON.copy(lfn, dst, 'srmcp', verbose, background)
    return status
def copy_lfn(lfn, dst, verbose=0, background=False, overwrite=False):
    """
    Copy lfn to destination.  Tries the configured transfer method
    (CMSSH_TRANSFER_METHOD, default xrdcp) first, then falls back to
    lcgcp/srmcp.  Returns the transfer status string ('fail' on clash).
    """
    fname = None  # pre-existing local file clashing with dst, if any
    if overwrite:
        if os.path.isfile(dst):
            os.remove(dst)
        if lfn_exists(lfn, dst):
            if os.path.isdir(dst):
                fname = lfn.split('/')[-1]
                if os.path.exists(os.path.join(dst, fname)):
                    os.remove(os.path.join(dst, fname))
    else:
        if lfn_exists(lfn, dst):
            if os.path.isdir(dst):
                fname = os.path.join(dst, lfn.split('/')[-1])
                if not os.path.exists(fname):
                    fname = None
            elif os.path.isfile(dst) and os.path.exists(dst):
                fname = dst
            else:
                fname = None
                # fixed: original message dropped the "% dst" argument,
                # printing the literal '%s' placeholder
                print_warning('Destination %s is not local disk' % dst)
            if fname:
                print_warning('File %s already exists' % fname)
                return 'fail'
    method = os.environ.get('CMSSH_TRANSFER_METHOD', 'xrdcp')
    status = FM_SINGLETON.copy(lfn, dst, method, verbose, background)
    if status == 'fail':
        print_warning(
            'xrdcp fails to copy file, fallback to GRID middleware mechanism')
        if os.environ.get('LCG_CP', ''):
            status = FM_SINGLETON.copy(lfn, dst, 'lcgcp', verbose, background)
        else:
            status = FM_SINGLETON.copy(lfn, dst, 'srmcp', verbose, background)
    return status
def cmscrab(arg): """ Execute CRAB command, help is available at https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq """ msg = \ 'CRAB FAQ: https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq' print_info(msg) # check if release version and work area are set (should be set at cmsrel) rel = os.environ.get('CMSSW_VERSION', None) work_area = os.environ.get('CMSSW_WORKAREA', None) if not rel or not work_area: msg = 'In order to run crab command you must ' msg += 'run ' + msg_blue('cmsrel') + ' command' print_error(msg) return # check existence of crab.cfg crab_dir = os.path.join(work_area, 'crab') crab_cfg = os.path.join(crab_dir, 'crab.cfg') if not os.path.isdir(crab_dir): os.makedirs(crab_dir) os.chdir(crab_dir) if not os.path.isfile(crab_cfg): msg = 'No crab.cfg file found in %s' % crab_dir print_warning(msg) msg = 'Would you like to create one' if user_input(msg, default='N'): with open('crab.cfg', 'w') as config: config.write(crabconfig()) msg = 'Your crab.cfg has been created, please edit it ' msg += 'appropriately and re-run crab command' print_info(msg) print "cwd:", os.getcwd() return if os.uname()[0] == 'Darwin' and arg == '-submit': crab_submit_remotely(rel, work_area) return cmd = 'source $CRAB_ROOT/crab.sh; crab %s' % arg cmsexe(cmd)
def check_release_arch(rel): "Check release/architecture" # check if given release name is installed on user system rel_dir = '%s/cms/cmssw/%s' % (os.environ['SCRAM_ARCH'], rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): return 'ok' output = [] for arch, status in get_release_arch(rel): if not status: msg = '%s release is not officially supported under %s' \ % (rel, arch) print_warning(msg) if arch != os.environ['SCRAM_ARCH']: msg = 'Your SCRAM_ARCH=%s, while found arch=%s' \ % (os.environ['SCRAM_ARCH'], arch) print_warning(msg) msg = '\n%s/%s is not installed within cmssh, proceed' \ % (rel, arch) if user_input(msg, default='N'): os.environ['SCRAM_ARCH'] = arch if not os.path.isdir(\ os.path.join(os.environ['VO_CMS_SW_DIR'], arch)): bootstrap(arch) return 'ok' else: msg = '%s/%s rejected by user' % (rel, arch) output.append(msg) if output: return ', '.join(output) osname, osarch = osparameters() if osname == 'osx' and osarch == 'ia32': return 'OSX/ia32 is not supported in CMSSW' return 'no match'
def get_data(url, kwargs=None, headers=None, verbose=None, decoder='json', post=False):
    """
    Fetch data for the given URL.  pycurl (RequestHandler) is the
    primary transport; any failure falls back to urllib.
    """
    if not headers and url.find('DBSReader') != -1:
        # DBS3 always needs that
        headers = {'Accept': 'application/json'}
    ckey = None
    cert = os.path.join(os.environ['HOME'], '.globus/usercert.pem')
    try:
        # primary path: pycurl-based request manager
        mgr = RequestHandler()
        with working_pem(PEMMGR.pem) as ckey:
            res = mgr.get_data(url, kwargs, headers, post, ckey, cert,
                               verbose=verbose)
            return json.load(res) if decoder == 'json' else res.read()
    except Exception as exc:
        if verbose:
            print_error(exc)
            print_warning('Fall back to urllib')
    # fallback path: plain urllib helper
    with working_pem(PEMMGR.pem) as ckey:
        return get_data_helper(url, kwargs, headers, verbose, decoder,
                               post, ckey, cert)
def lumidb(run_lumi_dict, action='delivered', lumi_report=False):
    """
    Call lumidb to get luminosity numbers.

    :param run_lumi_dict: {run: [lumi-sections]} dict to look up
    :param action: one of overview/delivered/recorded/lumibyls/lumibylsXing
    :param lumi_report: when True, print a lumiReport summary
    :returns: (value, unit) for 'delivered'; (0, '/ub') when LumiDB has
        no data; other actions only print/write reports
    """
    actions = ['overview', 'delivered', 'recorded', 'lumibyls', 'lumibylsXing']
    if action not in actions:
        raise Exception('Unsupported action="%s", please check from %s'
                        % (action, actions))
    # NOTE(review): the three *Choices lists below are never used — confirm
    beamModeChoices = ['stable']
    amodetagChoices = ['PROTPHYS', 'IONPHYS', 'PAPHYS']
    xingAlgoChoices = ['OCC1', 'OCC2', 'ET']
    #
    # hard-wired defaults mirroring the lumiCalc2 command-line options
    #
    connect = 'frontier://LumiCalc/CMS_LUMI_PROD'
    authpath = None
    scalefactor = 1.0
    beamfluctuation = 0.2
    minbiasxsec = 69300.0  # minbias cross-section in ub
    xingMinLum = 1e-03  # Minimum per-bunch luminosity to print, default=1e-03/ub
    xingAlgo = 'OCC1'
    hltpath = None
    outputfile = None
    siteconfpath = None
    withoutNorm = False
    withoutCheckforupdate = False
    fillnum = None
    nowarning = True
    debug = False
    beamenergy = None
    amodetag = None
    reqrunmin = None
    reqfillmin = None
    reqtimemin = None
    reqrunmax = None
    reqfillmax = None
    reqtimemax = None
    timeFilter = [None, None]
    pbeammode = None
    iresults = []
    reqTrg = False
    reqHlt = False
    # open a read-only LumiDB session via frontier
    svc = sessionManager.sessionManager(connect, authpath, siteconfpath,
                                        debugON=debug)
    session = svc.openSession(isReadOnly=True,
                              cpp2sqltype=[('unsigned int', 'NUMBER(10)'),
                                           ('unsigned long long', 'NUMBER(20)')])
    ##############################################################
    # check run/ls list
    ##############################################################
    irunlsdict = {}
    rruns = []
    session.transaction().start(True)
    irunlsdict = run_lumi_dict
    iresults = []
    runlist = lumiCalcAPI.runList(session.nominalSchema(), fillnum,
                                  runmin=reqrunmin, runmax=reqrunmax,
                                  fillmin=reqfillmin, fillmax=reqfillmax,
                                  startT=reqtimemin, stopT=reqtimemax,
                                  l1keyPattern=None, hltkeyPattern=None,
                                  amodetag=amodetag, nominalEnergy=beamenergy,
                                  energyFlut=beamfluctuation,
                                  requiretrg=False, requirehlt=False)
    rruns = [val for val in runlist if val in irunlsdict.keys()]
    for selectedrun in irunlsdict.keys():
        # if there's further filter on the runlist, clean input dict
        if selectedrun not in rruns:
            del irunlsdict[selectedrun]
    ##############################################################
    # check datatag
    ##############################################################
    datatagid, datatagname = revisionDML.currentDataTag(
        session.nominalSchema())
    dataidmap = revisionDML.dataIdsByTagId(
        session.nominalSchema(), datatagid, runlist=rruns, withcomment=False)
    session.transaction().commit()
    if not dataidmap:
        print_warning('No data found in LumiDB for given set run lumi section')
        return 0, '/ub'  # return lumi, units
    normvalueDict = {}
    ##################
    # ls level
    ##################
    session.transaction().start(True)
    GrunsummaryData = lumiCalcAPI.runsummaryMap(session.nominalSchema(),
                                                irunlsdict)
    if action == 'delivered':
        result = lumiCalcAPI.deliveredLumiForIds(
            session.nominalSchema(), irunlsdict, dataidmap,
            runsummaryMap=GrunsummaryData, beamstatusfilter=pbeammode,
            timeFilter=timeFilter, normmap=normvalueDict, lumitype='HF')
        if lumi_report:
            lumiReport.toScreenTotDelivered(
                result, iresults, scalefactor, irunlsdict=irunlsdict,
                noWarning=nowarning, toFile=outputfile)
        # result {run:[lumilsnum(0),cmslsnum(1),timestamp(2),beamstatus(3),
        #   beamenergy(4),deliveredlumi(5),calibratedlumierr(6),
        #   (bxidxlist,bxvalues,bxerrs)(7),(bxidx,b1intensities,b2intensities)(8),
        #   fillnum(9),pu(10)]}
        totlumi = 0
        for run, val in result.items():
            for llist in val:
                if irunlsdict[run]:
                    if llist[0] in irunlsdict[run]:
                        # select only lumis from input run ls dict
                        totlumi += llist[5]
                else:
                    totlumi += llist[5]
        totlumival, lumiunit = CommonUtil.guessUnit(totlumi)
        return totlumival, lumiunit
    if action == 'overview':
        result = lumiCalcAPI.lumiForIds(
            session.nominalSchema(), irunlsdict, dataidmap,
            runsummaryMap=GrunsummaryData, beamstatusfilter=pbeammode,
            timeFilter=timeFilter, normmap=normvalueDict, lumitype='HF')
        lumiReport.toScreenOverview(
            result, iresults, scalefactor, irunlsdict=irunlsdict,
            noWarning=nowarning, toFile=outputfile)
    if action == 'lumibyls':
        if not hltpath:
            result = lumiCalcAPI.lumiForIds(
                session.nominalSchema(), irunlsdict, dataidmap,
                runsummaryMap=GrunsummaryData, beamstatusfilter=pbeammode,
                timeFilter=timeFilter, normmap=normvalueDict,
                lumitype='HF', minbiasXsec=minbiasxsec)
            lumiReport.toScreenLumiByLS(
                result, iresults, scalefactor, irunlsdict=irunlsdict,
                noWarning=nowarning, toFile=outputfile)
        else:
            # translate the HLT path spec into exact name or fnmatch pattern
            hltname = hltpath
            hltpat = None
            if hltname == '*' or hltname == 'all':
                hltname = None
            elif 1 in [c in hltname for c in '*?[]']:
                # is a fnmatch pattern
                hltpat = hltname
                hltname = None
            result = lumiCalcAPI.effectiveLumiForIds(
                session.nominalSchema(), irunlsdict, dataidmap,
                runsummaryMap=GrunsummaryData, beamstatusfilter=pbeammode,
                timeFilter=timeFilter, normmap=normvalueDict,
                hltpathname=hltname, hltpathpattern=hltpat,
                withBXInfo=False, bxAlgo=None, xingMinLum=xingMinLum,
                withBeamIntensity=False, lumitype='HF')
            lumiReport.toScreenLSEffective(
                result, iresults, scalefactor, irunlsdict=irunlsdict,
                noWarning=nowarning, toFile=outputfile)
    if action == 'recorded':
        # recorded actually means effective because it needs to show
        # all the hltpaths...
        hltname = hltpath
        hltpat = None
        if hltname is not None:
            if hltname == '*' or hltname == 'all':
                hltname = None
            elif 1 in [c in hltname for c in '*?[]']:
                # is a fnmatch pattern
                hltpat = hltname
                hltname = None
        result = lumiCalcAPI.effectiveLumiForIds(
            session.nominalSchema(), irunlsdict, dataidmap,
            runsummaryMap=GrunsummaryData, beamstatusfilter=pbeammode,
            timeFilter=timeFilter, normmap=normvalueDict,
            hltpathname=hltname, hltpathpattern=hltpat,
            withBXInfo=False, bxAlgo=None, xingMinLum=xingMinLum,
            withBeamIntensity=False, lumitype='HF')
        lumiReport.toScreenTotEffective(
            result, iresults, scalefactor, irunlsdict=irunlsdict,
            noWarning=nowarning, toFile=outputfile)
    if action == 'lumibylsXing':
        result = lumiCalcAPI.lumiForIds(
            session.nominalSchema(), irunlsdict, dataidmap,
            runsummaryMap=GrunsummaryData, beamstatusfilter=pbeammode,
            timeFilter=timeFilter, normmap=normvalueDict,
            withBXInfo=True, bxAlgo=xingAlgo, xingMinLum=xingMinLum,
            withBeamIntensity=False, lumitype='HF')
        outfile = outputfile
        if not outfile:
            print '[WARNING] no output file given. lumibylsXing writes per-bunch lumi only to default file lumibylsXing.csv'
            outfile = 'lumibylsXing.csv'
        lumiReport.toCSVLumiByLSXing(result, scalefactor, outfile,
                                     irunlsdict=irunlsdict,
                                     noWarning=nowarning)
    session.transaction().commit()
    del session
    del svc
def pfn_dst(lfn, dst, verbose=None):
    """
    Look-up LFN in Phedex and return pfn dst for further processing.
    Generator yielding (pfn, destination-path) pairs.
    """
    dstfname = None
    # destination names like T2_XX_Site are treated as CMS node names
    pat = re.compile('^T[0-9]_[A-Z]+(_)[A-Z]+')
    if pat.match(dst):
        dst_split = dst.split(':')
        dst = dst_split[0]
        if len(dst_split) == 1:  # copy to the node
            # NOTE(review): len == 1 yet dst_split[1] is accessed below,
            # which raises IndexError when this branch is taken — the
            # condition looks inverted (expects 'node:path'); confirm
            local_path = dst_split[1]
            for srm_path, lfn_match in resolve_srm_path(dst, verbose):
                lfn_pat = re.compile(lfn_match)
                if lfn_pat.match(lfn):
                    srm_path = srm_path.replace('\?', '?').replace('$1', local_path)
                    if verbose:
                        print "Resolve %s into %s" % (dst, srm_path)
                    dst = srm_path
        else:
            # bare node name: derive user SRM path for that node
            paths = [p for p in resolve_user_srm_path(dst, verbose=verbose)]
            dst = '%s/%s' % (paths[0], get_username())
        check_permission(dst, verbose)
    else:
        # local destination: normalize to a file:/// URI
        if dst.find('file:///') == -1:
            dstfname = dst.split('/')[-1]
            if dstfname == '.':
                dstfname = None
            if dst[0] == '/':  # absolute path
                if os.path.isdir(dst):
                    ddir = dst
                    dstfname = None
                else:
                    ddir = '/'.join(dst.split('/')[:-1])
                    if not os.path.isdir(ddir):
                        msg = 'Provided destination directory %s does not exists' % ddir
                        raise Exception(msg)
                dst = 'file:///%s' % ddir
            else:
                ddir = '/'.join(dst.split('/')[:-1]).replace(
                    '$PWD', os.getcwd())
                if os.path.isdir(ddir):
                    dst = 'file:///%s' % os.path.join(os.getcwd(), ddir)
                else:
                    dst = 'file:///%s' % os.getcwd()
    pfnlist = []
    if os.path.isfile(lfn) or lfn.find('file:///') != -1:  # local file
        pfn = lfn.replace('file:///', '')
        if pfn[0] != '/':
            pfn = 'file:///%s' % os.path.join(os.getcwd(), pfn)
        else:
            pfn = 'file:///%s' % pfn
        pfnlist = [pfn]
    else:
        # 'node:lfn' form maps directly via lfn2pfn; otherwise ask for replicas
        if lfn.find(':') != -1:
            node, lfn = lfn.split(':')
            params = {'node': node, 'lfn': lfn, 'protocol': 'srmv2'}
            method = 'lfn2pfn'
        else:
            params = {'se': '*', 'lfn': lfn}
            method = 'fileReplicas'
        json_dict = get_data(phedex_url(method), params)
        ddict = DotDict(json_dict)
        if verbose:
            print "Look-up LFN:"
            print lfn
        phedex = json_dict['phedex']
        if phedex.has_key('mapping'):
            if not phedex['mapping']:
                msg = "LFN: %s\n" % lfn
                msg += 'No replicas found\n'
                msg += str(json_dict)
                raise Exception(msg)
            filelist = ddict.get('phedex.mapping.pfn')
            if not filelist:
                filelist = []
            if isinstance(filelist, basestring):
                filelist = [filelist]
            for fname in filelist:
                pfnlist.append(fname)
        elif phedex.has_key('block') and not phedex['block']:
            # PhEDEx has no replicas; fall back to the original SE from DBS
            msg = 'No replicas found in PhEDEx, will try to get original SE from DBS'
            print_warning(msg)
            sename = get_dbs_se(lfn)
            msg = 'Orignal LFN site %s' % sename
            print_info(msg)
            mgr = SiteDBManager()
            pfnlist = lfn2pfn(lfn, sename, mgr)
        # resolve PFNs from block/file replicas (skipping T0 nodes)
        filelist = ddict.get('phedex.block.file')
        if not filelist:
            filelist = []
        for fname in filelist:
            for replica in fname['replica']:
                cmsname = replica['node']
                se = replica['se']
                if verbose:
                    print "found LFN on node=%s, se=%s" % (cmsname, se)
                if cmsname.count('T0', 0, 2) == 1:
                    continue  # skip T0's
                # query Phedex for PFN
                params = {'node': node, 'lfn': lfn, 'protocol': 'srmv2'}
                params = {'protocol': 'srmv2', 'lfn': lfn, 'node': cmsname}
                result = get_data(phedex_url('lfn2pfn'), params)
                try:
                    for item in result['phedex']['mapping']:
                        pfn = item['pfn']
                        if pfn not in pfnlist:
                            pfnlist.append(pfn)
                except:
                    msg = "Fail to look-up PFNs in Phedex\n" + str(result)
                    print msg
                    continue
    if verbose > 1:
        print "PFN list:"
        for pfn in pfnlist:
            print pfn
    # finally return pfn and dst paths w/ file for further processing
    for item in pfnlist:
        ifile = item.split("/")[-1] if not dstfname else dstfname
        yield item, '%s/%s' % (dst, ifile)
def pfn_dst(lfn, dst, verbose=None):
    """
    Look-up LFN in Phedex and return pfn dst for further processing.
    Generator yielding (pfn, destination-path) pairs.
    """
    dstfname = None
    # destination names like T2_XX_Site are treated as CMS node names
    pat = re.compile('^T[0-9]_[A-Z]+(_)[A-Z]+')
    if pat.match(dst):
        dst_split = dst.split(':')
        dst = dst_split[0]
        if len(dst_split) == 1:  # copy to the node
            # NOTE(review): len == 1 yet dst_split[1] is accessed below,
            # which raises IndexError when this branch is taken — the
            # condition looks inverted (expects 'node:path'); confirm
            local_path = dst_split[1]
            for srm_path, lfn_match in resolve_srm_path(dst, verbose):
                lfn_pat = re.compile(lfn_match)
                if lfn_pat.match(lfn):
                    srm_path = srm_path.replace('\?', '?').replace('$1', local_path)
                    if verbose:
                        print "Resolve %s into %s" % (dst, srm_path)
                    dst = srm_path
        else:
            # bare node name: derive user SRM path for that node
            paths = [p for p in resolve_user_srm_path(dst, verbose=verbose)]
            dst = '%s/%s' % (paths[0], get_username())
        check_permission(dst, verbose)
    else:
        # local destination: normalize to a file:/// URI
        if dst.find('file:///') == -1:
            dstfname = dst.split('/')[-1]
            if dstfname == '.':
                dstfname = None
            if dst[0] == '/':  # absolute path
                if os.path.isdir(dst):
                    ddir = dst
                    dstfname = None
                else:
                    ddir = '/'.join(dst.split('/')[:-1])
                    if not os.path.isdir(ddir):
                        msg = 'Provided destination directory %s does not exists' % ddir
                        raise Exception(msg)
                dst = 'file:///%s' % ddir
            else:
                ddir = '/'.join(dst.split('/')[:-1]).replace('$PWD', os.getcwd())
                if os.path.isdir(ddir):
                    dst = 'file:///%s' % os.path.join(os.getcwd(), ddir)
                else:
                    dst = 'file:///%s' % os.getcwd()
    pfnlist = []
    if os.path.isfile(lfn) or lfn.find('file:///') != -1:  # local file
        pfn = lfn.replace('file:///', '')
        if pfn[0] != '/':
            pfn = 'file:///%s' % os.path.join(os.getcwd(), pfn)
        else:
            pfn = 'file:///%s' % pfn
        pfnlist = [pfn]
    else:
        # 'node:lfn' form maps directly via lfn2pfn; otherwise ask for replicas
        if lfn.find(':') != -1:
            node, lfn = lfn.split(':')
            params = {'node':node, 'lfn':lfn, 'protocol':'srmv2'}
            method = 'lfn2pfn'
        else:
            params = {'se':'*', 'lfn':lfn}
            method = 'fileReplicas'
        json_dict = get_data(phedex_url(method), params)
        ddict = DotDict(json_dict)
        if verbose:
            print "Look-up LFN:"
            print lfn
        phedex = json_dict['phedex']
        if phedex.has_key('mapping'):
            if not phedex['mapping']:
                msg = "LFN: %s\n" % lfn
                msg += 'No replicas found\n'
                msg += str(json_dict)
                raise Exception(msg)
            filelist = ddict.get('phedex.mapping.pfn')
            if not filelist:
                filelist = []
            if isinstance(filelist, basestring):
                filelist = [filelist]
            for fname in filelist:
                pfnlist.append(fname)
        elif phedex.has_key('block') and not phedex['block']:
            # PhEDEx has no replicas; fall back to the original SE from DBS
            msg = 'No replicas found in PhEDEx, will try to get original SE from DBS'
            print_warning(msg)
            sename = get_dbs_se(lfn)
            msg = 'Orignal LFN site %s' % sename
            print_info(msg)
            mgr = SiteDBManager()
            pfnlist = lfn2pfn(lfn, sename, mgr)
        # resolve PFNs from block/file replicas (skipping T0 nodes)
        filelist = ddict.get('phedex.block.file')
        if not filelist:
            filelist = []
        for fname in filelist:
            for replica in fname['replica']:
                cmsname = replica['node']
                se = replica['se']
                if verbose:
                    print "found LFN on node=%s, se=%s" % (cmsname, se)
                if cmsname.count('T0', 0, 2) == 1:
                    continue  # skip T0's
                # query Phedex for PFN
                params = {'protocol':'srmv2', 'lfn':lfn, 'node':cmsname}
                result = get_data(phedex_url('lfn2pfn'), params)
                try:
                    for item in result['phedex']['mapping']:
                        pfn = item['pfn']
                        if pfn not in pfnlist:
                            pfnlist.append(pfn)
                except:
                    msg = "Fail to look-up PFNs in Phedex\n" + str(result)
                    print msg
                    continue
    if verbose > 1:
        print "PFN list:"
        for pfn in pfnlist:
            print pfn
    # finally return pfn and dst paths w/ file for further processing
    for item in pfnlist:
        ifile = item.split("/")[-1] if not dstfname else dstfname
        yield item, '%s/%s' % (dst, ifile)
def lumidb(run_lumi_dict, action='delivered', lumi_report=False):
    """
    Call lumidb to get luminosity numbers.

    :param run_lumi_dict: {run: [lumi-sections]} dict to look up
    :param action: one of overview/delivered/recorded/lumibyls/lumibylsXing
    :param lumi_report: when True, print a lumiReport summary
    :returns: (value, unit) for 'delivered'; (0, '/ub') when LumiDB has
        no data; other actions only print/write reports
    """
    actions = ['overview', 'delivered', 'recorded', 'lumibyls', 'lumibylsXing']
    if action not in actions:
        raise Exception('Unsupported action="%s", please check from %s'
                        % (action, actions))
    # NOTE(review): the three *Choices lists below are never used — confirm
    beamModeChoices = ['stable']
    amodetagChoices = ['PROTPHYS', 'IONPHYS', 'PAPHYS']
    xingAlgoChoices = ['OCC1', 'OCC2', 'ET']
    #
    # hard-wired defaults mirroring the lumiCalc2 command-line options
    #
    connect = 'frontier://LumiCalc/CMS_LUMI_PROD'
    authpath = None
    scalefactor = 1.0
    beamfluctuation = 0.2
    minbiasxsec = 69300.0  # minbias cross-section in ub
    xingMinLum = 1e-03  # Minimum per-bunch luminosity to print, default=1e-03/ub
    xingAlgo = 'OCC1'
    hltpath = None
    outputfile = None
    siteconfpath = None
    withoutNorm = False
    withoutCheckforupdate = False
    fillnum = None
    nowarning = True
    debug = False
    beamenergy = None
    amodetag = None
    reqrunmin = None
    reqfillmin = None
    reqtimemin = None
    reqrunmax = None
    reqfillmax = None
    reqtimemax = None
    timeFilter = [None, None]
    pbeammode = None
    iresults = []
    reqTrg = False
    reqHlt = False
    # open a read-only LumiDB session via frontier
    svc = sessionManager.sessionManager(connect, authpath, siteconfpath,
                                        debugON=debug)
    session = svc.openSession(isReadOnly=True,
                              cpp2sqltype=[('unsigned int', 'NUMBER(10)'),
                                           ('unsigned long long', 'NUMBER(20)')])
    ##############################################################
    # check run/ls list
    ##############################################################
    irunlsdict = {}
    rruns = []
    session.transaction().start(True)
    irunlsdict = run_lumi_dict
    iresults = []
    runlist = lumiCalcAPI.runList(session.nominalSchema(), fillnum,
                                  runmin=reqrunmin, runmax=reqrunmax,
                                  fillmin=reqfillmin, fillmax=reqfillmax,
                                  startT=reqtimemin, stopT=reqtimemax,
                                  l1keyPattern=None, hltkeyPattern=None,
                                  amodetag=amodetag, nominalEnergy=beamenergy,
                                  energyFlut=beamfluctuation,
                                  requiretrg=False, requirehlt=False)
    rruns = [val for val in runlist if val in irunlsdict.keys()]
    for selectedrun in irunlsdict.keys():
        # if there's further filter on the runlist, clean input dict
        if selectedrun not in rruns:
            del irunlsdict[selectedrun]
    ##############################################################
    # check datatag
    ##############################################################
    datatagid, datatagname = revisionDML.currentDataTag(session.nominalSchema())
    dataidmap = revisionDML.dataIdsByTagId(
        session.nominalSchema(), datatagid, runlist=rruns, withcomment=False)
    session.transaction().commit()
    if not dataidmap:
        print_warning('No data found in LumiDB for given set run lumi section')
        return 0, '/ub'  # return lumi, units
    normvalueDict = {}
    ##################
    # ls level
    ##################
    session.transaction().start(True)
    GrunsummaryData = lumiCalcAPI.runsummaryMap(session.nominalSchema(),
                                                irunlsdict)
    if action == 'delivered':
        result = lumiCalcAPI.deliveredLumiForIds(
            session.nominalSchema(), irunlsdict, dataidmap,
            runsummaryMap=GrunsummaryData, beamstatusfilter=pbeammode,
            timeFilter=timeFilter, normmap=normvalueDict, lumitype='HF')
        if lumi_report:
            lumiReport.toScreenTotDelivered(
                result, iresults, scalefactor, irunlsdict=irunlsdict,
                noWarning=nowarning, toFile=outputfile)
        # result {run:[lumilsnum(0),cmslsnum(1),timestamp(2),beamstatus(3),
        #   beamenergy(4),deliveredlumi(5),calibratedlumierr(6),
        #   (bxidxlist,bxvalues,bxerrs)(7),(bxidx,b1intensities,b2intensities)(8),
        #   fillnum(9),pu(10)]}
        totlumi = 0
        for run, val in result.items():
            for llist in val:
                if irunlsdict[run]:
                    if llist[0] in irunlsdict[run]:
                        # select only lumis from input run ls dict
                        totlumi += llist[5]
                else:
                    totlumi += llist[5]
        totlumival, lumiunit = CommonUtil.guessUnit(totlumi)
        return totlumival, lumiunit
    if action == 'overview':
        result = lumiCalcAPI.lumiForIds(
            session.nominalSchema(), irunlsdict, dataidmap,
            runsummaryMap=GrunsummaryData, beamstatusfilter=pbeammode,
            timeFilter=timeFilter, normmap=normvalueDict, lumitype='HF')
        lumiReport.toScreenOverview(
            result, iresults, scalefactor, irunlsdict=irunlsdict,
            noWarning=nowarning, toFile=outputfile)
    if action == 'lumibyls':
        if not hltpath:
            result = lumiCalcAPI.lumiForIds(
                session.nominalSchema(), irunlsdict, dataidmap,
                runsummaryMap=GrunsummaryData, beamstatusfilter=pbeammode,
                timeFilter=timeFilter, normmap=normvalueDict,
                lumitype='HF', minbiasXsec=minbiasxsec)
            lumiReport.toScreenLumiByLS(
                result, iresults, scalefactor, irunlsdict=irunlsdict,
                noWarning=nowarning, toFile=outputfile)
        else:
            # translate the HLT path spec into exact name or fnmatch pattern
            hltname = hltpath
            hltpat = None
            if hltname == '*' or hltname == 'all':
                hltname = None
            elif 1 in [c in hltname for c in '*?[]']:
                # is a fnmatch pattern
                hltpat = hltname
                hltname = None
            result = lumiCalcAPI.effectiveLumiForIds(
                session.nominalSchema(), irunlsdict, dataidmap,
                runsummaryMap=GrunsummaryData, beamstatusfilter=pbeammode,
                timeFilter=timeFilter, normmap=normvalueDict,
                hltpathname=hltname, hltpathpattern=hltpat,
                withBXInfo=False, bxAlgo=None, xingMinLum=xingMinLum,
                withBeamIntensity=False, lumitype='HF')
            lumiReport.toScreenLSEffective(
                result, iresults, scalefactor, irunlsdict=irunlsdict,
                noWarning=nowarning, toFile=outputfile)
    if action == 'recorded':
        # recorded actually means effective because it needs to show
        # all the hltpaths...
        hltname = hltpath
        hltpat = None
        if hltname is not None:
            if hltname == '*' or hltname == 'all':
                hltname = None
            elif 1 in [c in hltname for c in '*?[]']:
                # is a fnmatch pattern
                hltpat = hltname
                hltname = None
        result = lumiCalcAPI.effectiveLumiForIds(
            session.nominalSchema(), irunlsdict, dataidmap,
            runsummaryMap=GrunsummaryData, beamstatusfilter=pbeammode,
            timeFilter=timeFilter, normmap=normvalueDict,
            hltpathname=hltname, hltpathpattern=hltpat,
            withBXInfo=False, bxAlgo=None, xingMinLum=xingMinLum,
            withBeamIntensity=False, lumitype='HF')
        lumiReport.toScreenTotEffective(
            result, iresults, scalefactor, irunlsdict=irunlsdict,
            noWarning=nowarning, toFile=outputfile)
    if action == 'lumibylsXing':
        result = lumiCalcAPI.lumiForIds(
            session.nominalSchema(), irunlsdict, dataidmap,
            runsummaryMap=GrunsummaryData, beamstatusfilter=pbeammode,
            timeFilter=timeFilter, normmap=normvalueDict,
            withBXInfo=True, bxAlgo=xingAlgo, xingMinLum=xingMinLum,
            withBeamIntensity=False, lumitype='HF')
        outfile = outputfile
        if not outfile:
            print '[WARNING] no output file given. lumibylsXing writes per-bunch lumi only to default file lumibylsXing.csv'
            outfile = 'lumibylsXing.csv'
        lumiReport.toCSVLumiByLSXing(result, scalefactor, outfile,
                                     irunlsdict=irunlsdict,
                                     noWarning=nowarning)
    session.transaction().commit()
    del session
    del svc
def cmsrel(rel):
    """
    cmssh release setup command, it setups CMSSW environment and
    creates user based directory structure.

    Examples:
        cmssh> cmsrel # reset CMSSW environment to cmssh one
        cmssh> cmsrel CMSSW_5_2_4
    """
    ipython = get_ipython()
    rel = rel.strip()
    # no argument (or reset keyword): wipe release symlinks and restore
    # the plain cmssh environment
    if not rel or rel in ['reset', 'clear', 'clean']:
        path = os.environ['CMSSH_ROOT']
        for idir in ['external', 'lib', 'root']:
            pdir = os.path.join(path, 'install/lib/release_%s' % idir)
            if os.path.islink(pdir):
                os.remove(pdir)
            if os.path.isdir(pdir):
                shutil.rmtree(pdir)
            os.makedirs(pdir)
        # Set cmssh prompt
        prompt = 'cms-sh'
        ipython.prompt_manager.in_template = '%s|\#> ' % prompt
        return
    # check if given release name is installed on user system
    rel_arch = None
    for arch in cms_architectures():
        rel_dir = '%s/cms/cmssw/%s' % (arch, rel)
        if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)):
            rel_arch = arch
            break
    if not rel_arch:
        msg = 'Release ' + msg_red(rel)
        msg += ' is not yet installed on your system.\n'
        msg += 'Use ' + msg_green('releases')
        msg += ' command to list available releases.\n'
        msg += 'Use ' + msg_green('install %s' % rel)
        msg += ' command to install given release.'
        print msg
        return
    # set release architecture
    os.environ['SCRAM_ARCH'] = rel_arch
    # setup environment
    cmssw_dir = os.environ.get('CMSSW_RELEASES', os.getcwd())
    if not os.path.isdir(cmssw_dir):
        os.makedirs(cmssw_dir)
    root = os.environ['CMSSH_ROOT']
    idir = os.environ['CMSSH_INSTALL_DIR']
    base = os.path.realpath('%s/CMSSW' % root)
    path = '%s/%s/cms/cmssw/%s' % (base, rel_arch, rel)
    os.environ['CMSSW_BASE'] = os.path.join(cmssw_dir, rel)
    os.environ['CMSSW_RELEASE_BASE'] = path
    # re-create local FWCore/DataFormats python package stubs
    for pkg in ['FWCore', 'DataFormats']:
        pdir = '%s/%s' % (idir, pkg)
        if os.path.exists(pdir):
            shutil.rmtree(pdir)
        os.mkdir(pdir)
        touch(os.path.join(pdir, '__init__.py'))
    # symlink the release python packages into the cmssh install area
    pkgs = ['Framework', 'GuiBrowsers', 'Integration', 'MessageLogger',
            'MessageService', 'Modules', 'ParameterSet', 'PythonUtilities',
            'Services', 'Utilities']
    for pkg in pkgs:
        link = '%s/src/FWCore/%s/python' % (path, pkg)
        dst = '%s/FWCore/%s' % (idir, pkg)
        os.symlink(link, dst)
    link = '%s/src/DataFormats/FWLite/python' % path
    dst = '%s/DataFormats/FWLite' % idir
    os.symlink(link, dst)
    # point release_external/release_lib at this release's libraries
    for lib in ['external', 'lib']:
        link = '%s/%s/%s' % (path, lib, rel_arch)
        dst = '%s/install/lib/release_%s' % (root, lib)
        if os.path.islink(dst):
            os.remove(dst)
        else:
            # NOTE(review): rmtree raises OSError when dst does not
            # exist at all — confirm dst is always pre-created
            shutil.rmtree(dst)
        os.symlink(link, dst)
    # switch to given release
    os.environ['CMSSW_VERSION'] = rel
    os.environ['CMSSW_WORKAREA'] = os.path.join(cmssw_dir, rel)
    if os.path.isdir(os.path.join(cmssw_dir, rel + '/src')):
        os.chdir(os.path.join(cmssw_dir, rel + '/src'))
    else:
        # work area not yet created: bootstrap it with scram
        os.chdir(cmssw_dir)
        cmd = "scramv1 project CMSSW %s" % rel
        run(cmd)
        os.chdir(os.path.join(rel, 'src'))
    # get ROOT from run-time environment
    cmd = 'eval `scramv1 runtime -sh`; env | grep ^ROOTSYS='
    stdout, stderr = execmd(cmd)
    if stderr:
        print "While executing cmd=%s" % cmd
        print_warning(stderr)
    rootsys = stdout.replace('\n', '').replace('ROOTSYS=', '')
    dst = '%s/install/lib/release_root' % root
    if os.path.exists(dst):
        if os.path.islink(dst):
            os.remove(dst)
        else:
            shutil.rmtree(dst)
    os.symlink(rootsys, dst)
    # set edm utils for given release
    ipython = get_ipython()
    rdir = '%s/bin/%s' % (rel_dir, rel_arch)
    reldir = os.path.join(os.environ['VO_CMS_SW_DIR'], rdir)
    for name in os.listdir(reldir):
        fname = os.path.join(reldir, name)
        if name.find('edm') == 0 and os.path.isfile(fname):
            # we use Magic(cmd).execute we don't need
            # to add scramv1 command in front of edm one, since
            # execute method will run in current shell environment
            # old command for reference:
            # cmd = "eval `scramv1 runtime -sh`; %s" % fname
            cmd = fname
            ipython.register_magic_function(Magic(cmd).execute, 'line', name)
    # Set cmssh prompt
    ipython.prompt_manager.in_template = '%s|\#> ' % rel
    # final message
    print "%s is ready, cwd: %s" % (rel, os.getcwd())