def __init__(self, task_id, panda_id, sample_id, files_to_check, check_interval,
             panda_url, certfile, keyfile, verbose, offline_mode=False, dump_file=None):
    """Set up a checkpoint uploader for one (task, job, sample) triple.

    task_id/panda_id/sample_id identify what is being uploaded;
    files_to_check and check_interval control the polling loop;
    panda_url plus certfile/keyfile describe how to reach the PanDA server.
    When offline_mode is set, dump_file is presumably used instead of the
    server (assumption from the parameter names -- TODO confirm).
    """
    # identifiers of the task / job / sample this uploader works for
    self.task_id = task_id
    self.panda_id = panda_id
    self.sample_id = sample_id
    # checkpoint files to watch and how often to look at them
    self.files_to_check = files_to_check
    self.check_interval = check_interval
    # PanDA server endpoint and the client certificate/key used to reach it
    self.panda_url = panda_url
    self.certfile = certfile
    self.keyfile = keyfile
    self.verbose = verbose
    self.offline_mode = offline_mode
    self.dump_file = dump_file
    # request body; filled in later by the caller/uploader logic
    self.body = None
    # output name combines the task and sample identifiers
    self.output_filename = '{0}_{1}'.format(task_id, sample_id)
    # dedicated log file for this component
    self.log_filename = 'log.checkpoint_uploader'
    self.tmpLog = PLogger.getPandaLogger(log_file_name=self.log_filename)
import re
import sys
from dq2.clientapi.DQ2 import DQ2
from pandawnutil.wnlogger import PLogger

# have to reset logger since DQ2 tweaks logger
PLogger.resetLogger()

# exit codes returned to the wrapper
EC_Failed = 255
EC_Config = 100


# list datasets by GUIDs
def listDatasetsByGUIDs(guids, dsFilter, tmpLog, verbose=False, forColl=False):
    """Resolve the datasets that contain the given file GUIDs via the DQ2 API.

    guids    -- list of file GUIDs to look up
    dsFilter -- comma-separated dataset name patterns ('' means no filter)
    tmpLog   -- PanDA logger used for error reporting
    Exits the process with EC_Failed when the DQ2 call raises.

    NOTE(review): this chunk is truncated -- the loop that fills
    retMap/allMap is not visible here, so the return contract cannot be
    documented from this view.
    """
    # DQ2 API
    try:
        dq2 = DQ2()
        # NOTE(review): the return value is discarded here; presumably the
        # real per-GUID lookup happens further down in the (unseen) loop --
        # confirm against the full file
        dq2.listDatasetsByGUIDs(guids)
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit;
        # kept as-is -- report the failure and abort with a nonzero exit code
        errtype, errvalue = sys.exc_info()[:2]
        errStr = "dq2.listDatasetsByGUIDs failed with %s:%s" % (errtype, errvalue)
        tmpLog.error(errStr)
        sys.exit(EC_Failed)
    # get filter: split the comma-separated pattern list, if any
    dsFilters = []
    if dsFilter != '':
        dsFilters = dsFilter.split(',')
    # accumulators for the (unseen) GUID loop below
    retMap = {}
    allMap = {}
    iLookUp = 0
# --eventPick* family: turn a run/event list into input datasets
optP.add_option('--eventPickDS', action='store', dest='eventPickDS', default='',
                type='string',
                help='A comma-separated list of pattern strings. Datasets which are converted from the run/event list will be used when they match with one of the pattern strings. Either \ or "" is required when a wild-card is used. e.g., data\*')
optP.add_option('--eventPickStagedDS', action='store', dest='eventPickStagedDS', default='',
                type='string',
                help='--eventPick options create a temporary dataset to stage-in interesting files when those files are available only on TAPE, and then a stage-in request is automatically sent to DaTRI. Once DaTRI transfers the dataset to DISK you can use the dataset as an input using this option')
optP.add_option('--eventPickAmiTag', action='store', dest='eventPickAmiTag', default='',
                type='string',
                help='AMI tag used to match TAG collections names. This option is required when you are interested in older data than the latest one. Either \ or "" is required when a wild-card is used. e.g., f2\*')
# dummy parameters: accepted for command-line compatibility but not used here
optP.add_option('--oldPrefix', action='store', dest='oldPrefix')
optP.add_option('--newPrefix', action='store', dest='newPrefix')
optP.add_option('--lfcHost', action='store', dest='lfcHost')
optP.add_option('--inputGUIDs', action='store', dest='inputGUIDs')
optP.add_option('--usePFCTurl', action='store', dest='usePFCTurl')

# get logger
tmpLog = PLogger.getPandaLogger()
tmpLog.info('start')

# parse options
options, args = optP.parse_args()
if options.verbose:
    # dump all parsed options for debugging (Python 2 print statement)
    tmpLog.debug("=== parameters ===")
    print options
    print

# save current dir so we can refer back to it after changing directories
currentDir = os.getcwd()
currentDirFiles = os.listdir('.')
tmpLog.info("Running in %s " % currentDir)

# create work dir
import re
import sys
from rucio.client import Client as RucioClient
from pandawnutil.wnlogger import PLogger

# have to reset logger since the client API tweaks the logger
# NOTE(review): comment originally referenced DQ2; this version imports rucio
PLogger.resetLogger()

# exit codes returned to the wrapper
EC_Failed = 255
EC_Config = 100


# list datasets by GUIDs
def listDatasetsByGUIDs(guids, dsFilter, tmpLog, verbose=False, forColl=False):
    """Resolve the datasets that contain the given file GUIDs via the rucio API.

    guids    -- list of file GUIDs to look up
    dsFilter -- comma-separated dataset name patterns ('' means no filter)
    tmpLog   -- PanDA logger used for error reporting
    Exits the process with EC_Failed when the rucio client cannot be created.

    NOTE(review): this chunk is truncated -- the GUID loop announced by the
    trailing comment is not visible here, so the return contract cannot be
    documented from this view.
    """
    # rucio API
    try:
        client = RucioClient()
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit;
        # kept as-is -- report the failure and abort with a nonzero exit code
        errtype, errvalue = sys.exc_info()[:2]
        errStr = "failed to get rucio API with %s:%s" % (errtype, errvalue)
        tmpLog.error(errStr)
        sys.exit(EC_Failed)
    # get filter: split the comma-separated pattern list, if any
    dsFilters = []
    if dsFilter != '':
        dsFilters = dsFilter.split(',')
    # accumulators for the GUID loop below (loop itself is outside this view)
    retMap = {}
    allMap = {}
    iLookUp = 0
    guidLfnMap = {}
    checkedDSList = []
    # loop over all GUIDs