# Submission pass: for every known primary dataset (PD), collect the
# lumisections still marked 'new' or 'failed' in `scanstatus` and, for each
# reconstruction version of that PD, build a run/lumi JSON restricted to the
# runs that actually exist in the corresponding DAS dataset.
#
# NOTE(review): this is Python 2 code (print statements) that was collapsed
# onto a single physical line; the indentation below is reconstructed from
# the statement sequence. Relies on names defined elsewhere in the script:
# knownPDs, dbcursor, recoid, datasets, dasQuery.
for pd, datasetid in knownPDs.items():
    # pending work = rows whose status is 'new' or 'failed' for this reco + PD
    dbcursor.execute('SELECT `run`, `lumi` FROM `scanstatus` WHERE `recoid` = %s AND `datasetid` = %s AND (`status` LIKE \'new\' OR `status` LIKE \'failed\') ORDER BY `run`, `lumi`', (recoid, datasetid))
    if dbcursor.rowcount <= 0:
        print ' No job to submit for', pd
        continue
    # materialize the cursor once so we can re-filter it per reco version below
    lumis = [(run, lumi) for run, lumi in dbcursor]
    if len(lumis) == 0:
        continue
    print ' ' + pd
    # one pass per reconstruction version of this primary dataset
    for recoVersion in [ds[1] for ds in datasets if ds[0] == pd]:
        runsInDS = []
        for row in dasQuery('run dataset=/%s/%s/RECO' % (pd, recoVersion)):
            # example output (abridged; only row['run'][0]['run_number'] is used):
            # {u'das': {...}, u'run': [{u'run_number': 256584}], u'_id': u'...'}
            runsInDS.append(row['run'][0]['run_number'])
        # keep only the pending lumis whose run is present in this DAS dataset
        lumisDS = [(run, lumi) for run, lumi in lumis if run in runsInDS]
        # make json: list of (run, [lumi ranges]) entries, runs in sorted order
        # (the SELECT above orders by run, lumi)
        jsonCont = []
        currentLumiRange = None
        for run, lumi in lumisDS:
            if len(jsonCont) == 0:
                jsonCont.append((run, []))
            if run != jsonCont[-1][0]:
                # run changed: close out the previous run's open lumi range
                # and start a fresh entry for the new run
                jsonCont[-1][1].append(currentLumiRange)
                jsonCont.append((run, []))
            # NOTE(review): the visible chunk ends here — the code that updates
            # currentLumiRange and appends the final open range after the loop
            # is not in view; confirm against the full file before refactoring.
# Injection pass: for every configured reconstruction, discover its primary
# datasets, register any PD not yet in `primarydatasets`, then walk the DAS
# run/lumi map of each dataset and write one CSV line per DCS-masked
# lumisection (recoid,datasetid,run,lumi,'new') to `infile` for later DB load.
#
# NOTE(review): this is Python 2 code (print statements) that was collapsed
# onto a single physical line; the indentation below is reconstructed from
# the statement sequence. Relies on names defined elsewhere in the script:
# config, recoids, datasets, knownPDs, dbcursor, dasQuery, dcsMask, infile.
for reco in config.reconstructions:
    print 'Checking for new lumis in', reco
    recoid = recoids[reco]
    # loop over primary datasets: match datasets whose reco version, with the
    # trailing '-vN' suffix stripped, equals this reconstruction name
    for pd, recoVersion in [ds for ds in datasets if ds[1][:ds[1].rfind('-v')] == reco]:
        if pd not in knownPDs:
            # first time we see this PD: register it and cache its new row id
            dbcursor.execute('INSERT INTO `primarydatasets` (name) VALUES (%s)', (pd,))
            knownPDs[pd] = dbcursor.lastrowid
            print ' Inserted', pd, 'to the list of primary datasets to process.'
        datasetid = knownPDs[pd]
        # find new lumisections and inject to DB
        for row in dasQuery('run, lumi dataset=/' + pd + '/' + recoVersion + '/RECO'):
            # example output (abridged; one record per run):
            # {u'run': [{u'run_number': 256584}],
            #  u'lumi': [{u'number': [[3, 5], [7, 18], [20, 22]]}], u'das': {...}}
            if len(row['run']) == 0:
                continue
            run = row['run'][0]['run_number']
            # skip runs not covered by the DCS (detector status) mask
            if run not in dcsMask:
                continue
            # lumi ranges come as inclusive [first, last] pairs
            lumiranges = row['lumi'][0]['number']
            for first, last in lumiranges:
                for lumi in range(first, last + 1):
                    # NOTE(review): dcsMask[run] is presumably a collection of
                    # good lumi numbers — confirm its exact type elsewhere
                    if lumi in dcsMask[run]:
                        infile.write('%d,%d,%d,%d,\'new\'\n' % (recoid, datasetid, run, lumi))
for reco in config.reconstructions: print 'Checking for new lumis in', reco recoid = recoids[reco] # loop over primary datasets for pd, recoVersion in [ds for ds in datasets if ds[1][:ds[1].rfind('-v')] == reco]: if pd not in knownPDs: dbcursor.execute('INSERT INTO `primarydatasets` (name) VALUES (%s)', (pd,)) knownPDs[pd] = dbcursor.lastrowid print ' Inserted', pd, 'to the list of primary datasets to process.' datasetid = knownPDs[pd] dbcursor.execute('SELECT `run`, `lumi` FROM `scanstatus` WHERE `datasetid` = %s AND `status` NOT LIKE \'done\'', (datasetid,)) # find new lumisections and inject to DB for run, lumi in dbcursor: result = dasQuery('file dataset=/' + pd + '/' + recoVersion + '/RECO run=' + str(run) + ' lumi=' + str(lumi)) try: lfn = result[0]['file'][0]['name'] except: # print result lfn = pd + ' N/A' # example output # {u'das_id': [u'56466a9f6924172dcacc17ea', u'56466a9f6924172dcacc17e8'], u'qhash': u'0c0ff5c354d68314f3f00a3cab297ceb', u'cache_id': [u'56466aa06924172dcacc17ef'], u'file': [{u'name': u'/store/data/Run2015D/Tau/RECO/PromptReco-v3/000/257/400/00000/4CC2B9E3-AD64-E511-B274-02163E014308.root'}], u'das': {u'primary_key': u'file.name', u'record': 1, u'condition_keys': [u'run.run_number', u'lumi.number', u'dataset.name'], u'ts': 1447455392.3699269, u'system': [u'dbs3'], u'instance': u'prod/global', u'api': [u'file4DatasetRunLumi'], u'expire': 1447455692, u'services': [{u'dbs3': [u'dbs3']}]}, u'_id': u'56466aa06924172dcacc17f2'} print run, lumi, lfn