# loop over JSON files, which will give the list of files to be merged
for i in range(0, len(afterString)):
   if not afterString[i].endswith(".jsn"): continue
   if "index" in afterString[i]: continue
   if afterString[i].endswith("recv"): continue
   if "EoLS" in afterString[i]: continue
   if "BoLS" in afterString[i]: continue
   if "EoR" in afterString[i]: continue

   fileNameString = os.path.basename(afterString[i]).split('_')

   if(float(debug) >= 50): log.info("FILE: {0}".format(afterString[i]))
   inputJsonFile = os.path.join(inputDataFolder, afterString[i])
   if(float(debug) >= 50): log.info("inputJsonFile: {0}".format(inputJsonFile))

   settings = cmsDataFlowMerger.readJsonFile(inputJsonFile, debug)

   # avoid corrupted files or streamEvD files
   if("bad" in settings): continue

   # number of input and output events and the name of the dat file; this is the critical bookkeeping
   # eventsOutput is actually the total number of events to merge in the macromerged stage
   eventsInput       = int(settings['data'][0])
   eventsOutput      = int(settings['data'][1])
   errorCode         = 0
   file              = ""
   fileErrorString   = None
   fileSize          = 0
   nFilesBU          = 0
   eventsTotalInput  = 0
   checkSum          = 0
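The excerpts on this page are fragments of a larger merger module and rely on several imports, a module-level logger, and a cmsDataFlowMerger helper that are not shown. The preamble below is a minimal sketch of those assumptions; the stub's behaviour (return the parsed JSON, or something containing "bad" when the file cannot be read) is only inferred from how the callers test for "bad", and is not the real cmsDataFlowMerger implementation.

# Minimal assumed preamble (sketch only, not part of the original module)
import glob
import json
import logging
import os
import shutil
import time
from datetime import datetime, timedelta

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("merger")

class cmsDataFlowMergerStub(object):
   # Stand-in for the cmsDataFlowMerger module used by the excerpts:
   # return the parsed JSON content, or a dict flagged "bad" on failure,
   # matching the callers' check  if("bad" in settings): continue
   @staticmethod
   def readJsonFile(fileName, debug):
      try:
         with open(fileName) as jsonFile:
            return json.loads(jsonFile.read())
      except Exception:
         return {"bad": True}

# To experiment with the excerpts one could alias the stub, e.g.
# cmsDataFlowMerger = cmsDataFlowMergerStub  (assumption, for testing only)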
Example #2
def doReading(theInput,theMaxTime,theTooSlowTime,theDebug):

   initReadingTime = time.time()
   endReadingTime = 0
   totalReadFiles    = 0
   totalTimeFiles    = 0
   totalTooSlowFiles = 0
   while 1:
   
      endReadingTime = time.time()
      diffTime = endReadingTime-initReadingTime
      if(theMaxTime > 0 and diffTime > theMaxTime):
         msg  = "Maximum time (%f sec) has passed %f sec\n" % (diffTime,theMaxTime)
         msg += "Average time: %f msec\n" % (totalTimeFiles/totalReadFiles)
         msg += "Total too slow read files(%f msec): %d out of %d\n" % (theTooSlowTime,totalTooSlowFiles,totalReadFiles)
         print msg
         return

      inputDataFolders = glob.glob(theInput)

      if(theDebug >= 0): log.info("***************NEW LOOP***************")
      if(theDebug > 0): log.info("inputDataFolders: {0}".format(inputDataFolders))
      for nf in range(0, len(inputDataFolders)):
         inputDataFolder = inputDataFolders[nf]

         after = dict()
         listFolders = sorted(glob.glob(os.path.join(inputDataFolder, 'stream*')))
         for nStr in range(0, len(listFolders)):
            try:
               after_temp = dict([(f, None) for f in glob.glob(os.path.join(listFolders[nStr], '*.jsn'))])
               after.update(after_temp)
               after_temp = dict([(f, None) for f in glob.glob(os.path.join(listFolders[nStr], '*.ini'))])
               after.update(after_temp)
               after_temp = dict([(f, None) for f in glob.glob(os.path.join(listFolders[nStr], 'jsns', '*.jsn'))])
               after.update(after_temp)
               after_temp = dict([(f, None) for f in glob.glob(os.path.join(listFolders[nStr], 'data', '*.ini'))])
               after.update(after_temp)
            except Exception, e:
               log.error("glob.glob operation failed: {0} - {1}".format(inputDataFolder,e))

         afterStringNoSorted = [f for f in after]
         afterString = sorted(afterStringNoSorted, reverse=False)

         for i in range(0, len(afterString)):
            if not afterString[i].endswith(".jsn"): continue
            if "index" in afterString[i]: continue
            if afterString[i].endswith("recv"): continue
            if "EoLS" in afterString[i]: continue
            if "BoLS" in afterString[i]: continue
            if "EoR" in afterString[i]: continue

            inputJsonFile = os.path.join(inputDataFolder, afterString[i])

            initJsonTime = time.time()
            settings = str(cmsDataFlowMerger.readJsonFile(inputJsonFile, theDebug))
            endJsonTime = time.time()

            # avoid corrupted files
            if("bad" in settings): continue
            diffProcessTime = (endJsonTime-initJsonTime)*1000
            totalReadFiles = totalReadFiles + 1
            totalTimeFiles = totalTimeFiles + diffProcessTime
            if(diffProcessTime > theTooSlowTime): totalTooSlowFiles = totalTooSlowFiles + 1
            if(theDebug >= 1): log.info("Time in ms({0}): {1:5.1f}".format(inputJsonFile,diffProcessTime))

            endReadingTime = time.time()
            diffTime = endReadingTime-initReadingTime
            if(theMaxTime > 0 and diffTime > theMaxTime):
               break
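A minimal way to drive this poller is shown below; the path pattern and the thresholds are purely illustrative assumptions, not values from the original script.

# Illustrative only: watch every run directory matching the pattern,
# stop after 600 s, and count any JSON read slower than 50 ms as too slow.
if __name__ == "__main__":
   doReading("/path/to/runs/run*", 600.0, 50.0, 1)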
Example #3
def doReading(theInput, theMaxTime, theTooSlowTime, theDebug):

    initReadingTime = time.time()
    endReadingTime = 0
    totalReadFiles = 0
    totalTimeFiles = 0
    totalTooSlowFiles = 0
    while 1:

        endReadingTime = time.time()
        diffTime = endReadingTime - initReadingTime
        if (theMaxTime > 0 and diffTime > theMaxTime):
            msg = "Maximum time (%f sec) has passed %f sec\n" % (diffTime,
                                                                 theMaxTime)
            msg += "Average time: %f msec\n" % (totalTimeFiles /
                                                totalReadFiles)
            msg += "Total too slow read files(%f msec): %d out of %d\n" % (
                theTooSlowTime, totalTooSlowFiles, totalReadFiles)
            print msg
            return

        inputDataFolders = glob.glob(theInput)

        if (theDebug >= 0): log.info("***************NEW LOOP***************")
        if (theDebug > 0):
            log.info("inputDataFolders: {0}".format(inputDataFolders))
        for nf in range(0, len(inputDataFolders)):
            inputDataFolder = inputDataFolders[nf]

            after = dict()
            try:
                after_temp = dict([
                    (f, None)
                    for f in glob.glob(os.path.join(inputDataFolder, '*.jsn'))
                ])
                after.update(after_temp)
                after_temp = dict([
                    (f, None)
                    for f in glob.glob(os.path.join(inputDataFolder, '*.ini'))
                ])
                after.update(after_temp)
            except Exception, e:
                log.error("glob.glob operation failed: {0} - {1}".format(
                    inputDataFolder, e))

            listFolders = sorted(
                glob.glob(os.path.join(inputDataFolder, 'stream*')))
            for nStr in range(0, len(listFolders)):
                try:
                    after_temp = dict([(f, None) for f in glob.glob(
                        os.path.join(listFolders[nStr], '*.jsn'))])
                    after.update(after_temp)
                    after_temp = dict([(f, None) for f in glob.glob(
                        os.path.join(listFolders[nStr], '*.ini'))])
                    after.update(after_temp)
                except Exception, e:
                    log.error("glob.glob operation failed: {0} - {1}".format(
                        inputDataFolder, e))

            afterStringNoSorted = [f for f in after]
            afterString = sorted(afterStringNoSorted, reverse=False)

            for i in range(0, len(afterString)):
                if not afterString[i].endswith(".jsn"): continue
                if "index" in afterString[i]: continue
                if afterString[i].endswith("recv"): continue
                if "EoLS" in afterString[i]: continue
                if "BoLS" in afterString[i]: continue
                if "EoR" in afterString[i]: continue

                inputJsonFile = os.path.join(inputDataFolder, afterString[i])

                initJsonTime = time.time()
                settings = str(
                    cmsDataFlowMerger.readJsonFile(inputJsonFile, theDebug))
                endJsonTime = time.time()

                # avoid corrupted files
                if ("bad" in settings): continue
                diffProcessTime = (endJsonTime - initJsonTime) * 1000
                totalReadFiles = totalReadFiles + 1
                totalTimeFiles = totalTimeFiles + diffProcessTime
                if (diffProcessTime > theTooSlowTime):
                    totalTooSlowFiles = totalTooSlowFiles + 1
                if (theDebug >= 1):
                    log.info("Time in ms({0}): {1:5.1f}".format(
                        inputJsonFile, diffProcessTime))

                endReadingTime = time.time()
                diffTime = endReadingTime - initReadingTime
                if (theMaxTime > 0 and diffTime > theMaxTime):
                    break
Example #4
def cleanUpRun(debug, EoRFileName, inputDataFolder, afterString, path_eol, 
      theRunNumber, outputSMMergedFolder, outputEndName, 
      completeMergingThreshold):
   
   settingsEoR = cmsDataFlowMerger.readJsonFile(EoRFileName, debug)

   if("bad" in settingsEoR): return False

   eventsInputBU = int(settingsEoR['data'][0])

   eventsInputFU = 0
   for nb in range(0, len(afterString)):
      if "EoR" not in afterString[nb]: continue
      inputEoRFUJsonFile = afterString[nb]
      settingsLS = cmsDataFlowMerger.readJsonFile(inputEoRFUJsonFile, debug)

      if("bad" in settingsLS): continue

      eventsInputFU = eventsInputFU + int(settingsLS['data'][0])
   
   if(float(debug) >= 50): log.info(
      "eventsInputBU vs. eventsInputFU: {0} vs. {1}".format
      (eventsInputBU,eventsInputFU))
   if (eventsInputBU*completeMergingThreshold <= eventsInputFU):
      numberBoLSFiles = 0
      for nb in range(0, len(afterString)):
         if not afterString[nb].endswith("_BoLS.jsn"): continue
         if "DQM" in afterString[nb]: continue
         if "streamError" in afterString[nb]: continue
         numberBoLSFiles = numberBoLSFiles + 1
      if(float(debug) >= 50): log.info(
         "numberBoLSFiles: {0}".format(numberBoLSFiles))
      
      EoLSFolder    = os.path.join(path_eol, theRunNumber)
      eventsEoLS          = [0, 0, 0]
      eventsEoLS_noLastLS = [0, 0, 0]
      lastLumiBU = doSumEoLS(EoLSFolder, eventsEoLS, eventsEoLS_noLastLS)

      if(eventsEoLS[0] != eventsInputBU):
         log.info("PROBLEM eventsEoLS != eventsInputBU: {0} vs. {1}".format(
                  eventsEoLS[0],eventsInputBU))

      # This is done to make sure there won't be files created after we start deleting the folder
      deltaTimeHLTFolderCreation = timedelta(minutes=10)
      hltFolder = os.path.join(EoLSFolder,"hlt")
      if(os.path.exists(hltFolder)):
          m_time_stamp = int(os.stat(hltFolder).st_ctime)
          m_utc_date_time = datetime.utcfromtimestamp(m_time_stamp)
          deltaTimeHLTFolderCreation = datetime.utcnow() - m_utc_date_time
      else:
         log.error("PROBLEM HLT folder does not exist {0}".format(hltFolder))

      if(numberBoLSFiles == 0 and eventsInputBU == eventsInputFU and
        (deltaTimeHLTFolderCreation > timedelta(minutes=5) or lastLumiBU > 3)):
         # This is needed to cleanUp the macroMerger later
         EoRFileNameMiniOutput       = (
            outputSMMergedFolder + "/" + theRunNumber + "_ls0000_MiniEoR_" + 
            outputEndName + ".jsn_TEMP")
         EoRFileNameMiniOutputStable = (
            outputSMMergedFolder + "/" + theRunNumber + "_ls0000_MiniEoR_" + 
            outputEndName + ".jsn")

         theEoRFileMiniOutput = open(EoRFileNameMiniOutput, 'w')
         theEoRFileMiniOutput.write(
            json.dumps({'eventsInputBU':   eventsInputBU, 
                        'eventsInputFU':   eventsInputFU, 
                        'numberBoLSFiles': numberBoLSFiles,
                        'eventsTotalRun':  eventsEoLS[1],
                        'eventsLostBU':    eventsEoLS[2],
                        'eventsInputBU_noLastLS':   eventsEoLS_noLastLS[0], 
                        'eventsTotalRun_noLastLS':  eventsEoLS_noLastLS[1],
                        'eventsLostBU_noLastLS':    eventsEoLS_noLastLS[2],
                        'lastLumiBU':      lastLumiBU}))
         theEoRFileMiniOutput.close()

         shutil.move(EoRFileNameMiniOutput, EoRFileNameMiniOutputStable)

         log.info("Run folder deletion is triggered!: {0} and {1}".format(
                 inputDataFolder,EoLSFolder))
         time.sleep(10)
         ### DEBUG, CHECK FOLDERS TO BE DELETED
         listFolders = sorted(glob.glob(os.path.join(inputDataFolder, 'stream*')))
         after_eor = dict()
         try:
            after_temp_eor = dict ([(f, None) for f in glob.glob(os.path.join(inputDataFolder, '*.jsn'))])
            after_eor.update(after_temp_eor)
            for nStr in range(0, len(listFolders)):
               after_temp_eor = dict ([(f, None) for f in glob.glob(os.path.join(listFolders[nStr], '*.jsn'))])
               after_eor.update(after_temp_eor)
               after_temp_eor = dict ([(f, None) for f in glob.glob(os.path.join(listFolders[nStr], 'jsns', '*.jsn'))])
               after_eor.update(after_temp_eor)
         except Exception, e:
            log.error("glob.glob operation failed: {0} - {1}".format(inputDataFolder,e))
         afterStringNow = [f for f in after_eor]
	 log.info("Checking folders in {0} before deleting them".format(inputDataFolder))
	 log.info("What we had: {0}".format(afterString))
	 log.info("What we have: {0}".format(afterStringNow))
	 ###
	 if(len(afterString) == len(afterStringNow)):
            try:
               shutil.rmtree(inputDataFolder)
            except Exception,e:
               log.error("Failed removing {0} - {1}".format(inputDataFolder,e))
            try:
               if os.path.islink(EoLSFolder):
                  link_dir = os.readlink(EoLSFolder)
                  log.info("EoLS run dir is a symlink pointing to {0}".format(
                           link_dir))
                  os.unlink(EoLSFolder)
                  EoLSFolder = link_dir
               shutil.rmtree(EoLSFolder)
            except Exception,e:
               log.error("Failed removing {0} - {1}".format(EoLSFolder,e))

            return True
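doSumEoLS is called above but its definition is not part of this excerpt. The sketch below is only a guess at its contract, inferred from the call site (it fills the two three-element counter lists in place and returns the last lumisection seen on the BU) and from the EoLS record layout used elsewhere in these excerpts (data[0] = input events, data[2] = all events, data[3] = lost events); the real implementation may well differ.

# Hypothetical stand-in for doSumEoLS; an assumption, not the original code.
import glob
import json
import os

def doSumEoLSSketch(EoLSFolder, eventsEoLS, eventsEoLS_noLastLS):
   perLumi = {}
   for jsn in sorted(glob.glob(os.path.join(EoLSFolder, '*_EoLS.jsn'))):
      try:
         settings = json.loads(open(jsn).read())
      except Exception:
         continue  # skip unreadable or corrupted EoLS files
      # lumisection number parsed from names like run000001_ls0042_EoLS.jsn
      ls = int(os.path.basename(jsn).split('_')[1].replace('ls', ''))
      perLumi[ls] = [int(settings['data'][0]),   # input events
                     int(settings['data'][2]),   # all events
                     int(settings['data'][3])]   # lost events
   lastLumiBU = max(perLumi.keys()) if perLumi else 0
   for ls in perLumi:
      eventsEoLS[0] += perLumi[ls][0]
      eventsEoLS[1] += perLumi[ls][1]
      eventsEoLS[2] += perLumi[ls][2]
      if ls != lastLumiBU:
         eventsEoLS_noLastLS[0] += perLumi[ls][0]
         eventsEoLS_noLastLS[1] += perLumi[ls][1]
         eventsEoLS_noLastLS[2] += perLumi[ls][2]
   return lastLumiBU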
Example #5
      raise RuntimeError, msg

after = dict()
try:
   after_temp = dict ([(f, None) for f in glob.glob(os.path.join(inputDataFolder, '*.jsn'))])
   after.update(after_temp)
except Exception, e:
   log.error("glob.glob operation failed: {0} - {1}".format(inputDataFolder,e))
afterStringNoSorted = [f for f in after if ((dataString in f) and ("EoLS" not in f) and ("BoLS" not in f) and ("EoR" not in f))]
afterString = sorted(afterStringNoSorted, reverse=False)

dataFiles = []
for i in range(0, len(afterString)):

   jsonFile = os.path.join(inputDataFolder, afterString[i])
   settings = cmsDataFlowMerger.readJsonFile(jsonFile,0)
      
   if  ("bad" in settings):
      print "corrupted file: ",jsonFile
      continue

   eventsInput      = eventsInput + int(settings['data'][0])
   eventsInputFiles = eventsInputFiles + 1

   if(typeMerging != "mini"):
      eventsTotalInput = int(settings['data'][7])

   fileDataString = afterString[i].split('_')
   dataFiles.append(fileDataString[3].split('.jsn')[0])

iniFiles = []
def doTheChecking(paths_to_watch, path_eol, mergeType, debug):
   eventsIDict     = dict()
   eventsEoLSDict  = dict()
   eventsLDict     = dict()
   EoLSProblemDict = dict()
   eventsFUBUDict  = dict()
   if(float(debug) >= 10): log.info("I will watch: {0}".format(paths_to_watch))

   inputDataFoldersNoSorted = glob.glob(paths_to_watch)
   inputDataFolders = sorted(inputDataFoldersNoSorted, reverse=True)
   if(float(debug) >= 20): log.info("inputDataFolders: {0}".format(inputDataFolders))
   for nf in range(0, len(inputDataFolders)):
       inputDataFolder = inputDataFolders[nf]

       inputDataFolderString = inputDataFolder.split('/')
       # handle input folders given both with and without a trailing '/'
       if inputDataFolderString[len(inputDataFolderString)-1] == '':
         theRunNumber = inputDataFolderString[len(inputDataFolderString)-2]
       else:
         theRunNumber = inputDataFolderString[len(inputDataFolderString)-1] 

       after = dict()
       listFolders = sorted(glob.glob(os.path.join(inputDataFolder, 'stream*')))
       for nStr in range(0, len(listFolders)):
          try:
             after_temp = dict ([(f, None) for f in glob.glob(os.path.join(listFolders[nStr], '*.jsn'))])
             after.update(after_temp)
             after_temp = dict ([(f, None) for f in glob.glob(os.path.join(listFolders[nStr], '*.ini'))])
             after.update(after_temp)
             after_temp = dict ([(f, None) for f in glob.glob(os.path.join(listFolders[nStr], 'jsns', '*.jsn'))])
             after.update(after_temp)
             after_temp = dict ([(f, None) for f in glob.glob(os.path.join(listFolders[nStr], 'data', '*.ini'))])
             after.update(after_temp)
          except Exception, e:
             log.error("glob.glob operation failed: {0} - {1}".format(inputDataFolder,e))

       afterStringNoSorted = [f for f in after]
       afterString = sorted(afterStringNoSorted, reverse=False)
       if(float(debug) >= 50): log.info("afterString: {0}".format(afterString))

       # loop over JSON files, which will give the list of files to be merged
       for i in range(0, len(afterString)):
          if not afterString[i].endswith(".jsn"): continue
          if "index" in afterString[i]: continue
          if afterString[i].endswith("recv"): continue
          if "EoLS" in afterString[i]: continue
          if "BoLS" in afterString[i]: continue
          if "EoR" in afterString[i]: continue

          fileNameString = os.path.basename(afterString[i]).split('_')

          if(float(debug) >= 50): log.info("FILE: {0}".format(afterString[i]))
          inputJsonFile = os.path.join(inputDataFolder, afterString[i])
          if(float(debug) >= 50): log.info("inputJsonFile: {0}".format(inputJsonFile))

          settings = cmsDataFlowMerger.readJsonFile(inputJsonFile, debug)

          # avoid corrupted files or streamEvD files
          if("bad" in settings): continue

          # number of input and output events and the name of the dat file; this is the critical bookkeeping
          # eventsOutput is actually the total number of events to merge in the macromerged stage
          eventsInput       = int(settings['data'][0])
          eventsOutput      = int(settings['data'][1])
          errorCode         = 0
          file              = ""
          fileErrorString   = None
          fileSize          = 0
          nFilesBU          = 0
          eventsTotalInput  = 0
          checkSum          = 0
          NLostEvents       = 0
          transferDest      = "dummy"
          if mergeType == "mini":
             eventsOutputError = int(settings['data'][2])
             errorCode         = int(settings['data'][3])
             file              = str(settings['data'][4])
             fileSize          = int(settings['data'][5])
             checkSum          = int(settings['data'][7])

             # processed events == input + error events
             eventsInput = eventsInput + eventsOutputError

             if fileNameString[2] == "streamError":
                file            = str(settings['data'][6])
                fileErrorString = file.split(',')
          else:
             errorCode        = int(settings['data'][2])
             file             = str(settings['data'][3])
             fileSize         = int(settings['data'][4])
             checkSum         = int(settings['data'][5])
             nFilesBU         = int(settings['data'][6])
             eventsTotalInput = int(settings['data'][7])
             NLostEvents      = int(settings['data'][8])

          key = (fileNameString[0], fileNameString[1], fileNameString[2])
          if key in eventsIDict.keys():

             eventsInput = eventsIDict[key][0] + eventsInput
             eventsIDict[key].remove(eventsIDict[key][0])
             eventsIDict.update({key:[eventsInput]})

             NLostEvents = eventsLDict[key][0] + NLostEvents
             eventsLDict[key].remove(eventsLDict[key][0])
             eventsLDict.update({key:[NLostEvents]})

          else:

             eventsIDict.update({key:[eventsInput]})

             eventsLDict.update({key:[NLostEvents]})

          if mergeType == "mini":
             keyEoLS = (fileNameString[0], fileNameString[1])
             if keyEoLS not in eventsEoLSDict.keys():
                EoLSName = path_eol + "/" + fileNameString[0] + "/" + fileNameString[0] + "_" + fileNameString[1] + "_EoLS.jsn"
                if(float(debug) >= 20): log.info("EoLSName: {0}".format(EoLSName))
                if os.path.exists(EoLSName) and os.path.getsize(EoLSName) > 0:
                   inputEoLSName = open(EoLSName, "r").read()
                   settingsEoLS  = json.loads(inputEoLSName)
                   eventsEoLS    = int(settingsEoLS['data'][0])
                   filesEoLS     = int(settingsEoLS['data'][1])
                   eventsAllEoLS = int(settingsEoLS['data'][2])
                   NLostEvents   = int(settingsEoLS['data'][3])
                   eventsEoLSDict.update({keyEoLS:[eventsEoLS, filesEoLS, eventsAllEoLS, NLostEvents]})
                else:
                   if(float(debug) >= 30): print "PROBLEM WITH EoLSFile: ", EoLSName
                   keyEoLSRun = (fileNameString[0])
                   if keyEoLSRun not in EoLSProblemDict.keys():
                      EoLSProblemDict.update({keyEoLSRun:["bad"]})

             if keyEoLS in eventsEoLSDict.keys():
                if(float(debug) >= 10): log.info("mini-EventsEoLS/EventsInput-run/ls/stream: {0}, {1} - {2}".format(eventsEoLSDict[keyEoLS][0], eventsIDict[key][0], key))
                if(eventsEoLSDict[keyEoLS][0] == eventsIDict[key][0]):
                   if(float(debug) >= 10): log.info("Events match: {0}".format(key))
                else:
                   if(float(debug) >= 20):
                      log.info("Events number does not match: EoL says {0}, we have in the files: {1}".format(eventsEoLSDict[keyEoLS][0], eventsIDict[key][0]))
             else:
                if(float(debug) >= 20):
                   log.warning("Looks like {0} is not in eventsEoLSDict".format(key))

          else:
             if(float(debug) >= 10): log.info("macro-EventsTotalInput/EventsInput/NLostEvents-run/ls/stream: {0}, {1}, {2} - {3}".format(eventsTotalInput, eventsIDict[key][0], eventsLDict[key][0], key))
             if(eventsTotalInput == (eventsIDict[key][0]+eventsLDict[key][0])):
                if(float(debug) >= 10): log.info("Events match: {0}".format(key))
             else:
                if(float(debug) >= 20):
                   log.info("Events number does not match: EoL says {0}, we have in the files: {1} + {2} = {3}".format(eventsTotalInput, eventsIDict[key][0], eventsLDict[key][0], eventsIDict[key][0]+eventsLDict[key][0]))
Example #7
def cleanUpRun(debug, EoRFileName, inputDataFolder, afterString, path_eol, 
      theRunNumber, outputSMMergedFolder, outputEndName, 
      completeMergingThreshold):
   
   settingsEoR = cmsDataFlowMerger.readJsonFile(EoRFileName, debug)

   if("bad" in settingsEoR): return False

   eventsInputBU = int(settingsEoR['data'][0])

   eventsInputFU = 0
   for nb in range(0, len(afterString)):
      inputEoRFUJsonFile = afterString[nb]
      settingsLS = cmsDataFlowMerger.readJsonFile(inputEoRFUJsonFile, debug)

      if("bad" in settingsLS): continue

      eventsInputFU = eventsInputFU + int(settingsLS['data'][0])
   
   if(float(debug) >= 50): log.info(
      "eventsInputBU vs. eventsInputFU: {0} vs. {1}".format
      (eventsInputBU,eventsInputFU))
   if (eventsInputBU*completeMergingThreshold <= eventsInputFU):
      numberBoLSFiles = 0
      for nb in range(0, len(afterString)):
         if not afterString[nb].endswith("_BoLS.jsn"): continue
         if "DQM" in afterString[nb]: continue
         if "streamError" in afterString[nb]: continue
         numberBoLSFiles = numberBoLSFiles + 1
      if(float(debug) >= 50): log.info(
         "numberBoLSFiles: {0}".format(numberBoLSFiles))
      
      EoLSFolder    = os.path.join(path_eol, theRunNumber)
      eventsEoLS          = [0, 0, 0]
      eventsEoLS_noLastLS = [0, 0, 0]
      lastLumiBU = doSumEoLS(EoLSFolder, eventsEoLS, eventsEoLS_noLastLS)

      if(eventsEoLS[0] != eventsInputBU):
         log.info("PROBLEM eventsEoLS != eventsInputBU: {0} vs. {1}".format(
                  eventsEoLS[0],eventsInputBU))

      if(numberBoLSFiles == 0 and eventsInputBU == eventsInputFU):
         # This is needed to cleanUp the macroMerger later
         EoRFileNameMiniOutput       = (
            outputSMMergedFolder + "/" + theRunNumber + "_ls0000_MiniEoR_" + 
            outputEndName + ".jsn_TEMP")
         EoRFileNameMiniOutputStable = (
            outputSMMergedFolder + "/" + theRunNumber + "_ls0000_MiniEoR_" + 
            outputEndName + ".jsn")

         theEoRFileMiniOutput = open(EoRFileNameMiniOutput, 'w')
         theEoRFileMiniOutput.write(
            json.dumps({'eventsInputBU':   eventsInputBU, 
                        'eventsInputFU':   eventsInputFU, 
                        'numberBoLSFiles': numberBoLSFiles,
                        'eventsTotalRun':  eventsEoLS[1],
                        'eventsLostBU':    eventsEoLS[2],
                        'eventsInputBU_noLastLS':   eventsEoLS_noLastLS[0], 
                        'eventsTotalRun_noLastLS':  eventsEoLS_noLastLS[1],
                        'eventsLostBU_noLastLS':    eventsEoLS_noLastLS[2],
                        'lastLumiBU':      lastLumiBU}))
         theEoRFileMiniOutput.close()

         shutil.move(EoRFileNameMiniOutput, EoRFileNameMiniOutputStable)

         EoLSFolder = os.path.join(path_eol, theRunNumber)
         log.info("Run folder deletion is triggered!: {0} and {1}".format(
                 inputDataFolder,EoLSFolder))
         time.sleep(10)
         try:
            shutil.rmtree(inputDataFolder)
         except Exception,e:
            log.error("Failed removing {0} - {1}".format(inputDataFolder,e))
         try:
            if os.path.islink(EoLSFolder):
               link_dir = os.readlink(EoLSFolder)
               log.info("EoLS run dir is a symlink pointing to {0}".format(
                       link_dir))
               os.unlink(EoLSFolder)
               EoLSFolder = link_dir
            shutil.rmtree(EoLSFolder)
         except Exception,e:
            log.error("Failed removing {0} - {1}".format(EoLSFolder,e))

         return True
Example #8
after = dict()
try:
    after_temp = dict([
        (f, None) for f in glob.glob(os.path.join(inputDataFolder, '*.jsn'))
    ])
    after.update(after_temp)
except Exception, e:
    log.error("glob.glob operation failed: {0} - {1}".format(
        inputDataFolder, e))
afterStringNoSorted = [
    f for f in after if ((dataString in f) and ("EoLS" not in f) and (
        "BoLS" not in f) and ("EoR" not in f))
]
afterString = sorted(afterStringNoSorted, reverse=False)

dataFiles = []
for i in range(0, len(afterString)):

    jsonFile = os.path.join(inputDataFolder, afterString[i])
    settings = cmsDataFlowMerger.readJsonFile(jsonFile, 0)

    if ("bad" in settings):
        print "corrupted file: ", jsonFile
        continue

    eventsInput = eventsInput + int(settings['data'][0])
    eventsInputFiles = eventsInputFiles + 1

    if (typeMerging != "mini"):
        eventsTotalInput = int(settings['data'][7])

    fileDataString = afterString[i].split('_')
    dataFiles.append(fileDataString[3].split('.jsn')[0])

iniFiles = []