def main():
    '''Runs the dropBox forever.
    '''

    logging.info('Starting...')

    fqdn = socket.getfqdn()

    if fqdn == 'srv-C2C05-11.cms':
        logging.info('Using tier0 configuration.')
        dropBoxConfig = config.tier0()
    elif fqdn == 'srv-C2C05-15.cms':
        logging.info('Using online configuration.')
        dropBoxConfig = config.online()
    elif fqdn == 'vocms226.cern.ch':
        logging.info('Using offline configuration.')
        dropBoxConfig = config.offline()
    elif fqdn == 'vocms225.cern.ch':
        logging.warning('Using offline configuration (this is the hot-spare! -- should not be started unless something happened with vocms226).')
        dropBoxConfig = config.offline()
    elif fqdn.endswith('.cern.ch'):
        logging.info('Using test configuration.')
        dropBoxConfig = config.test()
    else:
        raise Exception('Not running at CERN.')

    logging.info('Configuring object...')
    dropBox = Dropbox.Dropbox(dropBoxConfig)

    logging.info('Configuring TERM handler...')
    signal.signal(signal.SIGTERM, handleTERM)

    logging.info('Running loop...')
    while not stop:
        logging.info('Processing all files...')
        dropBox.processAllFiles()

        # Avoid the delay if we just finished processing
        if stop:
            break

        if dropBoxConfig.delay:
            logging.info('Processing all files done; waiting %s seconds for the next run.', dropBoxConfig.delay)
            time.sleep(dropBoxConfig.delay)
        else:
            # If delay is not set, it means we're Tier-0 and need to run at the next 10-minute interval.
            sleepTime = secUntilNext10Min()
            logging.info('Processing all files done; waiting %s seconds for the next run.', sleepTime)
            time.sleep(sleepTime)

    logging.info('Stopping...')
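# main() above references a module-level 'stop' flag, a 'handleTERM' signal handler and a
# 'secUntilNext10Min()' helper that are defined elsewhere in this module. A minimal sketch of
# how they could look, inferred only from how main() uses them (the bodies are assumptions,
# not the actual implementation):

import datetime

# Assumed: module-level flag polled by the main loop and flipped by the SIGTERM handler.
stop = False

def handleTERM(signum, frame):
    '''Assumed sketch: request a clean exit after the current iteration.'''
    global stop
    stop = True

def secUntilNext10Min():
    '''Assumed sketch: seconds left until the next 10-minute wall-clock boundary.'''
    now = datetime.datetime.now()
    secondsIntoInterval = (now.minute % 10) * 60 + now.second
    return 10 * 60 - secondsIntoInterval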
def main():
    '''Entry point.
    '''

    with open(os.path.join(os.path.expanduser(runsPath), 'files.json'), 'rb') as f:
        runs = json.loads(f.read())

    for (runLogCreationTimestamp, runBackend, runStatus, fcsRun, hltRun, files) in runs:
        runLogCreationTimestamp = str(runLogCreationTimestamp)
        runBackend = str(runBackend)

        logging.info('Run %s: %s, %s, %s, %s', runLogCreationTimestamp, runBackend, runStatus, fcsRun, hltRun)

        # Create the emulated statusUpdater
        if runBackend == 'online':
            cfg = config.online()
        elif runBackend == 'tier0':
            cfg = config.tier0()
        elif runBackend == 'offline':
            cfg = config.offline()
        else:
            raise Exception('Unsupported backend to emulate.')

        cfg.proxy = None

        statusUpdater = StatusUpdater.StatusUpdater(cfg)
        statusUpdater.creationTimeStamp = runLogCreationTimestamp
        statusUpdater.backend = runBackend

        # Ask and run actions
        if raw_input('updateRunStatus(%s). Are you sure? ' % runStatus).lower() == 'y':
            statusUpdater.updateRunStatus(runStatus)
        else:
            logging.warning('Skipped.')

        if raw_input('updateRunRunInfo(%s, %s). Are you sure? ' % (fcsRun, hltRun)).lower() == 'y':
            statusUpdater.updateRunRunInfo(fcsRun, hltRun)
        else:
            logging.warning('Skipped.')

        with open(os.path.join(os.path.expanduser(runsPath), '%s.log' % runLogCreationTimestamp), 'rb') as f:
            globalLog = globalLogPrefix + f.read()

        if raw_input('uploadRunLog(%s, %s). Are you sure? ' % (repr(downloadLog), repr(globalLog))).lower() == 'y':
            statusUpdater.uploadRunLog(downloadLog, globalLog)
        else:
            logging.warning('Skipped.')

        for (fileHash, fileStatus) in files:
            fileHash = str(fileHash)

            logging.info(' File %s: %s', fileHash, fileStatus)

            with open(os.path.join(os.path.expanduser(runsPath), '%s.log' % fileHash), 'rb') as f:
                fileLog = f.read()

            if raw_input('statusUpdater.updateFileStatus(%s, %s). Are you sure? ' % (repr(fileHash), fileStatus)).lower() == 'y':
                statusUpdater.updateFileStatus(fileHash, fileStatus)
            else:
                logging.warning('Skipped.')

            if raw_input('statusUpdater.uploadFileLog(%s, %s). Are you sure? ' % (repr(fileHash), repr(fileLog))).lower() == 'y':
                statusUpdater.uploadFileLog(fileHash, fileLog)
            else:
                logging.warning('Skipped.')
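# The replay tool above expects 'files.json' (plus a '<timestamp>.log' file per run and a
# '<hash>.log' file per file) to live under runsPath. An illustrative layout for 'files.json',
# inferred only from how the loop unpacks each entry (all concrete values below are made-up examples):

exampleRuns = [
    [
        '2013-02-20 15:00:00',  # runLogCreationTimestamp
        'offline',              # runBackend: 'online', 'tier0' or 'offline'
        'FINISHED',             # runStatus (example value)
        1,                      # fcsRun (example value)
        1,                      # hltRun (example value)
        [                       # files: list of [fileHash, fileStatus] pairs
            ['d41d8cd98f00b204e9800998ecf8427e', 'PROCESSING_OK'],  # example values
        ],
    ],
]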