Example #1
def main():
    replayTags = {}

    files = replay.getFiles()

    i = 0
    for fileName in files:
        i += 1
        logging.info('[%s/%s] %s: Getting replay tags...', i, len(files),
                     fileName)

        tarFile = tarfile.open(
            os.path.join(replay.dropBoxReplayFilesFolder, fileName))

        names = tarFile.getnames()
        if len(names) != 2:
            raise Exception(
                '%s: Invalid number of files in tar file.' % fileName)

        baseFileName = names[0].rsplit('.', 1)[0]
        dbFileName = '%s.db' % baseFileName
        txtFileName = '%s.txt' % baseFileName
        if set([dbFileName, txtFileName]) != set(names):
            raise Exception('%s: Invalid file names in tar file.' % fileName)

        newMetadata = json.loads(
            metadata.port(tarFile.extractfile(txtFileName).read(), fileName))
        tarFile.close()

        for tag in newMetadata['destinationTags']:
            replayTags.setdefault(newMetadata['destinationDatabase'],
                                  set([])).add(tag)
            for dependentTag in newMetadata['destinationTags'][tag][
                    'dependencies']:
                replayTags[newMetadata['destinationDatabase']].add(
                    dependentTag)

    # Convert the sets to sorted lists: sets cannot be dumped as JSON, and
    # sorting makes future changes easier to compare (since the lists are
    # sorted, a diff will not show tags moving around).
    outputDict = {}
    for destDB in replayTags:
        outputDict[destDB] = sorted(replayTags[destDB])

    logging.info('Writing output file %s...', outputFile)
    with open(outputFile, 'w') as f:
        json.dump(outputDict, f, sort_keys=True, indent=4)
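
A minimal, self-contained sketch of the set-to-sorted-list conversion used above, assuming a made-up destination database and tag names:

import json

# Hypothetical input: one destination database with tags collected in a set.
replayTags = {'ExampleDB': set(['TagB', 'TagA', 'TagA_dependency'])}

outputDict = {}
for destDB in replayTags:
    # Sets cannot be dumped as JSON; sorting also keeps the output stable,
    # so diffs do not show tags moving around.
    outputDict[destDB] = sorted(replayTags[destDB])

print(json.dumps(outputDict, sort_keys=True, indent=4))
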
Example #2
def main():
    replayTags = {}

    files = replay.getFiles()

    i = 0
    for fileName in files:
        i += 1
        logging.info('[%s/%s] %s: Getting replay tags...', i, len(files), fileName)

        tarFile = tarfile.open(os.path.join(replay.dropBoxReplayFilesFolder, fileName))

        names = tarFile.getnames()
        if len(names) != 2:
            raise Exception('%s: Invalid number of files in tar file.' % fileName)

        baseFileName = names[0].rsplit('.', 1)[0]
        dbFileName = '%s.db' % baseFileName
        txtFileName = '%s.txt' % baseFileName
        if set([dbFileName, txtFileName]) != set(names):
            raise Exception('%s: Invalid file names in tar file.' % fileName)

        newMetadata = json.loads(metadata.port(tarFile.extractfile(txtFileName).read(), fileName))
        tarFile.close()

        for tag in newMetadata['destinationTags']:
            replayTags.setdefault(newMetadata['destinationDatabase'], set([])).add(tag)
            for dependentTag in newMetadata['destinationTags'][tag]['dependencies']:
                replayTags[newMetadata['destinationDatabase']].add(dependentTag)

    # Convert the sets to sorted lists: sets cannot be dumped as JSON, and
    # sorting makes future changes easier to compare (since the lists are
    # sorted, a diff will not show tags moving around).
    outputDict = {}
    for destDB in replayTags:
        outputDict[destDB] = sorted(replayTags[destDB])

    logging.info('Writing output file %s...', outputFile)
    with open(outputFile, 'w') as f:
        json.dump(outputDict, f, sort_keys=True, indent=4)
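
The inner loop above merges each destination tag and its dependent tags into one set per destination database. A small sketch with hand-made metadata (the dictionary below only mirrors the fields accessed above; the values are invented):

# Hypothetical metadata, shaped like the newMetadata dictionary above.
newMetadata = {
    'destinationDatabase': 'ExampleDB',
    'destinationTags': {
        'TagA': {'dependencies': ['TagA_dep1', 'TagA_dep2']},
        'TagB': {'dependencies': []},
    },
}

replayTags = {}
for tag in newMetadata['destinationTags']:
    # One set of tags per destination database, created on first use.
    replayTags.setdefault(newMetadata['destinationDatabase'], set([])).add(tag)
    for dependentTag in newMetadata['destinationTags'][tag]['dependencies']:
        replayTags[newMetadata['destinationDatabase']].add(dependentTag)

# replayTags == {'ExampleDB': set(['TagA', 'TagA_dep1', 'TagA_dep2', 'TagB'])}
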
Example #3
def main():
    dropBoxRuns = calculateOldDropBoxRuns()

    with open('/afs/cern.ch/cms/DB/conddb/test/dropbox/replay/runInfo.json', 'rb') as f:
        runInfo = json.load(f)

    # Ask the frontend to clean up the files and database
    (username, account, password) = netrc.netrc().authenticators('newOffDb')
    frontendHttp = http.HTTP()
    frontendHttp.setBaseUrl(doUpload.frontendUrlTemplate % doUpload.frontendHost)

    logging.info('Signing in the frontend...')
    frontendHttp.query('signIn', {
        'username': username,
        'password': password,
    })

    logging.info('Asking the frontend to clean up files and database...')
    frontendHttp.query('cleanUp')

    logging.info('Signing out the frontend...')
    frontendHttp.query('signOut')

    logging.info('Removing files in the backend...')
    execute('rm -rf ../NewOfflineDropBoxBaseDir/TestDropBox/*/*')

    conf = config.replay()

    logging.info('Cleaning up backend database...')
    execute('cmscond_schema_manager -c %s -P %s --dropAll' % (conf.destinationDB, conf.authpath))

    logging.info('Setting up backend database...')
    execute('cmscond_export_database -s sqlite_file:%s -d %s -P %s' % (replayMasterDB, conf.destinationDB, conf.authpath), 'Y\n')

    dropBoxBE = Dropbox.Dropbox(conf)

    # Replay all the runs
    _fwLoad = conditionDatabase.condDB.FWIncantation()

    i = 0
    for runTimestamp in sorted(dropBoxRuns):
        i += 1
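        # runInfo.json is keyed by the run timestamp formatted like a logging
        # timestamp (milliseconds, not microseconds), hence the [:-3] below.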
        (hltRun, fcsRun) = runInfo[runTimestamp.strftime('%Y-%m-%d %H:%M:%S,%f')[:-3]]
        logging.info('[%s/%s] %s: Replaying run with hltRun %s and fcsRun %s...', i, len(dropBoxRuns), runTimestamp, hltRun, fcsRun)

        j = 0
        for fileName in dropBoxRuns[runTimestamp]:
            j += 1
            logging.info('  [%s/%s] %s: Converting...', j, len(dropBoxRuns[runTimestamp]), fileName)

            tarFile = tarfile.open(os.path.join(dropBoxReplayFilesFolder, fileName))

            names = tarFile.getnames()
            if len(names) != 2:
                raise Exception('%s: Invalid number of files in tar file.' % fileName)

            baseFileName = names[0].rsplit('.', 1)[0]
            dbFileName = '%s.db' % baseFileName
            txtFileName = '%s.txt' % baseFileName
            if set([dbFileName, txtFileName]) != set(names):
                raise Exception('%s: Invalid file names in tar file.' % fileName)

            with open('/tmp/replayRequest.txt', 'wb') as f:
                f.write(metadata.port(tarFile.extractfile(txtFileName).read(), fileName))

            with open('/tmp/replayRequest.db', 'wb') as f:
                f.write(tarFile.extractfile(dbFileName).read())

            tarFile.close()

            logging.info('  [%s/%s] %s: Uploading...', j, len(dropBoxRuns[runTimestamp]), fileName)

            try:
                doUpload.upload('/tmp/replayRequest', 'private')
            except doUpload.UploadError as e:
                # If it is an error from the server (i.e. UploadError),
                # we can continue with the next file.
                # Any other exception is not caught, since that would mean
                # a real problem with the upload.py script.
                logging.info('  [%s/%s] %s: Upload error: %s', j, len(dropBoxRuns[runTimestamp]), fileName, str(e))

        dropBoxBE.reprocess(runTimestamp, hltRun, fcsRun)

        if runTimestamp in truncates:
            for runNumber in truncates[runTimestamp]:
                for tag in truncates[runTimestamp][runNumber]:
                    logging.info('[%s/%s] %s: Truncating up to %s tag %s...', i, len(dropBoxRuns), runTimestamp, runNumber, tag)

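                    # Keep truncating the tag's IOV until its last since
                    # value drops below the run number being replayed.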
                    while True:
                        # FIXME: Why can't we instantiate the RDBMS once?
                        db = conditionDatabase.condDB.RDBMS(conf.authpath).getReadOnlyDB(conf.destinationDB)
                        iov = conditionDatabase.IOVChecker(db)
                        iov.load(tag)

                        lastSince = iov.lastSince()
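                        # For 'lumiid' tags the since value packs the run
                        # number in the upper 32 bits, so shift it out
                        # before comparing against runNumber.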
                        if iov.timetype() == 'lumiid':
                            lastSince >>= 32

                        db.closeSession()

                        logging.info('[%s/%s] %s: lastSince now is %s...', i, len(dropBoxRuns), runTimestamp, lastSince)

                        if lastSince < runNumber:
                            break

                        execute('cmscond_truncate_iov -c %s -P %s -t %s' % (conf.destinationDB, conf.authpath, tag))
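
The execute() helper used throughout this example is not shown here; a minimal sketch of what it might look like, assuming it runs a shell command, optionally feeds it text on stdin, and fails on a non-zero exit code (names and behaviour are guesses, not the actual implementation):

import logging
import subprocess

def execute(command, inputText=None):
    # Hypothetical helper: run a shell command, optionally feeding it stdin,
    # and raise if it exits with a non-zero status.
    logging.info('Executing: %s', command)
    process = subprocess.Popen(command, shell=True,
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    (stdout, stderr) = process.communicate(inputText)
    if process.returncode != 0:
        raise Exception('Command failed with exit code %s: %s'
                        % (process.returncode, stderr))
    return stdout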