示例#1
0
    def __getValidation( self, valType, name, alignments, config, options ):
        """Build the validation object for one requested validation.

        Arguments:
        - `valType`: validation type string from the config section name
          (e.g. "offline", "compare", "preexistingoffline", ...)
        - `name`: name of the validation
        - `alignments`: alignment specification string (comma separated for
          "compare", a single alignment name otherwise)
        - `config`: configuration parser (unused here; ``self.__config`` is
          used instead -- kept for interface compatibility)
        - `options`: command line options (unused here; kept for interface
          compatibility)

        Raises AllInOneError for an unknown `valType` or when 'IDEAL' is
        given as the first alignment of a comparison.
        """
        if valType == "compare":
            # syntax: "compare <val_name>: <alignment> [run], <reference> [run]"
            alignmentsList = alignments.split( "," )
            firstAlignList = alignmentsList[0].split()
            firstAlignName = firstAlignList[0].strip()
            if firstAlignName == "IDEAL":
                raise AllInOneError("'IDEAL' has to be the second (reference)"
                                      " alignment in 'compare <val_name>: "
                                      "<alignment> <reference>'.")
            # the optional second token is the run number; default is "1"
            firstRun = firstAlignList[1] if len(firstAlignList) > 1 else "1"
            firstAlign = Alignment( firstAlignName, self.__config, firstRun )
            firstAlignName = firstAlign.name
            secondAlignList = alignmentsList[1].split()
            secondAlignName = secondAlignList[0].strip()
            secondRun = secondAlignList[1] if len(secondAlignList) > 1 else "1"
            if secondAlignName == "IDEAL":
                # the ideal geometry is passed through as the plain string
                secondAlign = secondAlignName
            else:
                secondAlign = Alignment( secondAlignName, self.__config,
                                         secondRun )
                secondAlignName = secondAlign.name

            validation = GeometryComparison( name, firstAlign, secondAlign,
                                             self.__config,
                                             self.__commandLineOptions.getImages)
        else:
            # all remaining validation types follow one of two construction
            # patterns, so dispatch via lookup tables instead of a long
            # elif chain
            alignmentValidations = {
                "offline": OfflineValidation,
                "offlineDQM": OfflineValidationDQM,
                "mcValidate": MonteCarloValidation,
                "split": TrackSplittingValidation,
                "zmumu": ZMuMuValidation,
                "primaryvertex": PrimaryVertexValidation,
            }
            preexistingValidations = {
                "preexistingoffline": PreexistingOfflineValidation,
                "preexistingmcValidate": PreexistingMonteCarloValidation,
                "preexistingsplit": PreexistingTrackSplittingValidation,
            }
            if valType in alignmentValidations:
                validation = alignmentValidations[valType]( name,
                    Alignment( alignments.strip(), self.__config ),
                    self.__config )
            elif valType in preexistingValidations:
                validation = preexistingValidations[valType](name,
                                                             self.__config)
            else:
                raise AllInOneError("Unknown validation mode '%s'"%valType)

        return validation
    def runJob(self):
        """Run, or register for condor submission, all scripts of this job.

        Preexisting validations are only registered in
        ``ValidationJob.condorConf`` so the merge step can pick them up.
        Returns a log string describing what was done.
        """

        general = self.config.getGeneral()
        log = ""

        if self.preexisting:
            cfg = self.validation.config
            iov = cfg.get("IOV", "iov") if cfg.has_section("IOV") \
                  else "singleIOV"
            # register under the original (non-preexisting) type/name
            originalValType = self.valType.replace('preexisting', '')
            key = (originalValType, self.validation.originalValName, iov)
            ValidationJob.condorConf.setdefault(key, []).append(
                ("preexisting", "", general["logdir"]))
            return (">             " + self.validation.name
                    + " is already validated.")

        for script in self.scripts:
            name = os.path.splitext(os.path.basename(script))[0]
            ValidationJob.jobCount += 1
            if self.commandLineOptions.dryRun:
                print("%s would run: %s" % (name, os.path.basename(script)))
                continue
            log = ">             Validating " + name
            print(">             Validating " + name)
            jobmode = self.validation.jobmode
            if jobmode == "interactive":
                log += getCommandOutput2(script)
                ValidationJob.interactCount += 1
            elif jobmode.split(",")[0] == "condor":
                cfg = self.validation.config
                iov = cfg.get("IOV", "iov") if cfg.has_section("IOV") \
                      else "singleIOV"
                # DAG node name: first three dot-separated parts of the
                # script's file name
                nameParts = script.split("/")[-1].split(".")
                jobName = "%s_%s_%s" % (nameParts[0], nameParts[1],
                                        nameParts[2])
                key = (self.valType, self.valName, iov)
                ValidationJob.condorConf.setdefault(key, []).append(
                    (jobName, script, general["logdir"]))
            else:
                raise AllInOneError("Unknown 'jobmode'!\n"
                                    "Please change this parameter either in "
                                    "the [general] or in the [" +
                                    self.valType + ":" + self.valName +
                                    "] section to one of the following "
                                    "values:\n"
                                    "\tinteractive\n\tcondor, -q <queue>\n")

        return log
示例#3
0
 def __init__(self, validation, config, options):
     """Set up a validation job from one '[validation]' section entry.

     Arguments:
     - `validation`: (key, value) pair of one option of the
       '[validation]' section; the key is "<type> <name>", the value
       the alignment specification
     - `config`: BetterConfigParser with the full configuration
     - `options`: parsed command line options

     Raises AllInOneError for the deprecated intermediate syntax, for a
     malformed entry, or when the referenced section is missing.
     """
     if validation[1] == "":
         if "->" in validation[0]:
             # deprecated intermediate syntax "<val> -> <alignments>:";
             # force the user to use the normal syntax.  (The check must
             # come before splitting on "->", otherwise a malformed entry
             # without "->" crashed with an IndexError.)
             valString = validation[0].split("->")[0]
             alignments = validation[0].split("->")[1]
             msg = (
                 "Instead of using the intermediate syntax\n'" +
                 valString.strip() + "-> " + alignments.strip() +
                 ":'\nyou have to use the now fully supported syntax \n'" +
                 valString.strip() + ": " + alignments.strip() + "'.")
             raise AllInOneError(msg)
         else:
             raise AllInOneError("Validation '%s' misses the alignment "
                                 "specification ('<type> <name>: "
                                 "<alignments>')." % validation[0])
     else:
         valString = validation[0]
         alignments = validation[1]
     valString = valString.split()
     self.__valType = valString[0]
     self.__valName = valString[1]
     self.__commandLineOptions = options
     self.__config = config
     # workaround for intermediate parallel version
     if self.__valType == "offlineParallel":
         section = "offline" + ":" + self.__valName
     else:
         section = self.__valType + ":" + self.__valName
     if not self.__config.has_section(section):
         # Python-3 compatible raise (was the py2-only
         # "raise AllInOneError, (...)" form)
         raise AllInOneError(
             "Validation '%s' of type '%s' is requested in"
             " '[validation]' section, but is not defined."
             "\nYou have to add a '[%s]' section." %
             (self.__valName, self.__valType, section))
     self.validation = self.__getValidation(self.__valType, self.__valName,
                                            alignments, self.__config,
                                            options)
示例#4
0
def createMergeScript(path, validations):
    """Write the merge script ``TkAlMerge.sh`` into `path`.

    Arguments:
    - `path`: output directory for the merge script
    - `validations`: list of validation objects whose results are merged

    Returns the full path of the created script.
    Raises AllInOneError when `validations` is empty.
    """
    if (len(validations) == 0):
        msg = "Cowardly refusing to merge nothing!"
        raise AllInOneError(msg)

    repMap = validations[0].getRepMap()  #FIXME - not nice this way
    repMap.update({
        "DownloadData": "",
        "CompareAlignments": "",
        "RunExtendedOfflineValidation": ""
    })

    comparisonLists = {
    }  # directory of lists containing the validations that are comparable
    resultPlotFile = ""  # string of a file name for createExtendedValidationScript
    for validation in validations:
        for referenceName in validation.filesToCompare:
            validationName = "%s.%s" % (validation.__class__.__name__,
                                        referenceName)
            # drop the trailing default reference suffix, if present
            validationName = validationName.split(
                ".%s" % GenericValidation.defaultReferenceName)[0]
            if validationName in comparisonLists:
                comparisonLists[validationName].append(validation)
            else:
                comparisonLists[validationName] = [validation]
            if validationName == "OfflineValidation":
                resultPlotFile = validationName

    # offline validations additionally get the extended validation script
    if "OfflineValidation" in comparisonLists:
        repMap["extendeValScriptPath"] = \
            os.path.join(path, "TkAlExtendedOfflineValidation.C")
        createExtendedValidationScript(comparisonLists["OfflineValidation"],
                                       repMap["extendeValScriptPath"],
                                       resultPlotFile)
        repMap["RunExtendedOfflineValidation"] = \
            replaceByMap(configTemplates.extendedValidationExecution, repMap)

    repMap["CompareAlignments"] = "#run comparisons"
    for validationId in comparisonLists:
        compareStrings = [
            val.getCompareStrings(validationId)
            for val in comparisonLists[validationId]
        ]

        repMap.update({
            "validationId": validationId,
            "compareStrings": " , ".join(compareStrings)
        })

        repMap["CompareAlignments"] += \
            replaceByMap(configTemplates.compareAlignmentsExecution, repMap)

    filePath = os.path.join(path, "TkAlMerge.sh")
    # 'with' guarantees the file is closed before chmod; 0o755 is the
    # py3-compatible octal literal (was the py2-only "0755")
    with open(filePath, "w") as theFile:
        theFile.write(replaceByMap(configTemplates.mergeTemplate, repMap))
    os.chmod(filePath, 0o755)

    return filePath
示例#5
0
    def __init__(self, validation, config, options):
        """Set up a validation job from one '[validation]' section entry.

        Arguments:
        - `validation`: (key, value) pair of one option of the
          '[validation]' section; the key is "<type> <name>", the value
          the alignment specification
        - `config`: BetterConfigParser with the full configuration
        - `options`: parsed command line options

        Raises AllInOneError for the deprecated intermediate syntax, for a
        malformed entry, or when the referenced section is missing.
        """
        self.JobId = []
        if validation[1] == "":
            if "->" in validation[0]:
                # deprecated intermediate syntax "<val> -> <alignments>:";
                # force the user to use the normal syntax.  (The check must
                # come before splitting on "->", otherwise a malformed
                # entry without "->" crashed with an IndexError.)
                valString = validation[0].split("->")[0]
                alignments = validation[0].split("->")[1]
                msg = (
                    "Instead of using the intermediate syntax\n'" +
                    valString.strip() + "-> " + alignments.strip() +
                    ":'\nyou have to use the now fully supported syntax \n'" +
                    valString.strip() + ": " + alignments.strip() + "'.")
                raise AllInOneError(msg)
            else:
                raise AllInOneError("Validation '%s' misses the alignment "
                                    "specification ('<type> <name>: "
                                    "<alignments>')." % validation[0])
        else:
            valString = validation[0]
            alignments = validation[1]
        valString = valString.split()
        self.__valType = valString[0]
        self.__valName = valString[1]
        self.__commandLineOptions = options
        self.__config = config
        self.__preexisting = ("preexisting" in self.__valType)
        # a leading "*" also marks the validation as preexisting
        if self.__valType[0] == "*":
            self.__valType = self.__valType[1:]
            self.__preexisting = True

        # workaround for intermediate parallel version
        if self.__valType == "offlineParallel":
            print(
                "offlineParallel and offline are now the same.  To run an offline parallel validation,\n"
                "just set parallelJobs to something > 1.  There is no reason to call it offlineParallel anymore."
            )
            self.__valType = "offline"
        section = self.__valType + ":" + self.__valName
        if not self.__config.has_section(section):
            raise AllInOneError("Validation '%s' of type '%s' is requested in"
                                " '[validation]' section, but is not defined."
                                "\nYou have to add a '[%s]' section." %
                                (self.__valName, self.__valType, section))
        self.validation = self.__getValidation(self.__valType, self.__valName,
                                               alignments, self.__config,
                                               options)
示例#6
0
    def runCondorJobs(outdir):
        """Write condor submit and DAGMan files for every job registered in
        ``ValidationJob.condorConf`` and submit the resulting DAG.

        - `outdir`: directory into which the condor/dagman files are written
        """
        # DAGMan needs a directory for its own log files
        dagmanLog = "{}/daglogs".format(outdir)
        os.system("mkdir -p {}".format(dagmanLog))


        # one generic submit file; the per-node script is injected through
        # the $(scriptName) DAGMan variable
        with open("{}/validation.condor".format(outdir), "w") as condor:
            condor.write("universe = vanilla" + "\n")
            condor.write("executable = $(scriptName).sh" + "\n")
            condor.write("log = $(scriptName).log" + "\n")
            condor.write("error = $(scriptName).stderr" + "\n")
            condor.write("output = $(scriptName).stdout" + "\n")
            condor.write('requirements = (OpSysAndVer =?= "CentOS7")' + '\n')
            condor.write('+JobFlavour = "tomorrow"' + "\n")
            condor.write('+RequestMemory = {}'.format(1540) + "\n")
            condor.write('+FileTransferDownloadBytes = {}'.format(1540) + "\n")
            condor.write('+AccountingGroup     = "group_u_CMS.CAF.ALCA"' + '\n')
            condor.write("queue")

        with open("{}/validation.dagman".format(outdir), "w") as dagman:
            # parents maps (valType, valName, iov) -> names of the DAG nodes
            # the corresponding merge node has to wait for
            parents = {}
            for (valType, valName, iov), alignments in six.iteritems(ValidationJob.condorConf):

                parents[(valType, valName, iov)] = []
                for jobInfo in alignments:
                    # preexisting validations have no script to run, so
                    # they get no DAG node of their own
                    if not "preexisting" in jobInfo[0]:
                        dagman.write("JOB {}_{} {}/validation.condor".format(jobInfo[0], iov, outdir) + "\n")
                        dagman.write('VARS {}_{} '.format(jobInfo[0], iov) + 'scriptName="{}"'.format('.'.join(jobInfo[1].split('.')[:-1])) + "\n")
                        parents[(valType, valName, iov)].append('{}_{}'.format(jobInfo[0], iov))
                        dagman.write("\n")

                # NOTE(review): `jobInfo` here is the loop variable leaking
                # out of the loop above, i.e. the merge node uses the logdir
                # of the *last* entry -- presumably all entries of one key
                # share the same logdir; confirm.  If `alignments` were
                # empty this would raise a NameError.
                path =  os.path.join(jobInfo[2], "TkAlMerge.sh")
                if os.path.exists( path ):
                    dagman.write("JOB Merge_{}_{}_{} {}/validation.condor".format(valType, valName, iov, outdir) + "\n")
                    dagman.write("VARS Merge_{}_{}_{} ".format(valType, valName, iov) + 'scriptName="{}"'.format(os.path.join(jobInfo[2], "TkAlMerge")) + "\n")
                    dagman.write("\n")
                else:
                    raise AllInOneError("Merge script '[%s]' not found!"%path)

            # each merge node runs only after all of its validation nodes
            for (valType, valName, iov), alignments in six.iteritems(ValidationJob.condorConf):
                if len(parents[(valType, valName, iov)]) != 0:
                    dagman.write('PARENT {} '.format(" ".join([parent for parent in parents[(valType, valName, iov)]])) + 'CHILD Merge_{}_{}_{}'.format(valType, valName, iov) + "\n")

        # first build the DAGMan condor submit file, then submit it
        submitCommands = ["condor_submit_dag -no_submit -outfile_dir {} {}/validation.dagman".format(dagmanLog, outdir), "condor_submit {}/validation.dagman.condor.sub".format(outdir)]

        for command in submitCommands:
            subprocess.call(command.split(" "))
def main(argv=None):
    """Entry point of the all-in-one validation tool.

    Parses the command line, reads and patches the configuration, builds
    all validation jobs, writes the merge script, and finally runs the
    jobs (and submits the condor DAG unless --dryRun is given).

    - `argv`: optional argument list; defaults to ``sys.argv[1:]``

    Returns 1 when no job name is given and none can be guessed; raises
    AllInOneError on configuration problems.
    """
    if argv is None:
        argv = sys.argv[1:]
    optParser = optparse.OptionParser()
    optParser.description = """All-in-one Alignment Validation.
This will run various validation procedures either on batch queues or interactively.
If no name is given (-N parameter) a name containing time and date is created automatically.
To merge the outcome of all validation procedures run TkAlMerge.sh in your validation's directory.
"""
    optParser.add_option(
        "-n",
        "--dryRun",
        dest="dryRun",
        action="store_true",
        default=False,
        help=
        "create all scripts and cfg File but do not start jobs (default=False)"
    )
    optParser.add_option(
        "--getImages",
        dest="getImages",
        action="store_true",
        default=True,
        help="get all Images created during the process (default= True)")
    defaultConfig = "TkAlConfig.ini"
    optParser.add_option(
        "-c",
        "--config",
        dest="config",
        default=defaultConfig,
        help=
        "configuration to use (default TkAlConfig.ini) this can be a comma-seperated list of all .ini file you want to merge",
        metavar="CONFIG")
    optParser.add_option(
        "-N",
        "--Name",
        dest="Name",
        help="Name of this validation (default: alignmentValidation_DATE_TIME)",
        metavar="NAME")
    optParser.add_option(
        "-r",
        "--restrictTo",
        dest="restrictTo",
        help=
        "restrict validations to given modes (comma seperated) (default: no restriction)",
        metavar="RESTRICTTO")
    optParser.add_option("-d",
                         "--debug",
                         dest="debugMode",
                         action="store_true",
                         default=False,
                         help="run the tool to get full traceback of errors",
                         metavar="DEBUG")

    (options, args) = optParser.parse_args(argv)

    if options.restrictTo is not None:
        options.restrictTo = options.restrictTo.split(",")

    options.config = [ os.path.abspath( iniFile ) for iniFile in \
                       options.config.split( "," )]

    config = BetterConfigParser()
    outputIniFileSet = set(config.read(options.config))
    # ini files passed on the command line that the parser could not read
    failedIniFiles = [
        iniFile for iniFile in options.config
        if iniFile not in outputIniFileSet
    ]

    # Check for missing ini file
    if options.config == [os.path.abspath(defaultConfig)]:
        if (not os.path.exists(defaultConfig)):
            raise AllInOneError("Default 'ini' file '%s' not found!\n"
                                "You can specify another name with the "
                                "command line option '-c'/'--config'." %
                                (defaultConfig))
    else:
        for iniFile in failedIniFiles:
            if not os.path.exists(iniFile):
                raise AllInOneError("'%s' does not exist. Please check for "
                                    "typos in the filename passed to the "
                                    "'-c'/'--config' option!" % (iniFile))
            else:
                raise AllInOneError(("'%s' does exist, but parsing of the "
                                     "content failed!") % iniFile)

    # get the job name
    if options.Name is None:
        # guess the name from the last existing validation directory;
        # next(os.walk(...)) replaces the Python-2-only .next() call
        existingValDirs = fnmatch.filter(
            next(os.walk('.'))[1], "alignmentValidation_*")
        if len(existingValDirs) > 0:
            options.Name = existingValDirs[-1]
        else:
            print("Cannot guess last working directory!")
            print("Please use the parameter '-N' or '--Name' to specify "
                  "the task for which you want a status report.")
            return 1

    # set output path
    outPath = os.path.abspath(options.Name)

    # append the job name to all working/output directories
    general = config.getGeneral()
    config.set("internals", "workdir",
               os.path.join(general["workdir"], options.Name))
    config.set("internals", "scriptsdir", outPath)
    config.set("general", "datadir",
               os.path.join(general["datadir"], options.Name))
    config.set("general", "logdir",
               os.path.join(general["logdir"], options.Name))
    config.set(
        "general", "eosdir",
        os.path.join("AlignmentValidation", general["eosdir"], options.Name))

    if not os.path.exists(outPath):
        os.makedirs(outPath)
    elif not os.path.isdir(outPath):
        raise AllInOneError(
            "the file %s is in the way rename the Job or move it away" %
            outPath)

    # replace default templates by the ones specified in the "alternateTemplates" section
    loadTemplates(config)

    # save backup configuration file; the with-block reliably closes it
    with open(os.path.join(outPath, "usedConfiguration.ini"),
              "w") as backupConfigFile:
        config.write(backupConfigFile)

    #copy proxy, if there is one
    try:
        proxyexists = int(getCommandOutput2("voms-proxy-info --timeleft")) > 10
    except RuntimeError:
        proxyexists = False

    if proxyexists:
        shutil.copyfile(
            getCommandOutput2("voms-proxy-info --path").strip(),
            os.path.join(outPath, ".user_proxy"))

    validations = []
    jobs = []
    for validation in config.items("validation"):
        # each option key is "<type>-<alignment>"
        validation = validation[0].split("-")
        alignmentList = [validation[1]]
        validationsToAdd = [(validation[0],alignment) \
                                for alignment in alignmentList]
        validations.extend(validationsToAdd)

    for validation in validations:

        job = ValidationJobMultiIOV(validation, config, options, outPath,
                                    len(validations))
        if (job.optionMultiIOV == True):
            jobs.extend(job)
        else:
            # NOTE(review): extend() requires ValidationJob to be iterable;
            # if it is not, this should probably be jobs.append(...) --
            # confirm against the ValidationJob implementation.
            jobs.extend(ValidationJob(validation, config, options, 1))

    for job in jobs:
        if job.needsproxy and not proxyexists:
            raise AllInOneError(
                "At least one job needs a grid proxy, please init one.")

    lmap(lambda job: job.createJob(), jobs)

    validations = [job.getValidation() for job in jobs]
    validations = flatten(validations)

    createMergeScript(outPath, validations, options)

    lmap(lambda job: job.runJob(), jobs)

    # submit the condor DAG unless this is a dry run
    if not options.dryRun:
        ValidationJobMultiIOV.runCondorJobs(outPath)
def createMergeScript(path, validations, options):
    """Write one merge script ``TkAlMerge.sh`` per comparable group of
    validations.

    Arguments:
    - `path`: default output directory for the merge scripts
    - `validations`: list of validation objects to merge
    - `options`: command line options (currently unused in this body)

    Raises AllInOneError when `validations` is empty.
    """
    if (len(validations) == 0):
        raise AllInOneError("Cowardly refusing to merge nothing!")

    # one replacement map per (type, name, reference) key
    repMap = {}

    comparisonLists = {
    }  # directory of lists containing the validations that are comparable
    for validation in validations:
        if validation.config.has_section("IOV"):
            iov = validation.config.get("IOV", "iov")
            validation.defaultReferenceName = iov
        for referenceName in validation.filesToCompare:
            validationtype = type(validation)
            validationName = validation.name
            # for multi-IOV runs the default reference names are replaced
            # by the IOV itself
            if validation.config.has_section("IOV") and (
                    referenceName == "Tracker_defaultRange"
                    or referenceName == "Tracker_autoRange"):
                referenceName = iov
            if issubclass(validationtype, PreexistingValidation):
                validationName = validation.originalValName
                #find the actual validationtype
                for parentclass in validationtype.mro():
                    if not issubclass(parentclass, PreexistingValidation):
                        validationtype = parentclass
                        break
            key = (validationtype, validationName, referenceName)

            if key in comparisonLists:
                comparisonLists[key].append(validation)
            else:
                # first validation of this group: initialise its repMap
                comparisonLists[key] = [validation]
                repMap[key] = validation.config.getGeneral()
                repMap[key].update({
                    "DownloadData":
                    "",
                    "CompareAlignments":
                    "",
                    "RunValidationPlots":
                    "",
                    "CMSSW_BASE":
                    os.environ["CMSSW_BASE"],
                    "SCRAM_ARCH":
                    os.environ["SCRAM_ARCH"],
                    "CMSSW_RELEASE_BASE":
                    os.environ["CMSSW_RELEASE_BASE"],
                })

                # introduced to merge individual validation outputs separately
                #  -> avoids problems with merge script
                repMap[key]["doMerge"] = "mergeRetCode=0\n"
                repMap[key]["rmUnmerged"] = (
                    "if [[ mergeRetCode -eq 0 ]]; then\n"
                    "    echo -e \\n\"Merging succeeded, removing original files.\"\n"
                )
                repMap[key]["beforeMerge"] = ""
                repMap[key]["mergeParallelFilePrefixes"] = ""
                repMap[key]["createResultsDirectory"] = ""

    #print("comparisonLists")
    #pprint.pprint(comparisonLists)
    anythingToMerge = []

    # NOTE(review): the loop variable below shadows the `validations`
    # parameter; from here on `validations` is the per-key group.
    for (validationtype, validationName,
         referenceName), validations in six.iteritems(comparisonLists):
        #pprint.pprint("validations")
        #pprint.pprint(validations)
        globalDictionaries.plottingOptions = {}
        lmap(lambda validation: validation.getRepMap(), validations)
        #plotInfo = "plots:offline"
        #allPlotInfo = dict(validations[0].config.items(plotInfo))
        #repMap[(validationtype, validationName, referenceName)].update(allPlotInfo)

        for validation in validations:
            validation.getRepMap()
            #pprint.pprint("validation in validations")
            #pprint.pprint(validation)
            #parallel merging
            if not (isinstance(validation, PreexistingValidation)
                    or validation.NJobs == 1
                    or not isinstance(validation, ParallelValidation)):
                if (validationtype, validationName,
                        referenceName) not in anythingToMerge:
                    anythingToMerge.append(
                        (validationtype, validationName, referenceName))
                    repMap[(
                        validationtype, validationName, referenceName
                    )]["doMerge"] += '\n\n\n\necho -e "\n\nMerging results from %s jobs"\n\n' % validationtype.valType
                    repMap[(validationtype, validationName, referenceName
                            )]["beforeMerge"] += validationtype.doInitMerge()
                repMap[(validationtype, validationName,
                        referenceName)]["doMerge"] += validation.doMerge()
                # unmerged partial output files are removed after a
                # successful merge
                for f in validation.getRepMap()["outputFiles"]:
                    longName = os.path.join(
                        "/eos/cms/store/group/alca_trackeralign/AlignmentValidation/",
                        validation.getRepMap()["eosdir"], f)
                    repMap[(validationtype, validationName, referenceName
                            )]["rmUnmerged"] += "    rm " + longName + "\n"

        # close the "if merge succeeded" shell block opened above
        repMap[(validationtype, validationName,
                referenceName)]["rmUnmerged"] += (
                    "else\n"
                    "    echo -e \\n\"WARNING: Merging failed, unmerged"
                    " files won't be deleted.\\n"
                    "(Ignore this warning if merging was done earlier)\"\n"
                    "fi\n")

        if anythingToMerge:
            repMap[(validationtype, validationName,
                    referenceName)]["DownloadData"] += replaceByMap(
                        configTemplates.mergeParallelResults,
                        repMap[(validationtype, validationName,
                                referenceName)])
        else:
            repMap[(validationtype, validationName,
                    referenceName)]["DownloadData"] = ""

        repMap[(validationtype, validationName,
                referenceName)]["RunValidationPlots"] = ""
        repMap[(validationtype, validationName,
                referenceName)]["plottingscriptpath"] = ""
        if issubclass(validationtype, ValidationWithPlots):
            repMap[(
                validationtype, validationName, referenceName
            )]["RunValidationPlots"] = validationtype.doRunPlots(validations)

        repMap[(validationtype, validationName,
                referenceName)]["CompareAlignments"] = "#run comparisons"
        if issubclass(validationtype, ValidationWithComparison):
            repMap[(
                validationtype, validationName, referenceName
            )]["CompareAlignments"] += validationtype.doComparison(validations)

        #if not merging parallel, add code to create results directory and set merge script name accordingly
        if validations[0].config.has_section("IOV"):
            repMap[(validationtype, validationName,
                    referenceName)]["createResultsDirectory"] = replaceByMap(
                        configTemplates.createResultsDirectoryTemplate,
                        repMap[(validationtype, validationName,
                                referenceName)])
            filePath = os.path.join(
                repMap[(validationtype, validationName,
                        referenceName)]["scriptsdir"], "TkAlMerge.sh")
        else:
            repMap[(validationtype, validationName,
                    referenceName)]["createResultsDirectory"] = replaceByMap(
                        configTemplates.createResultsDirectoryTemplate,
                        repMap[(validationtype, validationName,
                                referenceName)])
            filePath = os.path.join(path, "TkAlMerge.sh")

        # write one executable merge script per comparable group
        theFile = open(filePath, "w")
        theFile.write(
            replaceByMap(
                configTemplates.mergeTemplate,
                repMap[(validationtype, validationName, referenceName)]))
        theFile.close()
        os.chmod(filePath, 0o755)
    def __performMultiIOV(self, validation, alignments, config, options,
                          outPath):
        validations = []
        if self.valType == "compare":
            alignmentsList = alignments.split(",")
            firstAlignList = alignmentsList[0].split()
            firstAlignName = firstAlignList[0].strip()
            secondAlignList = alignmentsList[1].split()
            secondAlignName = secondAlignList[0].strip()
            compareAlignments = "%s" % firstAlignName + "_vs_%s" % secondAlignName
            sectionMultiIOV = "multiIOV:compare"
            if not self.config.has_section(sectionMultiIOV):
                raise AllInOneError(
                    "section'[%s]' not found. Please define the dataset" %
                    sectionMultiIOV)
            iovList = self.config.get(sectionMultiIOV, "iovs")
            iovList = re.sub(r"\s+", "", iovList, flags=re.UNICODE).split(",")
            for iov in iovList:
                tmpConfig = BetterConfigParser()
                tmpConfig.read(options.config)
                general = tmpConfig.getGeneral()
                tmpConfig.add_section("IOV")
                tmpConfig.set("IOV", "iov", iov)
                tmpConfig.set(
                    "internals", "workdir",
                    os.path.join(
                        general["workdir"], options.Name, self.valType +
                        "_%s" % compareAlignments + "_%s" % iov))
                tmpConfig.set(
                    "internals", "scriptsdir",
                    os.path.join(
                        outPath, self.valType + "_%s" % compareAlignments +
                        "_%s" % iov))
                tmpConfig.set(
                    "general", "datadir",
                    os.path.join(
                        general["datadir"], options.Name, self.valType +
                        "_%s" % compareAlignments + "_%s" % iov))
                tmpConfig.set(
                    "general", "logdir",
                    os.path.join(
                        general["logdir"], options.Name, self.valType +
                        "_%s" % compareAlignments + "_%s" % iov))
                tmpConfig.set(
                    "general", "eosdir",
                    os.path.join(
                        "AlignmentValidation", general["eosdir"], options.Name,
                        self.valType + "_%s" % compareAlignments +
                        "_%s" % iov))
                tmpOptions = copy.deepcopy(options)
                tmpOptions.Name = os.path.join(
                    options.Name,
                    self.valType + "_%s" % compareAlignments + "_%s" % iov)
                tmpOptions.config = tmpConfig
                newOutPath = os.path.abspath(tmpOptions.Name)
                if not os.path.exists(newOutPath):
                    os.makedirs(newOutPath)
                elif not os.path.isdir(newOutPath):
                    raise AllInOneError(
                        "the file %s is in the way rename the Job or move it away"
                        % newOutPath)
                job = ValidationJob(validation, tmpConfig, tmpOptions,
                                    len(iovList))
                validations.append(job)

            return validations

        if "preexisting" in self.valType:
            preexistingValType = self.valType
            preexistingValSection = self.valSection
            preexistingEosdir = self.config.get(self.valSection, "eosdirName")
            originalValType = preexistingValType.replace('preexisting', '')
            originalValName = self.config.get(self.valSection,
                                              "originalValName")
            self.valSection = originalValType + ":" + originalValName
            originalAlignment = self.valName

        datasetList = self.config.get(self.valSection, "dataset")
        datasetList = re.sub(r"\s+", "", datasetList,
                             flags=re.UNICODE).split(",")
        for dataset in datasetList:
            sectionMultiIOV = "multiIOV:%s" % dataset
            if not self.config.has_section(sectionMultiIOV):
                raise AllInOneError(
                    "section'[%s]' not found. Please define the dataset" %
                    sectionMultiIOV)
            else:
                datasetBaseName = self.config.get(sectionMultiIOV, "dataset")
                iovList = self.config.get(sectionMultiIOV, "iovs")
                iovList = re.sub(r"\s+", "", iovList,
                                 flags=re.UNICODE).split(",")
                for iov in iovList:
                    datasetName = datasetBaseName + "_since%s" % iov
                    tmpConfig = BetterConfigParser()
                    tmpConfig.read(options.config)
                    general = tmpConfig.getGeneral()
                    if "preexisting" in self.valType:
                        valType = originalValType
                        valName = originalValName
                    else:
                        valType = self.valType
                        valName = self.valName
                    tmpConfig.add_section("IOV")
                    tmpConfig.set("IOV", "iov", iov)
                    tmpConfig.set(self.valSection, "dataset", datasetName)
                    tmpConfig.set(
                        "internals", "workdir",
                        os.path.join(general["workdir"], options.Name,
                                     valType + "_" + valName + "_%s" % iov))
                    tmpConfig.set(
                        "internals", "scriptsdir",
                        os.path.join(outPath,
                                     valType + "_" + valName + "_%s" % iov))
                    tmpConfig.set(
                        "general", "datadir",
                        os.path.join(general["datadir"], options.Name,
                                     valType + "_" + valName + "_%s" % iov))
                    tmpConfig.set(
                        "general", "logdir",
                        os.path.join(general["logdir"], options.Name,
                                     valType + "_" + valName + "_%s" % iov))
                    tmpConfig.set(
                        "general", "eosdir",
                        os.path.join("AlignmentValidation", general["eosdir"],
                                     options.Name,
                                     valType + "_" + valName + "_%s" % iov))
                    if "preexisting" in self.valType:
                        if self.valType == "preexistingoffline":
                            validationClassName = "AlignmentValidation"
                        #elif self.valType == "preexistingmcValidate":
                        #    validationClassName = "MonteCarloValidation"
                        #elif self.valType == "preexistingsplit":
                        #    validationClassName = "TrackSplittingValidation"
                        #elif self.valType == "preexistingprimaryvertex":
                        #    validationClassName = "PrimaryVertexValidation"
                        else:
                            raise AllInOneError(
                                "Unknown validation mode for preexisting option:'%s'"
                                % self.valType)
                        preexistingEosdirPath = os.path.join(
                            "AlignmentValidation", preexistingEosdir,
                            valType + "_" + valName + "_%s" % iov)
                        file = "/eos/cms/store/group/alca_trackeralign/AlignmentValidation/" + "%s" % preexistingEosdirPath + "/%s" % validationClassName + "_%s" % originalValName + "_%s" % originalAlignment + ".root"
                        tmpConfig.set(preexistingValSection, "file", file)
                    tmpOptions = copy.deepcopy(options)
                    tmpOptions.Name = os.path.join(
                        options.Name, valType + "_" + valName + "_%s" % iov)
                    tmpOptions.config = tmpConfig
                    newOutPath = os.path.abspath(tmpOptions.Name)
                    if not os.path.exists(newOutPath):
                        os.makedirs(newOutPath)
                    elif not os.path.isdir(newOutPath):
                        raise AllInOneError(
                            "the file %s is in the way rename the Job or move it away"
                            % newOutPath)
                    job = ValidationJob(validation, tmpConfig, tmpOptions,
                                        len(iovList))
                    validations.append(job)

        return validations
# ---- 示例 (Example) #10 ----
def main(argv=None):
    """All-in-one Alignment Validation driver.

    Parses the command line, reads and patches the configuration file(s),
    prepares the output directory, creates one ValidationJob per entry of
    the ``[validation]`` config section, writes the merge script(s) and
    submits the jobs (interactively, to LSF or to crab, depending on the
    configured jobmode).  With ``-s``/``--status`` only a crab status
    report is produced for an existing task.

    Arguments:
        argv: command line arguments; defaults to ``sys.argv[1:]``.

    Returns:
        1 on user errors detected without raising (e.g. no crab tasks
        found); otherwise ``None``.  Configuration problems raise
        ``AllInOneError``.
    """
    if argv is None:
        argv = sys.argv[1:]
    optParser = optparse.OptionParser()
    optParser.description = """All-in-one Alignment Validation.
This will run various validation procedures either on batch queues or interactively.
If no name is given (-N parameter) a name containing time and date is created automatically.
To merge the outcome of all validation procedures run TkAlMerge.sh in your validation's directory.
"""
    optParser.add_option(
        "-n",
        "--dryRun",
        dest="dryRun",
        action="store_true",
        default=False,
        help=
        "create all scripts and cfg File but do not start jobs (default=False)"
    )
    optParser.add_option(
        "--getImages",
        dest="getImages",
        action="store_true",
        default=True,
        help="get all Images created during the process (default= True)")
    defaultConfig = "TkAlConfig.ini"
    optParser.add_option(
        "-c",
        "--config",
        dest="config",
        default=defaultConfig,
        help=
        "configuration to use (default TkAlConfig.ini) this can be a comma-seperated list of all .ini file you want to merge",
        metavar="CONFIG")
    optParser.add_option(
        "-N",
        "--Name",
        dest="Name",
        help="Name of this validation (default: alignmentValidation_DATE_TIME)",
        metavar="NAME")
    optParser.add_option(
        "-r",
        "--restrictTo",
        dest="restrictTo",
        help=
        "restrict validations to given modes (comma seperated) (default: no restriction)",
        metavar="RESTRICTTO")
    optParser.add_option("-s",
                         "--status",
                         dest="crabStatus",
                         action="store_true",
                         default=False,
                         help="get the status of the crab jobs",
                         metavar="STATUS")
    optParser.add_option("-d",
                         "--debug",
                         dest="debugMode",
                         action="store_true",
                         default=False,
                         help="run the tool to get full traceback of errors",
                         metavar="DEBUG")
    optParser.add_option(
        "-m",
        "--autoMerge",
        dest="autoMerge",
        action="store_true",
        default=False,
        help=
        "submit TkAlMerge.sh to run automatically when all jobs have finished (default=False)."
        " Works only for batch jobs")
    optParser.add_option(
        "--mergeOfflineParallel",
        dest="mergeOfflineParallel",
        action="store_true",
        default=False,
        help=
        "Enable parallel merging of offline data. Best used with -m option. Only works with lxBatch-jobmode",
        metavar="MERGE_PARALLEL")

    (options, args) = optParser.parse_args(argv)

    if options.restrictTo is not None:
        options.restrictTo = options.restrictTo.split(",")

    # -c may be a comma-separated list of ini files; they are merged on read.
    options.config = [os.path.abspath(iniFile)
                      for iniFile in options.config.split(",")]
    config = BetterConfigParser()
    # ConfigParser.read returns the subset of files it could actually parse.
    outputIniFileSet = set(config.read(options.config))
    failedIniFiles = [
        iniFile for iniFile in options.config
        if iniFile not in outputIniFileSet
    ]

    # Check for missing ini file
    if options.config == [os.path.abspath(defaultConfig)]:
        # The default config name was used: only complain if it is actually
        # needed (i.e. we are not just asking for a crab status report).
        if ( not options.crabStatus ) and \
               ( not os.path.exists( defaultConfig ) ):
            raise AllInOneError("Default 'ini' file '%s' not found!\n"
                                "You can specify another name with the "
                                "command line option '-c'/'--config'." %
                                (defaultConfig))
    else:
        for iniFile in failedIniFiles:
            if not os.path.exists(iniFile):
                raise AllInOneError("'%s' does not exist. Please check for "
                                    "typos in the filename passed to the "
                                    "'-c'/'--config' option!" % (iniFile))
            else:
                raise AllInOneError(("'%s' does exist, but parsing of the "
                                     "content failed!") % iniFile)

    # get the job name
    if options.Name is None:
        if not options.crabStatus:
            options.Name = "alignmentValidation_%s" % (
                datetime.datetime.now().strftime("%y%m%d_%H%M%S"))
        else:
            # For a status report without -N, guess the most recent
            # validation directory in the cwd.
            # next(os.walk(...)) works on Python 2 and 3;
            # os.walk('.').next() was Python-2 only.
            existingValDirs = fnmatch.filter(
                next(os.walk('.'))[1], "alignmentValidation_*")
            if existingValDirs:
                options.Name = existingValDirs[-1]
            else:
                print("Cannot guess last working directory!")
                print("Please use the parameter '-N' or '--Name' to specify "
                      "the task for which you want a status report.")
                return 1

    # set output path
    outPath = os.path.abspath(options.Name)

    # Check status of submitted jobs and return
    if options.crabStatus:
        os.chdir(outPath)
        crabLogDirs = fnmatch.filter(next(os.walk('.'))[1], "crab.*")
        if len(crabLogDirs) == 0:
            print("Found no crab tasks for job name '%s'" % (options.Name))
            return 1
        theCrab = crabWrapper.CrabWrapper()
        for crabLogDir in crabLogDirs:
            print()
            print("*" + "=" * 78 + "*")
            print("| Status report and output retrieval for:" + " " *
                  (77 - len("Status report and output retrieval for:")) + "|")
            taskName = crabLogDir.replace("crab.", "")
            print("| " + taskName + " " * (77 - len(taskName)) + "|")
            print("*" + "=" * 78 + "*")
            print()
            crabOptions = {"-getoutput": "", "-c": crabLogDir}
            try:
                theCrab.run(crabOptions)
            except AllInOneError:
                # best effort: a task may legitimately have no output yet
                print("crab:  No output retrieved for this task.")
            crabOptions = {"-status": "", "-c": crabLogDir}
            theCrab.run(crabOptions)
        return

    # Point all work/data/log/eos directories of this run into a
    # subdirectory named after the validation.
    general = config.getGeneral()
    config.set("internals", "workdir",
               os.path.join(general["workdir"], options.Name))
    config.set("internals", "scriptsdir", outPath)
    config.set("general", "datadir",
               os.path.join(general["datadir"], options.Name))
    config.set("general", "logdir",
               os.path.join(general["logdir"], options.Name))
    config.set(
        "general", "eosdir",
        os.path.join("AlignmentValidation", general["eosdir"], options.Name))

    if not os.path.exists(outPath):
        os.makedirs(outPath)
    elif not os.path.isdir(outPath):
        raise AllInOneError(
            "the file %s is in the way rename the Job or move it away" %
            outPath)

    # replace default templates by the ones specified in the "alternateTemplates" section
    loadTemplates(config)

    # save backup configuration file ('with' ensures it is flushed/closed)
    with open(os.path.join(outPath, "usedConfiguration.ini"),
              "w") as backupConfigFile:
        config.write(backupConfigFile)

    # copy proxy, if there is one
    try:
        proxyexists = int(getCommandOutput2("voms-proxy-info --timeleft")) > 10
    except (RuntimeError, ValueError):
        # RuntimeError: command failed; ValueError: unparsable output
        proxyexists = False

    if proxyexists:
        shutil.copyfile(
            getCommandOutput2("voms-proxy-info --path").strip(),
            os.path.join(outPath, ".user_proxy"))

    # one (valName, alignment) pair per entry of the [validation] section
    validations = []
    for validation in config.items("validation"):
        alignmentList = [validation[1]]
        validationsToAdd = [(validation[0],alignment) \
                                for alignment in alignmentList]
        validations.extend(validationsToAdd)
    jobs = [ ValidationJob( validation, config, options) \
                 for validation in validations ]
    for job in jobs:
        if job.needsproxy and not proxyexists:
            raise AllInOneError(
                "At least one job needs a grid proxy, please init one.")
    # NOTE: a bare map(...) here would be a lazy no-op on Python 3 and the
    # jobs would never be created/run; use explicit loops instead.
    for job in jobs:
        job.createJob()
    validations = [job.getValidation() for job in jobs]

    if options.mergeOfflineParallel:
        parallelMergeObjects = createMergeScript(
            outPath, validations, options)['parallelMergeObjects']
    else:
        createMergeScript(outPath, validations, options)

    print()
    for job in jobs:
        job.runJob()

    if options.autoMerge and ValidationJob.jobCount == ValidationJob.batchCount and config.getGeneral(
    )["jobmode"].split(",")[0] == "lxBatch":
        print(">             Automatically merging jobs when they have ended")
        # if everything is done as batch job, also submit TkAlMerge.sh to be run
        # after the jobs have finished

        #if parallel merge scripts: manage dependencies
        if options.mergeOfflineParallel and parallelMergeObjects != {}:
            # Assumes bsub output contains a job id like <123456789>.
            initID = parallelMergeObjects["init"].runJob(config).split(
                "<")[1].split(">")[0]
            parallelIDs = []
            for parallelMergeScript in parallelMergeObjects["parallel"]:
                parallelMergeScript.addDependency(initID)
                for job in jobs:
                    if isinstance(
                            job.validation, OfflineValidation
                    ) and "TkAlMerge" + job.validation.alignmentToValidate.name == parallelMergeScript.name:
                        parallelMergeScript.addDependency(job.JobId)
                parallelIDs.append(
                    parallelMergeScript.runJob(config).split("<")[1].split(">")
                    [0])
            parallelMergeObjects["continue"].addDependency(parallelIDs)
            parallelMergeObjects["continue"].addDependency(
                ValidationJob.batchJobIds)
            parallelMergeObjects["continue"].runJob(config)

        else:
            repMap = {
                "commands":
                config.getGeneral()["jobmode"].split(",")[1],
                "jobName":
                "TkAlMerge",
                "logDir":
                config.getGeneral()["logdir"],
                "script":
                "TkAlMerge.sh",
                "bsub":
                "/afs/cern.ch/cms/caf/scripts/cmsbsub",
                "conditions":
                '"' + " && ".join([
                    "ended(" + jobId + ")"
                    for jobId in ValidationJob.batchJobIds
                ]) + '"'
            }
            # remove stale logs so the new merge job's output is unambiguous
            for ext in ("stdout", "stderr", "stdout.gz", "stderr.gz"):
                oldlog = "%(logDir)s/%(jobName)s." % repMap + ext
                if os.path.exists(oldlog):
                    os.remove(oldlog)

            #issue job
            getCommandOutput2("%(bsub)s %(commands)s "
                              "-o %(logDir)s/%(jobName)s.stdout "
                              "-e %(logDir)s/%(jobName)s.stderr "
                              "-w %(conditions)s "
                              "%(logDir)s/%(script)s" % repMap)
# ---- 示例 (Example) #11 ----
def createMergeScript(path, validations, options):
    """Write the merge script(s) for the given validation jobs.

    Builds shell-script fragments (hadd/merge commands, cleanup of
    unmerged files, plotting and comparison calls) in ``repMap`` and
    substitutes them into the merge templates.  Normally one script,
    ``TkAlMerge.sh``, is written into *path*; with
    ``options.mergeOfflineParallel`` additional per-validation merge
    scripts plus ``TkAlMergeInit.sh``/``TkAlMergeFinal.sh`` are written
    and handle objects for the batch jobs are returned.

    Arguments:
        path: output directory for the generated scripts.
        validations: non-empty list of validation objects (the first
            one's config supplies the general replacement map).
        options: parsed command line options; only ``mergeOfflineParallel``
            is read here.

    Returns:
        The path of the merge script, or — if ``mergeOfflineParallel`` —
        a dict with keys ``'TkAlMerge.sh'`` (script path) and
        ``'parallelMergeObjects'`` (dict of ParallelMergeJob handles).

    Raises:
        AllInOneError: if *validations* is empty.
    """
    if (len(validations) == 0):
        raise AllInOneError("Cowardly refusing to merge nothing!")

    config = validations[0].config
    repMap = config.getGeneral()
    repMap.update({
        "DownloadData": "",
        "CompareAlignments": "",
        "RunValidationPlots": "",
        "CMSSW_BASE": os.environ["CMSSW_BASE"],
        "SCRAM_ARCH": os.environ["SCRAM_ARCH"],
        "CMSSW_RELEASE_BASE": os.environ["CMSSW_RELEASE_BASE"],
    })

    # Group validations by (concrete validation type, reference name) so
    # that comparable results are merged/plotted together.
    comparisonLists = {
    }  # directory of lists containing the validations that are comparable
    for validation in validations:
        for referenceName in validation.filesToCompare:
            validationtype = type(validation)
            if issubclass(validationtype, PreexistingValidation):
                #find the actual validationtype
                # (walk the MRO to the first non-preexisting ancestor)
                for parentclass in validationtype.mro():
                    if not issubclass(parentclass, PreexistingValidation):
                        validationtype = parentclass
                        break
            key = (validationtype, referenceName)
            if key in comparisonLists:
                comparisonLists[key].append(validation)
            else:
                comparisonLists[key] = [validation]

    # introduced to merge individual validation outputs separately
    #  -> avoids problems with merge script
    # The following repMap entries are shell-script fragments that are
    # appended to incrementally in the loops below.
    repMap["doMerge"] = "mergeRetCode=0\n"
    repMap["rmUnmerged"] = (
        "if [[ mergeRetCode -eq 0 ]]; then\n"
        "    echo -e \\n\"Merging succeeded, removing original files.\"\n")
    repMap["beforeMerge"] = ""
    repMap["mergeParallelFilePrefixes"] = ""
    repMap["createResultsDirectory"] = ""

    anythingToMerge = []

    #prepare dictionary containing handle objects for parallel merge batch jobs
    if options.mergeOfflineParallel:
        parallelMergeObjects = {}
    # NOTE(review): the loop variable 'validations' below shadows the
    # function parameter of the same name.
    for (validationType,
         referencename), validations in six.iteritems(comparisonLists):
        for validation in validations:
            #parallel merging
            # Nothing to merge for preexisting results, single-job
            # validations, or types that do not support parallel jobs.
            if (isinstance(validation, PreexistingValidation)
                    or validation.NJobs == 1
                    or not isinstance(validation, ParallelValidation)):
                continue
            if options.mergeOfflineParallel and validationType.valType == 'offline' and validation.jobmode.split(
                    ",")[0] == "lxBatch":
                # Per-validation merge script: work on a copy of repMap so
                # the serial branch below is not polluted.
                repMapTemp = repMap.copy()
                if validationType not in anythingToMerge:
                    anythingToMerge += [validationType]
                    #create init script
                    fileName = "TkAlMergeInit"
                    filePath = os.path.join(path, fileName + ".sh")
                    theFile = open(filePath, "w")
                    repMapTemp["createResultsDirectory"] = "#!/bin/bash"
                    repMapTemp["createResultsDirectory"] += replaceByMap(
                        configTemplates.createResultsDirectoryTemplate,
                        repMapTemp)
                    theFile.write(
                        replaceByMap(
                            configTemplates.createResultsDirectoryTemplate,
                            repMapTemp))
                    theFile.close()
                    os.chmod(filePath, 0o755)
                    #create handle
                    parallelMergeObjects["init"] = ParallelMergeJob(
                        fileName, filePath, [])
                    #clear 'create result directory' code
                    repMapTemp["createResultsDirectory"] = ""

                #edit repMapTmp as necessary:
                #fill contents of mergeParallelResults
                repMapTemp["beforeMerge"] += validationType.doInitMerge()
                repMapTemp[
                    "doMerge"] += '\n\n\n\necho -e "\n\nMerging results from %s jobs with alignment %s"\n\n' % (
                        validationType.valType,
                        validation.alignmentToValidate.name)
                repMapTemp["doMerge"] += validation.doMerge()
                # schedule removal of the per-job output files on EOS once
                # merging succeeded
                for f in validation.getRepMap()["outputFiles"]:
                    longName = os.path.join("/eos/cms/store/caf/user/$USER/",
                                            validation.getRepMap()["eosdir"],
                                            f)
                    repMapTemp["rmUnmerged"] += "    rm " + longName + "\n"

                repMapTemp["rmUnmerged"] += (
                    "else\n"
                    "    echo -e \\n\"WARNING: Merging failed, unmerged"
                    " files won't be deleted.\\n"
                    "(Ignore this warning if merging was done earlier)\"\n"
                    "fi\n")

                #fill mergeParallelResults area of mergeTemplate
                repMapTemp["DownloadData"] = replaceByMap(
                    configTemplates.mergeParallelResults, repMapTemp)
                #fill runValidationPlots area of mergeTemplate
                repMapTemp["RunValidationPlots"] = validationType.doRunPlots(
                    validations)

                #create script file
                fileName = "TkAlMergeOfflineValidation" + validation.name + validation.alignmentToValidate.name
                filePath = os.path.join(path, fileName + ".sh")
                theFile = open(filePath, "w")
                theFile.write(
                    replaceByMap(configTemplates.mergeParallelOfflineTemplate,
                                 repMapTemp))
                theFile.close()
                os.chmod(filePath, 0o755)
                #create handle object
                if "parallel" in parallelMergeObjects:
                    parallelMergeObjects["parallel"].append(
                        ParallelMergeJob(fileName, filePath, []))
                else:
                    parallelMergeObjects["parallel"] = [
                        ParallelMergeJob(fileName, filePath, [])
                    ]
                continue

            else:
                # Serial merge path: accumulate everything into the shared
                # repMap for the single TkAlMerge.sh.
                if validationType not in anythingToMerge:
                    anythingToMerge += [validationType]
                    repMap[
                        "doMerge"] += '\n\n\n\necho -e "\n\nMerging results from %s jobs"\n\n' % validationType.valType
                    repMap["beforeMerge"] += validationType.doInitMerge()
                repMap["doMerge"] += validation.doMerge()
                for f in validation.getRepMap()["outputFiles"]:
                    longName = os.path.join("/eos/cms/store/caf/user/$USER/",
                                            validation.getRepMap()["eosdir"],
                                            f)
                    repMap["rmUnmerged"] += "    rm " + longName + "\n"

    # close the rmUnmerged if/else opened above
    repMap["rmUnmerged"] += (
        "else\n"
        "    echo -e \\n\"WARNING: Merging failed, unmerged"
        " files won't be deleted.\\n"
        "(Ignore this warning if merging was done earlier)\"\n"
        "fi\n")

    if anythingToMerge:
        repMap["DownloadData"] += replaceByMap(
            configTemplates.mergeParallelResults, repMap)
    else:
        repMap["DownloadData"] = ""

    # plotting commands, one block per validation type that supports plots
    repMap["RunValidationPlots"] = ""
    for (validationType,
         referencename), validations in six.iteritems(comparisonLists):
        if issubclass(validationType, ValidationWithPlots):
            repMap["RunValidationPlots"] += validationType.doRunPlots(
                validations)

    repMap["CompareAlignments"] = "#run comparisons"
    for (validationType,
         referencename), validations in six.iteritems(comparisonLists):
        if issubclass(validationType, ValidationWithComparison):
            repMap["CompareAlignments"] += validationType.doComparison(
                validations)

    #if user wants to merge parallely and if there are valid parallel scripts, create handle for plotting job and set merge script name accordingly
    if options.mergeOfflineParallel and parallelMergeObjects != {}:
        parallelMergeObjects["continue"] = ParallelMergeJob(
            "TkAlMergeFinal", os.path.join(path, "TkAlMergeFinal.sh"), [])
        filePath = os.path.join(path, "TkAlMergeFinal.sh")
    #if not merging parallel, add code to create results directory and set merge script name accordingly
    else:
        repMap["createResultsDirectory"] = replaceByMap(
            configTemplates.createResultsDirectoryTemplate, repMap)
        filePath = os.path.join(path, "TkAlMerge.sh")

    #filePath = os.path.join(path, "TkAlMerge.sh")
    theFile = open(filePath, "w")
    theFile.write(replaceByMap(configTemplates.mergeTemplate, repMap))
    theFile.close()
    os.chmod(filePath, 0o755)

    if options.mergeOfflineParallel:
        return {
            'TkAlMerge.sh': filePath,
            'parallelMergeObjects': parallelMergeObjects
        }
    else:
        return filePath
# ---- 示例 (Example) #12 ----
    def runJob(self):
        """Submit or run the scripts of this validation job.

        Preexisting validations are not re-run: their stored job id (if
        any) is registered for dependency handling and a log line is
        returned.  Otherwise every script in ``self.__scripts`` is
        executed according to ``self.validation.jobmode``:

        * ``interactive`` — run directly via getCommandOutput2;
        * ``lxBatch, <bsub args>`` — submit with cmsbsub, record the job
          id in ``self.JobId`` and ``ValidationJob.batchJobIds``;
        * ``crab, ...`` — create and submit a crab task from the logdir.

        With ``--dryRun`` nothing is executed.  The class-level counters
        (jobCount, interactCount, batchCount, crabCount) are updated as
        a side effect.

        Returns:
            str: accumulated log output (last script's log for dry/other
            modes).

        Raises:
            AllInOneError: for an unknown jobmode.
        """
        if self.__preexisting:
            if self.validation.jobid:
                # register the stored id so merge jobs can depend on it
                self.batchJobIds.append(self.validation.jobid)
            log = ">             " + self.validation.name + " is already validated."
            print(log)
            return log
        else:
            if self.validation.jobid:
                print(
                    "jobid {} will be ignored, since the validation {} is not preexisting"
                    .format(self.validation.jobid, self.validation.name))

        general = self.__config.getGeneral()
        log = ""
        for script in self.__scripts:
            name = os.path.splitext(os.path.basename(script))[0]
            ValidationJob.jobCount += 1
            if self.__commandLineOptions.dryRun:
                print("%s would run: %s" % (name, os.path.basename(script)))
                continue
            log = ">             Validating " + name
            print(">             Validating " + name)
            if self.validation.jobmode == "interactive":
                log += getCommandOutput2(script)
                ValidationJob.interactCount += 1
            elif self.validation.jobmode.split(",")[0] == "lxBatch":
                repMap = {
                    "commands": self.validation.jobmode.split(",")[1],
                    "logDir": general["logdir"],
                    "jobName": name,
                    "script": script,
                    "bsub": "/afs/cern.ch/cms/caf/scripts/cmsbsub"
                }
                # remove stale logs from a previous submission of this job
                for ext in ("stdout", "stderr", "stdout.gz", "stderr.gz"):
                    oldlog = "%(logDir)s/%(jobName)s." % repMap + ext
                    if os.path.exists(oldlog):
                        os.remove(oldlog)
                bsubOut = getCommandOutput2("%(bsub)s %(commands)s "
                                            "-J %(jobName)s "
                                            "-o %(logDir)s/%(jobName)s.stdout "
                                            "-e %(logDir)s/%(jobName)s.stderr "
                                            "%(script)s" % repMap)
                #Attention: here it is assumed that bsub returns a string
                #containing a job id like <123456789>
                jobid = bsubOut.split("<")[1].split(">")[0]
                self.JobId.append(jobid)
                ValidationJob.batchJobIds.append(jobid)
                log += bsubOut
                ValidationJob.batchCount += 1
            elif self.validation.jobmode.split(",")[0] == "crab":
                # crab expects to be run from the log directory where the
                # cfg file was written
                os.chdir(general["logdir"])
                crabName = "crab." + os.path.basename(script)[:-3]
                theCrab = crabWrapper.CrabWrapper()
                options = {
                    "-create": "",
                    "-cfg": crabName + ".cfg",
                    "-submit": ""
                }
                try:
                    theCrab.run(options)
                except AllInOneError as e:
                    print("crab:", str(e).split("\n")[0])
                    exit(1)
                ValidationJob.crabCount += 1

            else:
                raise AllInOneError("Unknown 'jobmode'!\n"
                                    "Please change this parameter either in "
                                    "the [general] or in the [" +
                                    self.__valType + ":" + self.__valName +
                                    "] section to one of the following "
                                    "values:\n"
                                    "\tinteractive\n\tlxBatch, -q <queue>\n"
                                    "\tcrab, -q <queue>")

        return log
# ---- 示例 (Example) #13 ----
def createMergeScript( path, validations ):
    """Create the merge script ``TkAlMerge.sh`` in *path*.

    Arguments:
    - path: directory in which the script (and helper ROOT macros) are created
    - validations: non-empty list of validation objects whose parallel job
      outputs shall be merged; the first one provides the base replacement map

    Returns the full path of the created, executable script.
    Raises AllInOneError if *validations* is empty.
    """
    if not validations:
        raise AllInOneError("Cowardly refusing to merge nothing!")

    repMap = validations[0].getRepMap() #FIXME - not nice this way
    repMap.update({
            "DownloadData":"",
            "CompareAlignments":"",
            "RunExtendedOfflineValidation":"",
            "RunTrackSplitPlot":""
            })

    # dictionary of lists containing the validations that are comparable,
    # keyed by "<class name>.<reference name>" with the default reference
    # suffix stripped
    comparisonLists = {}
    for validation in validations:
        for referenceName in validation.filesToCompare:
            validationName = "%s.%s"%(validation.__class__.__name__, referenceName)
            validationName = validationName.split(".%s"%GenericValidation.defaultReferenceName )[0]
            comparisonLists.setdefault( validationName, [] ).append( validation )

    # introduced to merge individual validation outputs separately
    #  -> avoids problems with merge script
    repMap["haddLoop"] = "mergeRetCode=0\n"
    repMap["rmUnmerged"] = ("if [[ mergeRetCode -eq 0 ]]; then\n"
                            "    echo -e \\n\"Merging succeeded, removing original files.\"\n")
    repMap["copyMergeScripts"] = ""
    repMap["mergeParallelFilePrefixes"] = ""

    anythingToMerge = []
    for validationType in comparisonLists:
        for validation in comparisonLists[validationType]:
            # only validations split into several parallel jobs need merging
            if validation.NJobs == 1:
                continue
            if validationType not in anythingToMerge:
                anythingToMerge += [validationType]
                repMap["haddLoop"] += '\n\n\n\necho -e "\n\nMerging results from %s jobs"\n\n' % validationType
            repMap["haddLoop"] = validation.appendToMerge(repMap["haddLoop"])
            repMap["haddLoop"] += "tmpMergeRetCode=${?}\n"
            # stage out the merged file only if the merge itself succeeded
            repMap["haddLoop"] += ("if [[ tmpMergeRetCode -eq 0 ]]; then "
                                   "cmsStage -f "
                                   +validation.getRepMap()["finalOutputFile"]
                                   +" "
                                   +validation.getRepMap()["finalResultFile"]
                                   +"; fi\n")
            # keep the worst (highest) return code over all merges
            repMap["haddLoop"] += ("if [[ ${tmpMergeRetCode} -gt ${mergeRetCode} ]]; then "
                                   "mergeRetCode=${tmpMergeRetCode}; fi\n")
            for f in validation.getRepMap()["outputFiles"]:
                longName = os.path.join("/store/caf/user/$USER/",
                                        validation.getRepMap()["eosdir"], f)
                repMap["rmUnmerged"] += "    cmsRm "+longName+"\n"
    repMap["rmUnmerged"] += ("else\n"
                             "    echo -e \\n\"WARNING: Merging failed, unmerged"
                             " files won't be deleted.\\n"
                             "(Ignore this warning if merging was done earlier)\"\n"
                             "fi\n")

    if "OfflineValidation" in anythingToMerge:
        repMap["mergeOfflineParJobsScriptPath"] = os.path.join(path, "TkAlOfflineJobsMerge.C")
        createOfflineParJobsMergeScript( comparisonLists["OfflineValidation"],
                                         repMap["mergeOfflineParJobsScriptPath"] )
        repMap["copyMergeScripts"] += ("cp .oO[CMSSW_BASE]Oo./src/Alignment/OfflineValidation/scripts/merge_TrackerOfflineValidation.C .\n"
                                       "rfcp %s .\n" % repMap["mergeOfflineParJobsScriptPath"])

    if anythingToMerge:
        # DownloadData is the section which merges output files from parallel jobs
        # it uses the file TkAlOfflineJobsMerge.C
        repMap["DownloadData"] += replaceByMap( configTemplates.mergeParallelResults, repMap )
    else:
        repMap["DownloadData"] = ""


    if "OfflineValidation" in comparisonLists:
        repMap["extendedValScriptPath"] = os.path.join(path, "TkAlExtendedOfflineValidation.C")
        createExtendedValidationScript(comparisonLists["OfflineValidation"],
                                       repMap["extendedValScriptPath"],
                                       "OfflineValidation")
        repMap["RunExtendedOfflineValidation"] = \
            replaceByMap(configTemplates.extendedValidationExecution, repMap)

    if "TrackSplittingValidation" in comparisonLists:
        repMap["trackSplitPlotScriptPath"] = \
            os.path.join(path, "TkAlTrackSplitPlot.C")
        createTrackSplitPlotScript(comparisonLists["TrackSplittingValidation"],
                                       repMap["trackSplitPlotScriptPath"] )
        repMap["RunTrackSplitPlot"] = \
            replaceByMap(configTemplates.trackSplitPlotExecution, repMap)

    repMap["CompareAlignments"] = "#run comparisons"
    for validationId in comparisonLists:
        compareStrings = [ val.getCompareStrings(validationId) for val in comparisonLists[validationId] ]
        compareStringsPlain = [ val.getCompareStrings(validationId, plain=True) for val in comparisonLists[validationId] ]

        repMap.update({"validationId": validationId,
                       "compareStrings": " , ".join(compareStrings),
                       "compareStringsPlain": " ".join(compareStringsPlain) })

        repMap["CompareAlignments"] += \
            replaceByMap(configTemplates.compareAlignmentsExecution, repMap)

    filePath = os.path.join(path, "TkAlMerge.sh")
    # context manager guarantees the file is closed even if writing raises
    with open( filePath, "w" ) as theFile:
        theFile.write( replaceByMap( configTemplates.mergeTemplate, repMap ) )
    # 0o755 (was the Python-2-only literal 0755): make the script executable
    os.chmod(filePath, 0o755)

    return filePath
# ----- 示例#14 (Example 14) -----
def createMergeScript(path, validations):
    """Create the merge script ``TkAlMerge.sh`` in *path*.

    Arguments:
    - path: directory in which the script is created
    - validations: non-empty list of validation objects; the first one's
      config supplies the general replacement map

    Returns the full path of the created, executable script.
    Raises AllInOneError if *validations* is empty.
    """
    if not validations:
        raise AllInOneError("Cowardly refusing to merge nothing!")

    config = validations[0].config
    repMap = config.getGeneral()
    repMap.update({
        "DownloadData": "",
        "CompareAlignments": "",
        "RunValidationPlots": "",
        "CMSSW_BASE": os.environ["CMSSW_BASE"],
        "SCRAM_ARCH": os.environ["SCRAM_ARCH"],
        "CMSSW_RELEASE_BASE": os.environ["CMSSW_RELEASE_BASE"],
    })

    # dictionary of lists containing the validations that are comparable,
    # keyed by (validation type, reference name)
    comparisonLists = {}
    for validation in validations:
        for referenceName in validation.filesToCompare:
            validationtype = type(validation)
            # bugfix: this used to be isinstance(validationtype, ...), which
            # is always False for a class object, leaving the branch dead;
            # issubclass is the intended check
            if issubclass(validationtype, PreexistingValidation):
                #find the actual validationtype
                for parentclass in validationtype.mro():
                    if not issubclass(parentclass, PreexistingValidation):
                        validationtype = parentclass
                        break
            comparisonLists.setdefault((validationtype, referenceName),
                                       []).append(validation)

    # introduced to merge individual validation outputs separately
    #  -> avoids problems with merge script
    repMap["doMerge"] = "mergeRetCode=0\n"
    repMap["rmUnmerged"] = (
        "if [[ mergeRetCode -eq 0 ]]; then\n"
        "    echo -e \\n\"Merging succeeded, removing original files.\"\n")
    repMap["beforeMerge"] = ""
    repMap["mergeParallelFilePrefixes"] = ""

    anythingToMerge = []

    # .items() instead of the Python-2-only .iteritems(); 'comparables'
    # avoids shadowing the 'validations' parameter
    for (validationType, referencename), comparables in comparisonLists.items():
        for validation in comparables:
            # preexisting results, single jobs and non-parallel validations
            # need no merging
            if (isinstance(validation, PreexistingValidation)
                    or validation.NJobs == 1
                    or not isinstance(validation, ParallelValidation)):
                continue
            if validationType not in anythingToMerge:
                anythingToMerge += [validationType]
                repMap[
                    "doMerge"] += '\n\n\n\necho -e "\n\nMerging results from %s jobs"\n\n' % validationType.valType
                repMap["beforeMerge"] += validationType.doInitMerge()
            repMap["doMerge"] += validation.doMerge()
            for f in validation.getRepMap()["outputFiles"]:
                longName = os.path.join("/store/caf/user/$USER/",
                                        validation.getRepMap()["eosdir"], f)
                repMap["rmUnmerged"] += "    eos rm " + longName + "\n"
    repMap["rmUnmerged"] += (
        "else\n"
        "    echo -e \\n\"WARNING: Merging failed, unmerged"
        " files won't be deleted.\\n"
        "(Ignore this warning if merging was done earlier)\"\n"
        "fi\n")

    if anythingToMerge:
        repMap["DownloadData"] += replaceByMap(
            configTemplates.mergeParallelResults, repMap)
    else:
        repMap["DownloadData"] = ""

    repMap["RunValidationPlots"] = ""
    for (validationType, referencename), comparables in comparisonLists.items():
        if issubclass(validationType, ValidationWithPlots):
            repMap["RunValidationPlots"] += validationType.doRunPlots(
                comparables)

    repMap["CompareAlignments"] = "#run comparisons"
    for (validationType, referencename), comparables in comparisonLists.items():
        if issubclass(validationType, ValidationWithComparison):
            repMap["CompareAlignments"] += validationType.doComparison(
                comparables)

    filePath = os.path.join(path, "TkAlMerge.sh")
    # context manager guarantees the file is closed even if writing raises
    with open(filePath, "w") as theFile:
        theFile.write(replaceByMap(configTemplates.mergeTemplate, repMap))
    os.chmod(filePath, 0o755)  # make the merge script executable

    return filePath
# ----- 示例#15 (Example 15) -----
def createMergeScript(path, validations):
    """Create the merge script ``TkAlMerge.sh`` in *path*.

    Arguments:
    - path: directory in which the script and the helper ROOT macros are
      created
    - validations: non-empty list of validation objects; the first one's
      config supplies the general replacement map

    Returns the full path of the created, executable script.
    Raises AllInOneError if *validations* is empty.
    """
    if not validations:
        raise AllInOneError("Cowardly refusing to merge nothing!")

    config = validations[0].config
    repMap = config.getGeneral()
    repMap.update({
        "DownloadData": "",
        "CompareAlignments": "",
        "RunExtendedOfflineValidation": "",
        "RunTrackSplitPlot": "",
        "MergeZmumuPlots": "",
        "RunPrimaryVertexPlot": "",
        "CMSSW_BASE": os.environ["CMSSW_BASE"],
        "SCRAM_ARCH": os.environ["SCRAM_ARCH"],
        "CMSSW_RELEASE_BASE": os.environ["CMSSW_RELEASE_BASE"],
    })

    # dictionary of lists containing the validations that are comparable,
    # keyed by "<class name>.<reference name>" with the default reference
    # suffix and a leading "Preexisting" stripped
    comparisonLists = {}
    for validation in validations:
        for referenceName in validation.filesToCompare:
            validationName = "%s.%s" % (validation.__class__.__name__,
                                        referenceName)
            validationName = validationName.split(
                ".%s" % GenericValidation.defaultReferenceName)[0]
            validationName = validationName.split("Preexisting")[-1]
            comparisonLists.setdefault(validationName, []).append(validation)

    # introduced to merge individual validation outputs separately
    #  -> avoids problems with merge script
    repMap["haddLoop"] = "mergeRetCode=0\n"
    repMap["rmUnmerged"] = (
        "if [[ mergeRetCode -eq 0 ]]; then\n"
        "    echo -e \\n\"Merging succeeded, removing original files.\"\n")
    repMap["copyMergeScripts"] = ""
    repMap["mergeParallelFilePrefixes"] = ""

    anythingToMerge = []

    for validationType in comparisonLists:
        for validation in comparisonLists[validationType]:
            # preexisting results and single jobs need no merging
            if isinstance(validation,
                          PreexistingValidation) or validation.NJobs == 1:
                continue
            if validationType not in anythingToMerge:
                anythingToMerge += [validationType]
                repMap[
                    "haddLoop"] += '\n\n\n\necho -e "\n\nMerging results from %s jobs"\n\n' % validationType
            repMap["haddLoop"] = validation.appendToMerge(repMap["haddLoop"])
            repMap["haddLoop"] += "tmpMergeRetCode=${?}\n"
            # copy the merged file to EOS only if the merge itself succeeded
            repMap["haddLoop"] += ("if [[ tmpMergeRetCode -eq 0 ]]; then "
                                   "xrdcp -f " +
                                   validation.getRepMap()["finalOutputFile"] +
                                   " root://eoscms//eos/cms" +
                                   validation.getRepMap()["finalResultFile"] +
                                   "; fi\n")
            # keep the worst (highest) return code over all merges
            repMap["haddLoop"] += (
                "if [[ ${tmpMergeRetCode} -gt ${mergeRetCode} ]]; then "
                "mergeRetCode=${tmpMergeRetCode}; fi\n")
            for f in validation.getRepMap()["outputFiles"]:
                longName = os.path.join("/store/caf/user/$USER/",
                                        validation.getRepMap()["eosdir"], f)
                repMap["rmUnmerged"] += "    $eos rm " + longName + "\n"
    repMap["rmUnmerged"] += (
        "else\n"
        "    echo -e \\n\"WARNING: Merging failed, unmerged"
        " files won't be deleted.\\n"
        "(Ignore this warning if merging was done earlier)\"\n"
        "fi\n")

    if "OfflineValidation" in anythingToMerge:
        repMap["mergeOfflineParJobsScriptPath"] = os.path.join(
            path, "TkAlOfflineJobsMerge.C")

        createOfflineParJobsMergeScript(
            comparisonLists["OfflineValidation"],
            repMap["mergeOfflineParJobsScriptPath"])
        repMap["copyMergeScripts"] += (
            "cp .oO[Alignment/OfflineValidation]Oo./scripts/merge_TrackerOfflineValidation.C .\n"
            "rfcp %s .\n" % repMap["mergeOfflineParJobsScriptPath"])
        repMap_offline = repMap.copy()
        repMap_offline.update(PlottingOptions(config, "offline"))
        repMap["copyMergeScripts"] = \
            replaceByMap(repMap["copyMergeScripts"], repMap_offline)

    if anythingToMerge:
        # DownloadData is the section which merges output files from parallel jobs
        # it uses the file TkAlOfflineJobsMerge.C
        repMap["DownloadData"] += replaceByMap(
            configTemplates.mergeParallelResults, repMap)
    else:
        repMap["DownloadData"] = ""

    if "OfflineValidation" in comparisonLists:
        repMap["extendedValScriptPath"] = os.path.join(
            path, "TkAlExtendedOfflineValidation.C")
        createExtendedValidationScript(comparisonLists["OfflineValidation"],
                                       repMap["extendedValScriptPath"],
                                       "OfflineValidation")
        repMap_offline = repMap.copy()
        repMap_offline.update(PlottingOptions(config, "offline"))
        repMap["RunExtendedOfflineValidation"] = \
            replaceByMap(configTemplates.extendedValidationExecution, repMap_offline)

    if "TrackSplittingValidation" in comparisonLists:
        repMap["trackSplitPlotScriptPath"] = \
            os.path.join(path, "TkAlTrackSplitPlot.C")
        createTrackSplitPlotScript(comparisonLists["TrackSplittingValidation"],
                                   repMap["trackSplitPlotScriptPath"])
        repMap_split = repMap.copy()
        repMap_split.update(PlottingOptions(config, "split"))
        repMap["RunTrackSplitPlot"] = \
            replaceByMap(configTemplates.trackSplitPlotExecution, repMap_split)

    if "ZMuMuValidation" in comparisonLists:
        repMap["mergeZmumuPlotsScriptPath"] = \
            os.path.join(path, "TkAlMergeZmumuPlots.C")
        createMergeZmumuPlotsScript(comparisonLists["ZMuMuValidation"],
                                    repMap["mergeZmumuPlotsScriptPath"])
        repMap_zMuMu = repMap.copy()
        repMap_zMuMu.update(PlottingOptions(config, "zmumu"))
        repMap["MergeZmumuPlots"] = \
            replaceByMap(configTemplates.mergeZmumuPlotsExecution, repMap_zMuMu)

    if "PrimaryVertexValidation" in comparisonLists:
        repMap["PrimaryVertexPlotScriptPath"] = \
            os.path.join(path, "TkAlPrimaryVertexValidationPlot.C")

        createPrimaryVertexPlotScript(
            comparisonLists["PrimaryVertexValidation"],
            repMap["PrimaryVertexPlotScriptPath"])
        repMap_PVVal = repMap.copy()
        repMap_PVVal.update(PlottingOptions(config, "primaryvertex"))
        repMap["RunPrimaryVertexPlot"] = \
            replaceByMap(configTemplates.PrimaryVertexPlotExecution, repMap_PVVal)

    repMap["CompareAlignments"] = "#run comparisons"
    if "OfflineValidation" in comparisonLists:
        compareStrings = [
            val.getCompareStrings("OfflineValidation")
            for val in comparisonLists["OfflineValidation"]
        ]
        compareStringsPlain = [
            val.getCompareStrings("OfflineValidation", plain=True)
            for val in comparisonLists["OfflineValidation"]
        ]

        repMap_offline = repMap.copy()
        repMap_offline.update(PlottingOptions(config, "offline"))
        repMap_offline.update({
            "validationId":
            "OfflineValidation",
            "compareStrings":
            " , ".join(compareStrings),
            "compareStringsPlain":
            " ".join(compareStringsPlain)
        })

        repMap["CompareAlignments"] += \
            replaceByMap(configTemplates.compareAlignmentsExecution, repMap_offline)

    filePath = os.path.join(path, "TkAlMerge.sh")
    # context manager guarantees the file is closed even if writing raises
    with open(filePath, "w") as theFile:
        theFile.write(replaceByMap(configTemplates.mergeTemplate, repMap))
    os.chmod(filePath, 0o755)  # make the merge script executable

    return filePath