Esempio n. 1
0
 def dump_cff(self,
              outName=None,
              jsonPath=None,
              begin=None,
              end=None,
              firstRun=None,
              lastRun=None):
     """Store this dataset as a predefined _cff.py snippet in the user's
     Alignment/OfflineValidation package.

     Arguments:
     - `outName`: base name of the generated file (default "Dataset")
     - `jsonPath`, `begin`, `end`, `firstRun`, `lastRun`: run/time selection,
       forwarded unchanged to `__createSnippet`

     Raises AllInOneError if the target package is not checked out.
     Asks interactively before overwriting an existing predefined dataset.
     """
     if outName is None:
         outName = "Dataset"
     packageName = os.path.join("Alignment", "OfflineValidation")
     if not os.path.exists(
             os.path.join(os.environ["CMSSW_BASE"], "src", packageName)):
         msg = ("You try to store the predefined dataset '%s'.\n"
                "For that you need to check out the package '%s' to your "
                "private release area in\n" % (outName, packageName) +
                os.environ["CMSSW_BASE"])
         raise AllInOneError(msg)
     # The snippet is written as a standalone cff: no process prefix,
     # no indentation, all events, and with the cms import included.
     theMap = {
         "process": "",
         "tab": "",
         "nEvents": str(-1),
         "importCms": "import FWCore.ParameterSet.Config as cms\n"
     }
     dataset_cff = self.__createSnippet(jsonPath=jsonPath,
                                        begin=begin,
                                        end=end,
                                        firstRun=firstRun,
                                        lastRun=lastRun,
                                        repMap=theMap)
     filePath = os.path.join(os.environ["CMSSW_BASE"], "src", packageName,
                             "python", outName + "_cff.py")
     if os.path.exists(filePath):
         existMsg = "The predefined dataset '%s' already exists.\n" % (
             outName)
         askString = "Do you want to overwrite it? [y/n]\n"
         inputQuery = existMsg + askString
         while True:
             userInput = raw_input(inputQuery).lower()
             if userInput == "y":
                 break
             elif userInput == "n":
                 return
             else:
                 # Unrecognized answer: re-ask, but without the long preamble.
                 inputQuery = askString
     print(
         "The predefined dataset '%s' will be stored in the file\n" %
         (outName) + filePath +
         "\nFor future use you have to do 'scram b'.")
     print("")
     # Use a context manager so the file is closed even if write() fails.
     with open(filePath, "w") as theFile:
         theFile.write(dataset_cff)
     return
Esempio n. 2
0
    def __getMagneticFieldForRun( self, run = -1, tolerance = 0.5 ):
        """For MC, this returns the same as the previous function.
           For data, it gets the magnetic field from the runs.  This is important for
           deciding which template to use for offlinevalidation

           Returns a float (Tesla) on success, or a string starting with
           "unknown " describing why the field could not be determined.
        """
        if self.__dataType == "mc" and self.__magneticField == "MagneticField":
            return 3.8                                        #For 3.8T MC the default MagneticField is used
        if "T" in self.__magneticField:
            Bfield = self.__magneticField.split("T")[0].replace("MagneticField_","")
            try:
                return float(Bfield) / 10.0                       #e.g. 38T and 38T_PostLS1 both return 3.8
            except ValueError:
                pass
        if self.__predefined:
            with open(self.__filename) as f:
                Bfield = None
                for line in f.readlines():
                    if line.startswith("#magnetic field: ") and "," in line:
                        # Bug fix: previously the first match was returned
                        # immediately, so the duplicate-line check below could
                        # never fire.  Now scan the whole file so multiple
                        # 'magnetic field' lines are reported as an error.
                        if Bfield is not None:
                            raise AllInOneError(self.__filename + " has multiple 'magnetic field' lines.")
                        Bfield = float(line.replace("#magnetic field: ", "").split(",")[1].split("#")[0].strip())
                if Bfield is not None:
                    return Bfield

        if run > 0:
            dasQuery = ('run = %s'%run)                         #for data
            data = self.__getData(dasQuery)
            try:
                return self.__findInJson(data, ["run","bfield"])
            except KeyError:
                return "unknown Can't get the magnetic field for run %s from DAS" % run

        #run < 0 - find B field for the first and last runs, and make sure they're compatible
        #  (to within tolerance)
        #NOT FOOLPROOF!  The magnetic field might go up and then down, or vice versa
        if self.__firstusedrun is None or self.__lastusedrun is None:
            return "unknown Can't get the exact magnetic field for the dataset until data has been retrieved from DAS."
        firstrunB = self.__getMagneticFieldForRun(self.__firstusedrun)
        lastrunB = self.__getMagneticFieldForRun(self.__lastusedrun)
        try:
            if abs(firstrunB - lastrunB) <= tolerance:
                return .5*(firstrunB + lastrunB)
            # print() with a single pre-formatted string works identically
            # under Python 2 (as a parenthesized print statement) and Python 3.
            print("%s %s %s" % (firstrunB, lastrunB, tolerance))
            return ("unknown The beginning and end of your run range for %s\n"
                    "have different magnetic fields (%s, %s)!\n"
                    "Try limiting the run range using firstRun, lastRun, begin, end, or JSON,\n"
                    "or increasing the tolerance (in dataset.py) from %s.") % (self.__name, firstrunB, lastrunB, tolerance)
        except TypeError:
            # One (or both) of the recursive lookups returned an "unknown ..."
            # string; propagate whichever one is the string.
            try:
                if "unknown" in firstrunB:
                    return firstrunB
                else:
                    return lastrunB
            except TypeError:
                return lastrunB
Esempio n. 3
0
 def __getData(self, dasQuery, dasLimit=0):
     """Run `dasQuery` against the DAS web service and return the "data"
     payload of the JSON reply.

     Arguments:
     - `dasQuery`: DAS query string
     - `dasLimit`: maximum number of results (0 = no limit)

     Raises AllInOneError if DAS reports a status other than 'ok'.
     """
     dasData = das_client.get_data('https://cmsweb.cern.ch', dasQuery, 0,
                                   dasLimit, False)
     # das_client may return either the raw JSON text or an already-decoded dict.
     if isinstance(dasData, str):
         jsondict = json.loads(dasData)
     else:
         jsondict = dasData
     # Check, if the DAS query fails
     if jsondict["status"] != 'ok':
         # Bug fix: the comma previously built a tuple instead of a message string.
         msg = "Status not 'ok', but: %s" % jsondict["status"]
         raise AllInOneError(msg)
     return jsondict["data"]
    def __init__(self, valName, config):
        """Initialize a preexisting validation named *valName* from *config*.

        Reads the "preexisting<valType>:<name>" section, validates the title
        and (optionally) the batch jobid, and checks for unknown options.
        """
        self.general = config.getGeneral()
        self.name = self.general["name"] = valName
        self.config = config

        sectionName = "preexisting" + self.valType + ":" + self.name
        self.general.update(
            config.getResultingSection(sectionName,
                                       defaultDict=self.defaults,
                                       demandPars=self.mandatories))

        self.title = self.general["title"]
        if any(forbidden in self.title for forbidden in ('|', ',', '"')):
            raise AllInOneError(
                "The characters '|', '\"', and ',' cannot be used in the alignment title!")
        self.needsproxy = boolfromstring(self.general["needsproxy"],
                                         "needsproxy")
        self.jobid = self.general["jobid"]
        if self.jobid:
            # Make sure the configured jobid actually refers to a known job.
            try:
                bjobsOutput = getCommandOutput2("bjobs %(jobid)s 2>&1" %
                                                self.general)
                if "is not found" in bjobsOutput:
                    raise RuntimeError
            except RuntimeError:
                raise AllInOneError(
                    "%s is not a valid jobid.\nMaybe it finished already?" %
                    self.jobid)

        knownOpts = set(
            self.defaults.keys()) | self.mandatories | self.optionals
        config.checkInput(sectionName,
                          knownSimpleOptions=knownOpts,
                          ignoreOptions=[])
        self.jobmode = None

        # Initialize plotting options for this validation type, if any exist.
        try:
            result = PlottingOptions(self.config, self.valType)
        except KeyError:
            pass
Esempio n. 5
0
    def convertTimeToRun(self,
                         begin=None,
                         end=None,
                         firstRun=None,
                         lastRun=None,
                         shortTuple=True):
        """Translate creation-time boundaries (`begin`/`end`) into run-number
        boundaries (`firstRun`/`lastRun`).

        Arguments:
        - `begin`, `end`: creation-time strings; mutually exclusive with
          `firstRun` and `lastRun` respectively
        - `shortTuple`: if True return (firstRun, lastRun), otherwise
          (begin, end, firstRun, lastRun)

        Raises AllInOneError on an ambiguous combination of arguments or when
        a time boundary falls outside the dataset's run range.
        """
        if (begin and firstRun) or (end and lastRun):
            msg = (
                "The Usage of " +
                "'begin' & 'firstRun' " * int(bool(begin and firstRun)) +
                "and " * int(bool(
                    (begin and firstRun) and (end and lastRun))) +
                "'end' & 'lastRun' " * int(bool(end and lastRun)) +
                "is ambigous.")
            raise AllInOneError(msg)

        # Fetch the run list once (it can be an expensive DAS query) instead
        # of once per comprehension as before.
        runs = self.__getRunList()
        runList = [run["run_number"] for run in runs]
        runTimeList = [run["creation_time"] for run in runs]
        if begin:
            try:
                runIndex = self.__find_ge(runTimeList, begin)
            except ValueError:
                msg = ("Your 'begin' is after the creation time of the last "
                       "run in the dataset\n'%s'" % (self.__name))
                raise AllInOneError(msg)
            firstRun = runList[runIndex]
            begin = None
        if end:
            try:
                runIndex = self.__find_lt(runTimeList, end)
            except ValueError:
                msg = ("Your 'end' is before the creation time of the first "
                       "run in the dataset\n'%s'" % (self.__name))
                raise AllInOneError(msg)
            lastRun = runList[runIndex]
            end = None
        if shortTuple:
            return firstRun, lastRun
        else:
            return begin, end, firstRun, lastRun
Esempio n. 6
0
    def createConfiguration(self, path, configBaseName = "TkAlOfflineValidation"):
        """Create one cfg file per parallel job under *path*.

        Raises AllInOneError if the job count exceeds the hard limit, if
        maxevents is unset, or if transient module-level histograms would
        prevent merging the parallel results.
        """
        # If offline validation uses N parallel jobs we create N cfg files.
        numberParallelJobs = int(self.general["parallelJobs"])
        # Limit the number of parallel jobs to 40
        # (each output file is approximately 20MB).
        maximumNumberJobs = 40
        if numberParallelJobs > maximumNumberJobs:
            raise AllInOneError("Maximum allowed number of parallel jobs "
                                + str(maximumNumberJobs) + " exceeded!!!")
        # Without maxevents we cannot split the event range between jobs.
        if int(self.general["maxevents"]) == -1:
            raise AllInOneError("Maximum number of events (maxevents) not specified: "
                                "cannot use parallel jobs in offline validation")
        if numberParallelJobs > 1 \
                and self.general["offlineModuleLevelHistsTransient"] == "True":
            raise AllInOneError("To be able to merge results when running parallel jobs,"
                                " set offlineModuleLevelHistsTransient to false.")
        for jobIndex in range(numberParallelJobs):
            cfgName = "%s.%s.%s_%s_cfg.py" % (configBaseName, self.name,
                                              self.alignmentToValidate.name,
                                              str(jobIndex))
            repMap = self.getRepMap()
            # This job skips jobIndex*(maxEvents/nJobs) events from the
            # beginning (no skipping for a single job) and tags its output
            # file with the index.
            repMap["nIndex"] = str(jobIndex)
            # Write the result file directly to datadir, not to /tmp/ --
            # see https://cern.service-now.com/service-portal/article.do?n=KB0000484
            repMap["outputFile"] = os.path.expandvars(self.outputFiles[jobIndex])

            cfgs = {cfgName: replaceByMap(configTemplates.offlineParallelTemplate,
                                          repMap)}
            self.filesToCompare[GenericValidationData.defaultReferenceName] = repMap["resultFile"]
            GenericValidationData.createConfiguration(self, cfgs, path)
Esempio n. 7
0
    def __init__(self,
                 valName,
                 alignment,
                 referenceAlignment,
                 config,
                 copyImages=True,
                 randomWorkdirPart=None):
        """
        Constructor of the GeometryComparison class.

        Arguments:
        - `valName`: String which identifies individual validation instances
        - `alignment`: `Alignment` instance to validate
        - `referenceAlignment`: `Alignment` instance which is compared
                                with `alignment`
        - `config`: `BetterConfigParser` instance which includes the
                    configuration of the validations
        - `copyImages`: Boolean which indicates whether png- and pdf-files
                        should be copied back from the batch farm
        - `randomWorkdirPart`: If this option is omitted a random number is
                               generated to create unique path names for the
                               individual validation instances.
        """
        defaults = {
            "3DSubdetector1": "1",
            "3DSubdetector2": "2",
            "3DTranslationalScaleFactor": "50"
        }
        mandatories = ["levels", "dbOutput"]
        GenericValidation.__init__(self,
                                   valName,
                                   alignment,
                                   config,
                                   "compare",
                                   addDefaults=defaults,
                                   addMandatories=mandatories)
        # Idiom fix: compare to None with 'is not', not '=='.
        if randomWorkdirPart is not None:
            self.randomWorkdirPart = randomWorkdirPart
        self.referenceAlignment = referenceAlignment
        referenceName = "IDEAL"
        if self.referenceAlignment != "IDEAL":
            referenceName = self.referenceAlignment.name

        allCompares = config.getCompares()
        self.__compares = {}
        if valName in allCompares:
            self.__compares[valName] = allCompares[valName]
        else:
            msg = ("Could not find compare section '%s' in '%s'" %
                   (valName, allCompares))
            raise AllInOneError(msg)
        self.copyImages = copyImages
Esempio n. 8
0
def parsestyle(style):
    """Convert *style* into an integer ROOT style number.

    Accepts either something int()-convertible (e.g. 3 or "21") or the name
    of a ROOT style constant (e.g. "kStar", "kDot").  Raises AllInOneError
    for anything else.
    """
    # Simplest case: it is already an integer (or an int-like string).
    try:
        return int(style)
    except ValueError:
        pass

    # Otherwise it may name a ROOT constant such as kStar or kDashed.
    try:
        return int(str(getattr(ROOT, style)))
    except (AttributeError, ValueError):
        pass

    raise AllInOneError("style has to be an integer or a ROOT constant (kDashed, kStar, ...)!")
    def __init__(self, valName, config, valType,
                 addDefaults = {}, addMandatories=[]):
        """Initialize a preexisting validation of kind *valType* named
        *valName* from *config*.

        Arguments:
        - `addDefaults`: extra default options merged on top of the built-ins
        - `addMandatories`: extra options that must be present in the section

        Raises AllInOneError for a bad title or an invalid jobid.
        """
        self.name = valName
        self.general = config.getGeneral()
        self.config = config
        self.filesToCompare = {}

        defaults = {"title": self.name, "jobid": "", "subdetector": "BPIX"}
        defaults.update(addDefaults)
        mandatories = ["file", "color", "style"]
        mandatories += addMandatories

        theUpdate = config.getResultingSection("preexisting"+valType+":"+self.name,
                                               defaultDict = defaults,
                                               demandPars = mandatories)
        self.general.update(theUpdate)

        self.title = self.general["title"]
        if "|" in self.title or "," in self.title or '"' in self.title:
            msg = "The characters '|', '\"', and ',' cannot be used in the alignment title!"
            raise AllInOneError(msg)

        self.jobid = self.general["jobid"]
        if self.jobid:
            try:  #make sure it's actually a valid jobid
                output = getCommandOutput2("bjobs %(jobid)s 2>&1"%self.general)
                if "is not found" in output: raise RuntimeError
            except RuntimeError:
                raise AllInOneError("%s is not a valid jobid.\nMaybe it finished already?"%self.jobid)

        self.filesToCompare[GenericValidationData.defaultReferenceName] = \
            self.general["file"]

        # Bug fix: dict.keys() + list raises TypeError on Python 3; build a
        # list explicitly (matches the set-based check used by the sibling
        # PreexistingValidation constructor).
        knownOpts = list(defaults.keys()) + mandatories
        ignoreOpts = []
        config.checkInput("preexisting"+valType+":"+self.name,
                          knownSimpleOptions = knownOpts,
                          ignoreOptions = ignoreOpts)
        self.jobmode = None
Esempio n. 10
0
 def __updateDict(self, dictionary, section):
     """Merge all options of *section* (and of its "local<Section>" override
     section, if present) into *dictionary*, which is modified in place and
     also returned.

     Raises AllInOneError if the mandatory section is missing.
     """
     result = dictionary
     try:
         for option in self.options(section):
             result[option] = self.get(section, option)
         localSection = "local" + section.title()
         if localSection in self.sections():
             for option in self.options(localSection):
                 result[option] = self.get(localSection, option)
     # Bug fix: 'except E, name' is Python-2-only syntax and rebound the
     # 'section' parameter; use 'as' and a dedicated name instead.
     except ConfigParser.NoSectionError as error:
         msg = ("%s in configuration files. This section is mandatory." %
                (str(error).replace(":", "", 1)))
         raise AllInOneError(msg)
     return result
Esempio n. 11
0
    def datasetSnippet( self, jsonPath = None, begin = None, end = None,
                        firstRun = None, lastRun = None, crab = False, parent = False ):
        """Return the python configuration snippet that loads this dataset.

        For a predefined dataset the snippet simply loads the stored cff
        (after checking that it provides secondary files when `parent` is
        requested); otherwise the snippet is generated via `__createSnippet`
        and, when no run/time selection was given, also stored as a cff.
        """
        if self.__predefined and parent:
            with open(self.__filename) as f:
                if "secFiles.extend" not in f.read():
                    msg = ("The predefined dataset '%s' does not contain secondary files, "
                           "which your validation requires!") % self.__name
                    if self.__official:
                        # Fall back to fetching the files from DAS instead of
                        # using the incomplete predefined dataset.
                        self.__name = self.__origName
                        self.__predefined = False
                        print(msg)
                        print("Retrieving the files from DAS.  You will be asked if you want "
                              "to overwrite the old dataset.\n"
                              "It will still be compatible with validations that don't need secondary files.")
                    else:
                        raise AllInOneError(msg)

        if self.__predefined:
            snippet = ("process.load(\"Alignment.OfflineValidation.%s_cff\")\n"
                       "process.maxEvents = cms.untracked.PSet(\n"
                       "    input = cms.untracked.int32(.oO[nEvents]Oo. / .oO[parallelJobs]Oo.)\n"
                       ")\n"
                       "process.source.skipEvents=cms.untracked.uint32(.oO[nIndex]Oo.*.oO[nEvents]Oo./.oO[parallelJobs]Oo.)"
                       %(self.__name))
            if not parent:
                with open(self.__filename) as f:
                    if "secFiles.extend" in f.read():
                        snippet += "\nprocess.source.secondaryFileNames = cms.untracked.vstring()"
            return snippet
        theMap = { "process": "process.",
                   "tab": " " * len( "process." ),
                   "nEvents": ".oO[nEvents]Oo. / .oO[parallelJobs]Oo.",
                   "skipEventsString": "process.source.skipEvents=cms.untracked.uint32(.oO[nIndex]Oo.*.oO[nEvents]Oo./.oO[parallelJobs]Oo.)\n",
                   "importCms": "",
                   "header": ""
                   }
        datasetSnippet = self.__createSnippet( jsonPath = jsonPath,
                                               begin = begin,
                                               end = end,
                                               firstRun = firstRun,
                                               lastRun = lastRun,
                                               repMap = theMap,
                                               crab = crab,
                                               parent = parent )
        if jsonPath == "" and begin == "" and end == "" and firstRun == "" and lastRun == "":
            # No selection at all: cache the dataset as a cff for future use.
            try:
                self.dump_cff(parent = parent)
            # Py3-compatible except syntax (was 'except AllInOneError, e').
            except AllInOneError as e:
                print("Can't store the dataset as a cff:")
                print(e)
                print("This may be inconvenient in the future, but will not cause a problem for this validation.")
        return datasetSnippet
Esempio n. 12
0
def addIndex(filename, njobs, index = None):
    """Insert a job index before *filename*'s extension.

    With `index=None`, return the list of indexed names for all `njobs` jobs.
    With `njobs == 1` the filename is returned unchanged (no index needed).

    Raises AllInOneError if the filename's extension is not recognized.
    """
    if index is None:
        return [addIndex(filename, njobs, i) for i in range(njobs)]
    if njobs == 1:
        return filename

    fileExtension = None
    for extension in fileExtensions:
        if filename.endswith(extension):
            fileExtension = extension
    if fileExtension is None:
        # Bug fix: was 'fileName' (undefined), which raised NameError instead
        # of the intended AllInOneError.
        raise AllInOneError(filename + " does not end with any of the extensions "
                                     + str(fileExtensions))
    return replacelast(filename, fileExtension, "_" + str(index) + fileExtension)
 def __init__(self, config, addDefaults = {}, addMandatories=[], addneedpackages=[]):
     """Plotting options for the track-splitting validation.

     Validates that the configured subdetector is one of the known ones.
     """
     defaults = {"outliercut": "-1.0", "subdetector": "none"}
     defaults.update(addDefaults)
     mandatories = list(addMandatories)
     needpackages = ["Alignment/CommonAlignmentProducer"] + list(addneedpackages)
     BasePlottingOptions.__init__(self, config, "split", defaults,
                                  mandatories, needpackages)
     validsubdets = self.validsubdets()
     chosen = self.general["subdetector"]
     if chosen not in validsubdets:
         raise AllInOneError("'%s' is not a valid subdetector!\n" % chosen
                             + "The options are: " + ", ".join(validsubdets))
Esempio n. 14
0
    def createConfiguration(self, path):
        """Create the offline-validation cfg file under *path*, choosing a
        cosmics template automatically (when AutoAlternates is on) based on
        the dataset's magnetic field.

        Raises AllInOneError if parallel jobs are combined with transient
        module-level histograms, or if the B field cannot be classified.
        """
        cfgName = "%s.%s.%s_cfg.py"%( self.configBaseName, self.name,
                                      self.alignmentToValidate.name )
        repMap = self.getRepMap()
        if self.NJobs > 1 and self.general["offlineModuleLevelHistsTransient"] == "True":
            msg = ("To be able to merge results when running parallel jobs,"
                   " set offlineModuleLevelHistsTransient to false.")
            raise AllInOneError(msg)

        templateToUse = configTemplates.offlineTemplate
        if self.AutoAlternates:
            if "Cosmics" in self.general["trackcollection"]:
                Bfield = self.dataset.magneticFieldForRun()
                if Bfield > 3.3 and Bfield < 4.3:                 #Should never be 4.3, but this covers strings, which always compare bigger than ints
                    templateToUse = configTemplates.CosmicsOfflineValidation
                    print(("B field for %s = %sT.  Using the template for cosmics at 3.8T.\n"
                           "To override this behavior, specify AutoAlternates = false in the [alternateTemplates] section")
                          % (self.dataset.name(), Bfield))
                elif Bfield < 0.5:
                    templateToUse = configTemplates.CosmicsAt0TOfflineValidation
                    print(("B field for %s = %sT.  Using the template for cosmics at 0T.\n"
                           "To override this behavior, specify AutoAlternates = false in the [alternateTemplates] section")
                          % (self.dataset.name(), Bfield))
                else:
                    try:
                        if "unknown " in Bfield:
                            msg = Bfield.replace("unknown ","",1)
                        # Bug fix: was '"Bfield" is "unknown"', an identity
                        # comparison of two literals that is always False.
                        elif Bfield == "unknown":
                            msg = "Can't get the B field for %s." % self.dataset.name()
                        else:
                            # Guard against 'msg' being unbound at the raise
                            # below for unexpected string values.
                            msg = "B field for %s = %s???" % (self.dataset.name(), Bfield)
                    except TypeError:
                        msg = "B field for %s = %sT.  This is not that close to 0T or 3.8T." % (self.dataset.name(), Bfield)
                    raise AllInOneError(msg + "\n"
                                        "To use this data, turn off the automatic alternates using AutoAlternates = false\n"
                                        "in the [alternateTemplates] section, and choose the alternate template yourself.")

        cfgs = {cfgName: templateToUse}
        self.filesToCompare[
            GenericValidationData.defaultReferenceName ] = repMap["finalResultFile"]
        return GenericValidationData.createConfiguration(self, cfgs, path, repMap = repMap)
Esempio n. 15
0
 def __init__(self, datasetName, dasLimit=0):
     """Set up a Dataset, either from a real CMS dataset path ('/a/b/c') or
     from the name of a predefined _cff.py in Alignment/OfflineValidation.

     Raises AllInOneError when a predefined dataset is missing or not built.
     """
     self.__name = datasetName
     # A name matching the CMS dataset naming scheme is a real dataset;
     # anything else is looked up as a predefined cff.
     if re.match(r'/.+/.+/.+', self.__name):
         self.__dataType = self.__getDataType()
         self.__predefined = False
     else:
         fileName = self.__name + "_cff.py"
         userBase = os.environ["CMSSW_BASE"]
         releaseBase = os.environ["CMSSW_RELEASE_BASE"]
         searchPath1 = os.path.join(userBase, "python", "Alignment",
                                    "OfflineValidation", fileName)
         searchPath2 = os.path.join(userBase, "src", "Alignment",
                                    "OfflineValidation", "python", fileName)
         searchPath3 = os.path.join(releaseBase, "python", "Alignment",
                                    "OfflineValidation", fileName)
         if not os.path.exists(searchPath1):
             if os.path.exists(searchPath2):
                 # The cff exists in src but was never built.
                 raise AllInOneError(
                     "The predefined dataset '%s' does exist in '%s', but "
                     "you need to run 'scram b' first." %
                     (self.__name, searchPath2))
             if not os.path.exists(searchPath3):
                 raise AllInOneError(
                     "The predefined dataset '%s' does not exist. Please "
                     "create it first or check for typos." % (self.__name))
         self.__dataType = "unknown"
         self.__predefined = True
     self.__dasLimit = dasLimit
     # Lazily-filled caches for DAS lookups.
     self.__fileList = None
     self.__fileInfoList = None
     self.__runList = None
Esempio n. 16
0
    def __init__(self, valName, alignment, config):
        """Configure a primary-vertex validation instance.

        Normalizes the doBPix/doFPix booleans, prefixes the reference file
        with the EOS redirector when needed, and rejects parallel jobs.
        """
        super(PrimaryVertexValidation, self).__init__(valName, alignment,
                                                      config)

        for flag in ("doBPix", "doFPix"):
            self.general[flag] = pythonboolstring(self.general[flag], flag)

        reference = self.general["pvvalidationreference"]
        if reference.startswith("/store"):
            self.general["pvvalidationreference"] = \
                "root://eoscms//eos/cms" + reference
        if self.NJobs > 1:
            raise AllInOneError(
                "Parallel jobs not implemented for the PrimaryVertex validation!\n"
                "Please set parallelJobs = 1.")
Esempio n. 17
0
 def __init__(self,
              valName,
              alignment,
              config,
              configBaseName="TkAlZMuMuValidation",
              scriptBaseName="TkAlZMuMuValidation",
              crabCfgBaseName="TkAlZMuMuValidation",
              resultBaseName="ZMuMuValidation",
              outputBaseName="ZMuMuValidation"):
     """Configure a Z->mumu validation instance.

     Prefixes the reference file with the EOS redirector when needed and
     rejects parallel jobs.
     """
     defaults = {
         "zmumureference": "/store/caf/user/emiglior/Alignment/TkAlDiMuonValidation/Reference/BiasCheck_DYToMuMu_Summer12_TkAlZMuMu_IDEAL.root",
         "resonance": "Z",
         "switchONfit": "false",
         "rebinphi": "4",
         "rebinetadiff": "2",
         "rebineta": "2",
         "rebinpt": "8",
     }
     mandatories = ["etamaxneg", "etaminneg", "etamaxpos", "etaminpos"]
     self.configBaseName = configBaseName
     self.scriptBaseName = scriptBaseName
     self.crabCfgBaseName = crabCfgBaseName
     self.resultBaseName = resultBaseName
     self.outputBaseName = outputBaseName
     self.needParentFiles = False
     GenericValidationData.__init__(self,
                                    valName,
                                    alignment,
                                    config,
                                    "zmumu",
                                    addDefaults=defaults,
                                    addMandatories=mandatories)
     reference = self.general["zmumureference"]
     if reference.startswith("/store"):
         self.general["zmumureference"] = "root://eoscms//eos/cms" + reference
     if self.NJobs > 1:
         raise AllInOneError(
             "Parallel jobs not implemented for the Z->mumu validation!\n"
             "Please set parallelJobs = 1.")
Esempio n. 18
0
    def __init__(self,
                 valName,
                 alignment,
                 config,
                 addDefaults={},
                 addMandatories=[],
                 configBaseName="TkAlOfflineValidation",
                 scriptBaseName="TkAlOfflineValidation",
                 crabCfgBaseName="TkAlOfflineValidation",
                 resultBaseName="AlignmentValidation",
                 outputBaseName="AlignmentValidation"):
        """Configure an offline validation instance.

        Deprecated plotting options are still accepted as (empty) defaults so
        that setting one of them produces a helpful error below.
        """
        # Options that moved to the [plots:offline] section.
        deprecateddefaults = {
            "DMRMethod": "",
            "DMRMinimum": "",
            "DMROptions": "",
            "OfflineTreeBaseDir": "",
            "SurfaceShapes": "",
        }
        defaults = {
            "offlineModuleLevelHistsTransient": "False",
            "offlineModuleLevelProfiles": "True",
            "stripYResiduals": "False",
        }
        defaults.update(deprecateddefaults)
        defaults.update(addDefaults)
        mandatories = ["trackcollection"] + list(addMandatories)

        self.configBaseName = configBaseName
        self.scriptBaseName = scriptBaseName
        self.crabCfgBaseName = crabCfgBaseName
        self.resultBaseName = resultBaseName
        self.outputBaseName = outputBaseName
        self.needParentFiles = False
        GenericValidationData.__init__(self,
                                       valName,
                                       alignment,
                                       config,
                                       "offline",
                                       addDefaults=defaults,
                                       addMandatories=mandatories)

        # Reject any deprecated option that was actually set, then drop them.
        for option in deprecateddefaults:
            if self.general[option]:
                raise AllInOneError(
                    "The '%s' option has been moved to the [plots:offline] section.  Please specify it there."
                    % option)
            del self.general[option]
Esempio n. 19
0
    def __init__(self,
                 valName,
                 alignment,
                 config,
                 configBaseName="TkAlPrimaryVertexValidation",
                 scriptBaseName="TkAlPrimaryVertexValidation",
                 crabCfgBaseName="TkAlPrimaryVertexValidation",
                 resultBaseName="PrimaryVertexValidation",
                 outputBaseName="PrimaryVertexValidation"):
        """Configure a primary-vertex validation instance.

        Prefixes the reference file with the EOS redirector when needed and
        rejects parallel jobs.
        """
        defaults = {
            "pvvalidationreference": "/store/caf/user/musich/Alignment/TkAlPrimaryVertexValidation/Reference/PrimaryVertexValidation_test_pvvalidation_mc_design_mc_48bins.root",
            "ttrhbuilder": "WithAngleAndTemplate",
            "doBPix": "True",
            "doFPix": "True",
        }
        mandatories = ["isda", "ismc", "runboundary", "trackcollection",
                       "vertexcollection", "lumilist", "ptCut", "etaCut",
                       "runControl", "numberOfBins"]
        self.configBaseName = configBaseName
        self.scriptBaseName = scriptBaseName
        self.crabCfgBaseName = crabCfgBaseName
        self.resultBaseName = resultBaseName
        self.outputBaseName = outputBaseName
        self.needParentFiles = False
        GenericValidationData.__init__(self,
                                       valName,
                                       alignment,
                                       config,
                                       "primaryvertex",
                                       addDefaults=defaults,
                                       addMandatories=mandatories)

        reference = self.general["pvvalidationreference"]
        if reference.startswith("/store"):
            self.general["pvvalidationreference"] = \
                "root://eoscms//eos/cms" + reference
        if self.NJobs > 1:
            raise AllInOneError(
                "Parallel jobs not implemented for the PrimaryVertex validation!\n"
                "Please set parallelJobs = 1.")
Esempio n. 20
0
 def cosmics0T(self):
     """Return True if this is a cosmics dataset taken at (approximately) 0T.

     Raises AllInOneError when the magnetic field for the dataset is only
     known as an "unknown ..." string and therefore cannot be classified.
     """
     if "Cosmics" not in self.general["trackcollection"]:
         return False
     Bfield = self.dataset.magneticFieldForRun()
     # Bug fix: check for a string *before* the numeric comparison.  On
     # Python 3, str < float raises TypeError; on Python 2 a string always
     # compared greater than 0.5, so this reordering preserves the result.
     if isinstance(Bfield, str):
         if "unknown " in Bfield:
             msg = Bfield.replace("unknown ", "", 1)
         elif Bfield == "unknown":
             msg = "Can't get the B field for %s." % self.dataset.name()
         else:
             msg = "B field = {}???".format(Bfield)
         raise AllInOneError(
             msg + "\n"
             "To use this dataset, specify magneticfield = [value] in your .ini config file."
         )
     if Bfield < 0.5:
         return True
     return False
 def __init__(self, valName, config,
              addDefaults = None, addMandatories=None):
     """Constructor for a preexisting offline validation.

     Arguments:
     - `valName`: String which identifies this validation instance
     - `config`: `BetterConfigParser` instance with the configuration
     - `addDefaults`: optional dict of extra default options
     - `addMandatories`: optional list of extra mandatory options

     Raises AllInOneError if any of the deprecated options (now living
     in the [plots:offline] section) is set.
     """
     # Use None sentinels instead of mutable default arguments, which
     # would be shared across calls.
     if addDefaults is None:
         addDefaults = {}
     if addMandatories is None:
         addMandatories = []
     defaults = {}
     # These options were moved to the [plots:offline] section; they are
     # kept as empty defaults only so that setting them triggers the
     # helpful error below.
     deprecateddefaults = {
         "DMRMethod":"",
         "DMRMinimum":"",
         "DMROptions":"",
         "OfflineTreeBaseDir":"",
         "SurfaceShapes":""
         }
     defaults.update(deprecateddefaults)
     defaults.update(addDefaults)
     PreexistingValidation.__init__(self, valName, config, "offline",
                                    defaults, addMandatories)
     for option in deprecateddefaults:
         if self.general[option]:
             raise AllInOneError("The '%s' option has been moved to the [plots:offline] section.  Please specify it there."%option)
Esempio n. 22
0
    def __init__(self,
                 valName,
                 alignment,
                 config,
                 configBaseName="TkAlOfflineValidationDQM"):
        """Constructor; requires a [DQM] section in the config file.

        Reads the primary dataset and the first/last run numbers from
        the [DQM] section.  Raises AllInOneError if that section is
        missing.
        """
        OfflineValidation.__init__(self,
                                   valName,
                                   alignment,
                                   config,
                                   configBaseName=configBaseName)
        if not config.has_section("DQM"):
            raise AllInOneError(
                "You need to have a DQM section in your configfile!")

        self.__PrimaryDataset = config.get("DQM", "primaryDataset")
        self.__firstRun = int(config.get("DQM", "firstRun"))
        self.__lastRun = int(config.get("DQM", "lastRun"))
 def getAlignments(self):
     """Build an `Alignment` object for every [alignment:<name>] section.

     Raises AllInOneError if two alignments end up with the same name
     after invalid characters were removed.
     """
     alignments = [
         Alignment(section.split("alignment:")[1], self)
         for section in self.sections() if "alignment:" in section
     ]
     cleaned_names = [alignment.name for alignment in alignments]
     duplicates = [
         name for name, count in collections.Counter(cleaned_names).items()
         if count > 1
     ]
     if duplicates:
         raise AllInOneError(
             "Duplicate alignment names after removing invalid characters: "
             + ", ".join(duplicates) + "\n"
             + "Please rename the alignments to avoid name clashes.")
     return alignments
Esempio n. 24
0
    def getRepMap(self, alignment = None):
        """Extend the OfflineValidation replacement map with DQM entries.

        Adds the DQM workflow name (built from the primary dataset, the
        current two-digit year and the configured run range) plus the
        first run number.  Raises AllInOneError if the resulting
        workflow string contains '__', which is forbidden in workflow
        names.
        """
        repMap = OfflineValidation.getRepMap(self, alignment)
        repMap.update({
                "workdir": os.path.expandvars(repMap["workdir"]),
                "offlineValidationMode": "Dqm",
                "offlineValidationFileOutput": configTemplates.offlineDqmFileOutputTemplate,
                "workflow": ("/%s/TkAl%s-.oO[alignmentName]Oo._R%09i_R%09i_"
                             "ValSkim-v1/ALCARECO"
                             %(self.__PrimaryDataset,
                               datetime.datetime.now().strftime("%y"),
                               self.__firstRun, self.__lastRun)),
                "firstRunNumber": "%i"% self.__firstRun
                })
        if "__" in repMap["workflow"]:
            # fixed typo in the user-facing message ("specefication")
            msg = ("the DQM workflow specification must not contain '__'. "
                   "it is: %s"%repMap["workflow"])
            raise AllInOneError(msg)
        return repMap
Esempio n. 25
0
    def __init__(self,
                 valName,
                 alignment,
                 referenceAlignment,
                 config,
                 copyImages=True,
                 randomWorkdirPart=None):
        """
        Constructor of the GeometryComparison class.

        Arguments:
        - `valName`: String which identifies individual validation instances
        - `alignment`: `Alignment` instance to validate
        - `referenceAlignment`: `Alignment` instance which is compared
                                with `alignment`
        - `config`: `BetterConfigParser` instance which includes the
                    configuration of the validations
        - `copyImages`: Boolean which indicates whether png- and pdf-files
                        should be copied back from the batch farm
        - `randomWorkdirPart`: If this option is omitted a random number is
                               generated to create unique path names for the
                               individual validation instances.

        Raises AllInOneError if no compare section for `valName` exists
        in the configuration.
        """
        GenericValidation.__init__(self, valName, alignment, config)
        if randomWorkdirPart is not None:
            self.randomWorkdirPart = randomWorkdirPart
        self.referenceAlignment = referenceAlignment
        try:  # try to override 'jobmode' from [general] section
            self.jobmode = config.get("compare:" + self.name, "jobmode")
        except ConfigParser.NoOptionError:
            pass

        allCompares = config.getCompares()
        self.__compares = {}
        if valName in allCompares:
            self.__compares[valName] = allCompares[valName]
        else:
            msg = ("Could not find compare section '%s' in '%s'" %
                   (valName, allCompares))
            raise AllInOneError(msg)
        self.copyImages = copyImages
Esempio n. 26
0
def replaceByMap(target, the_map):
    """This function replaces `.oO[key]Oo.` by `the_map[key]` in target.

    Arguments:
    - `target`: String which contains symbolic tags of the form `.oO[key]Oo.`
    - `the_map`: Dictionary which has to contain the `key`s in `target` as
                 keys; values may be strings, dicts of strings (addressed
                 as `.oO[key['subkey']]Oo.`) or lists of strings
                 (addressed as `.oO[key[index]]Oo.`)

    Raises AllInOneError when tags keep expanding into new tags forever
    (endless loop), and TypeError when a map value is neither a string,
    a dict, nor a list.
    """

    result = target
    lifeSaver = 10e3  # upper bound on substitution passes; guards against cycles
    iteration = 0
    # Keep substituting until no tag is left: a tag may itself expand
    # into a string that contains further tags.
    while ".oO[" in result and "]Oo." in result:
        for key in the_map:
            try:
                result = result.replace(".oO[" + key + "]Oo.",
                                        the_map[key])
            except TypeError:  #try a dict
                try:
                    for subKey, value in the_map[key].items():
                        result = result.replace(
                            ".oO[" + key + "['" + subKey + "']]Oo.", value)
                        result = result.replace(
                            ".oO[" + key + '["' + subKey + '"]]Oo.', value)
                except AttributeError:  #try a list
                    try:
                        for index, value in enumerate(the_map[key]):
                            result = result.replace(
                                ".oO[" + key + "[" + str(index) + "]]Oo.",
                                value)
                    except TypeError:
                        raise TypeError(
                            "Something is wrong in replaceByMap!  Need a string, dict, or list, but the_map(%s)=%s!"
                            % (repr(key), repr(the_map[key])))
            iteration += 1
        if iteration > lifeSaver:
            problematicLines = ""
            for line in result.splitlines():
                # report each *line* that still holds an unresolved tag
                # (the old code tested `result`, which is always True here)
                if ".oO[" in line and "]Oo." in line:
                    problematicLines += "%s\n" % line
            msg = ("Oh Dear, there seems to be an endless loop in "
                   "replaceByMap!!\n%s\n%s" % (problematicLines, the_map))
            raise AllInOneError(msg)
    return result
Esempio n. 27
0
 def getCompareStrings(self, requestId=None):
     """Return "<file>=<name>|<color>|<style>" strings for the comparison.

     With no `requestId`, a dict mapping every validation id in
     `self.filesToCompare` to its compare string is returned; otherwise
     only the string for the requested reference object.  Raises
     AllInOneError if `requestId` names an unknown reference object.
     """
     result = {}
     repMap = self.alignmentToValidate.getRepMap()
     for validationId in self.filesToCompare:
         repMap["file"] = self.filesToCompare[validationId]
         # files on castor have to be accessed through the rfio protocol
         if repMap["file"].startswith("/castor/"):
             repMap["file"] = "rfio:%(file)s" % repMap
         result[
             validationId] = "%(file)s=%(name)s|%(color)s|%(style)s" % repMap
     if requestId is None:
         return result
     if "." not in requestId:
         requestId += ".%s" % GenericValidation.defaultReferenceName
     referenceName = requestId.split(".")[-1]
     if referenceName not in result:
         raise AllInOneError(
             "could not find %s in reference Objects!" % referenceName)
     return result[referenceName]
Esempio n. 28
0
 def __init__(self, name, config, runGeomComp="1"):
     """Read the [alignment:<name>] section of `config`.

     Arguments:
     - `name`: name of the alignment section (without the prefix)
     - `config`: `BetterConfigParser` instance with the configuration
     - `runGeomComp`: geometry-comparison run label; values other than
                      "1" are appended to the name and title

     Raises AllInOneError if the section is missing or the title
     contains characters that would break the compare-string format.
     """
     # Short keywords for frequently used zero-APE / zero-deformation
     # conditions, so .ini files can refer to them by name.
     self.condShorts = {
         "TrackerAlignmentErrorExtendedRcd": {
             "zeroAPE": {
                 "connectString": ("frontier://FrontierProd"
                                   "/CMS_CONDITIONS"),
                 "tagName": "TrackerIdealGeometryErrorsExtended210_mc",
                 "labelName": ""
             }
         },
         "TrackerSurfaceDeformationRcd": {
             "zeroDeformations": {
                 "connectString": ("frontier://FrontierProd"
                                   "/CMS_CONDITIONS"),
                 "tagName": "TrackerSurfaceDeformations_zero",
                 "labelName": ""
             }
         },
     }
     section = "alignment:%s" % name
     if not config.has_section(section):
         # use the call form of raise; the old `raise Exc, (...)`
         # statement syntax is python2-only and inconsistent with the
         # rest of the file
         raise AllInOneError("section %s not found. Please define the "
                             "alignment!" % section)
     config.checkInput(
         section,
         knownSimpleOptions=['globaltag', 'style', 'color', 'title'],
         knownKeywords=['condition'])
     self.name = name
     if config.exists(section, "title"):
         self.title = config.get(section, "title")
     else:
         self.title = self.name
     if int(runGeomComp) != 1:
         self.name += "_run" + runGeomComp
         self.title += " run " + runGeomComp
     # '|', ',' and '"' are separators in the compare strings, so they
     # must not appear in the title.
     if "|" in self.title or "," in self.title or '"' in self.title:
         msg = "The characters '|', '\"', and ',' cannot be used in the alignment title!"
         raise AllInOneError(msg)
     self.runGeomComp = runGeomComp
     self.globaltag = config.get(section, "globaltag")
     self.conditions = self.__getConditions(config, section)
     self.color = config.get(section, "color")
     self.style = config.get(section, "style")
Esempio n. 29
0
    def __fileListSnippet(self,
                          crab=False,
                          parent=False,
                          firstRun=None,
                          lastRun=None,
                          forcerunselection=False):
        """Return the cfg snippet that fills readFiles (and secFiles).

        For crab jobs an empty string is returned (crab provides the
        files itself).  Otherwise the file list is split into chunks of
        255 files, each wrapped in a `readFiles.extend( [...] )`
        statement; with `parent=True` the parent files are appended the
        same way via `secFiles.extend`.  Raises AllInOneError when the
        dataset yields no files at all.
        """
        if crab:
            return ""

        chunks = list(
            self.__chunks(
                self.fileList(firstRun=firstRun,
                              lastRun=lastRun,
                              forcerunselection=forcerunselection), 255))
        if not chunks:
            raise AllInOneError(
                "No files found for dataset {}.  Check the spelling, or maybe specify another das instance?"
                .format(self.__name))
        files = "\n".join(
            "readFiles.extend( [\n'" + "',\n'".join(chunk) + "'\n] )"
            for chunk in chunks)

        if parent:
            parentChunks = list(
                self.__chunks(
                    self.fileList(parent=True,
                                  firstRun=firstRun,
                                  lastRun=lastRun,
                                  forcerunselection=forcerunselection),
                    255))
            parentSnippet = "\n".join(
                "secFiles.extend( [\n'" + "',\n'".join(chunk) + "'\n] )"
                for chunk in parentChunks)
            files += "\n\n" + parentSnippet

        return files
Esempio n. 30
0
 def __init__(self,
              valName,
              alignment,
              config,
              configBaseName="TkAlMcValidate",
              scriptBaseName="TkAlMcValidate",
              crabCfgBaseName="TkAlMcValidate",
              resultBaseName="McValidation",
              outputBaseName="McValidation"):
     """Set up an MC validation job.

     Raises AllInOneError if more than one parallel job is requested,
     since the MC validation does not support job splitting.
     """
     # Base names used to derive the config/script/crab/result/output
     # file names of this validation.
     for attribute, value in (("configBaseName", configBaseName),
                              ("scriptBaseName", scriptBaseName),
                              ("crabCfgBaseName", crabCfgBaseName),
                              ("resultBaseName", resultBaseName),
                              ("outputBaseName", outputBaseName)):
         setattr(self, attribute, value)
     # The MC validation also needs the parent (upstream) files.
     self.needParentFiles = True
     GenericValidationData.__init__(self, valName, alignment, config,
                                    "mcValidate")
     if self.NJobs > 1:
         raise AllInOneError(
             "Parallel jobs not implemented for the MC validation!\n"
             "Please set parallelJobs = 1.")