Example #1
    def parse(self, config, path):
        logger = logging.getLogger("mpsvalidate")

        # extract process object from alignment_merge.py file
        try:
            process = mps_tools.get_process_object(path)
        except ImportError:
            logger.error("AdditionalData: {0} does not exist".format(path))
            return

        # find alignable selectors
        param_builder = process.AlignmentProducer.ParameterBuilder
        for index,sel in enumerate(param_builder.parameterTypes):
            selector_name = sel.split(",")[0].strip()
            self.selectors[index] = {
                "name": selector_name,
                "selector": getattr(param_builder, selector_name),
                }

        # find IOV definition
        if len(process.AlignmentProducer.RunRangeSelection) > 0:
            self.iov_definition = \
                process.AlignmentProducer.RunRangeSelection.dumpPython()

        # find pede steerer configuration
        pede_steerer = process.AlignmentProducer.algoConfig.pedeSteerer
        self.pede_steerer_method  = pede_steerer.method.value()
        self.pede_steerer_options = pede_steerer.options.value()
        self.pede_steerer_command = pede_steerer.pedeCommand.value()
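All of the examples on this page rely on `mps_tools.get_process_object` to load a CMSSW configuration file and return its process object. The following is only a minimal sketch of such a helper for orientation; the import-based loading, the error behaviour, and the module attribute `process` are assumptions, not the actual mps_tools implementation.

# Minimal sketch (assumption): load a Python config file and return the
# cms.Process instance it is assumed to define as a module-level `process`.
# Note: a missing file raises FileNotFoundError here, whereas the caller
# above expects ImportError; this simplification is intentional.
import importlib.util
import os

def get_process_object_sketch(cfg_path):
    """Hypothetical stand-in for mps_tools.get_process_object."""
    name = os.path.splitext(os.path.basename(cfg_path))[0]
    spec = importlib.util.spec_from_file_location(name, cfg_path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)   # executes the config file
    return module.process             # assumed attribute set by the config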
Example #2
    def parse(self, config, path):
        logger = logging.getLogger("mpsvalidate")

        # extract process object from alignment_merge.py file
        try:
            process = mps_tools.get_process_object(path)
        except ImportError:
            logger.error("AdditionalData: {0} does not exist".format(path))
            return

        # find alignable selectors
        param_builder = process.AlignmentProducer.ParameterBuilder
        for index, sel in enumerate(param_builder.parameterTypes):
            selector_name = sel.split(",")[0].strip()
            self.selectors[index] = {
                "name": selector_name,
                "selector": getattr(param_builder, selector_name),
            }

        # find IOV definition
        if len(process.AlignmentProducer.RunRangeSelection) > 0:
            self.iov_definition = \
                process.AlignmentProducer.RunRangeSelection.dumpPython()

        # find pede steerer configuration
        pede_steerer = process.AlignmentProducer.algoConfig.pedeSteerer
        self.pede_steerer_method = pede_steerer.method.value()
        self.pede_steerer_options = pede_steerer.options.value()
        self.pede_steerer_command = pede_steerer.pedeCommand.value()
Example #3
def get_used_binaries(cfg, no_binary_check):
    """Returns list of used binary IDs.
    
    Arguments:
    - `cfg`: python config used to run the pede job
    - `no_binary_check`: if 'True', the check for file existence is skipped
    """

    cms_process = mps_tools.get_process_object(cfg)

    binaries = cms_process.AlignmentProducer.algoConfig.mergeBinaryFiles
    if no_binary_check:
        used_binaries = binaries
    else:
        # following check works only if 'args.cfg' was run from the same directory:
        used_binaries = [
            b for b in binaries
            if os.path.exists(os.path.join(os.path.dirname(cfg), b))
        ]

    used_binaries = [
        int(re.sub(r"milleBinary(\d+)\.dat", r"\1", b)) for b in used_binaries
    ]

    return used_binaries
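The regex in `get_used_binaries` maps binary file names to integer IDs. A small, self-contained illustration of that substitution follows; the file names are made up.

# Illustration only: extract numeric IDs from typical milleBinary names,
# using the same pattern as above (file names here are invented).
import re

names = ["milleBinary001.dat", "milleBinary042.dat"]
ids = [int(re.sub(r"milleBinary(\d+)\.dat", r"\1", n)) for n in names]
print(ids)  # -> [1, 42]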
Example #4
def get_used_binaries(cfg, no_binary_check):
    """Returns list of used binary IDs.
    
    Arguments:
    - `cfg`: python config used to run the pede job
    - `no_binary_check`: if 'True', the check for file existence is skipped
    """

    cms_process = mps_tools.get_process_object(cfg)

    binaries = cms_process.AlignmentProducer.algoConfig.mergeBinaryFiles
    if no_binary_check:
        used_binaries = binaries
    else:
        # following check works only if 'args.cfg' was run from the same directory:
        used_binaries = [b for b in binaries
                         if os.path.exists(os.path.join(os.path.dirname(cfg), b))]

    used_binaries = [int(re.sub(r"milleBinary(\d+)\.dat", r"\1", b))
                     for b in used_binaries]

    return used_binaries
Example #5
    except IOError:
        print "The config-template '" + configTemplate + "' cannot be found."
        sys.exit(1)

    tmpFile = re.sub('setupGlobaltag\s*\=\s*[\"\'](.*?)[\"\']',
                     'setupGlobaltag = \"' + globalTag + '\"', tmpFile)
    tmpFile = re.sub('setupCollection\s*\=\s*[\"\'](.*?)[\"\']',
                     'setupCollection = \"' + collection + '\"', tmpFile)
    tmpFile = re.sub(re.compile("setupRunStartGeometry\s*\=\s*.*$", re.M),
                     "setupRunStartGeometry = " + first_run, tmpFile)

    thisCfgTemplate = "tmp.py"
    with open(thisCfgTemplate, "w") as f:
        f.write(tmpFile)

    cms_process = mps_tools.get_process_object(thisCfgTemplate)

    overrideGT = create_input_db(cms_process, first_run)
    with open(thisCfgTemplate, "a") as f:
        f.write(overrideGT)

    for setting in pedesettings:
        print
        print "=" * 60
        if setting is None:
            print "Creating pede job."
        else:
            print "Creating pede jobs using settings from '{0}'.".format(
                setting)
        for weight_conf in weight_confs:
            print "-" * 60
Example #6
def write_HTCondor_submit_file_pede(path, script, config, lib):
    """Writes 'job.submit' file in `path`.

    Arguments:
    - `path`: job directory
    - `script`: script to be executed
    - `config`: cfg file
    - `lib`: MPS lib object
    """

    resources = lib.get_class("pede").split("_")[1:]  # strip off 'htcondor'
    job_flavour = resources[-1]

    job_submit_template = """\
universe              = vanilla
executable            = {script:s}
output                = {jobm:s}/STDOUT
error                 = {jobm:s}/STDOUT
log                   = {jobm:s}/HTCJOB
notification          = Always
transfer_output_files = ""
request_memory        = {pedeMem:d}M

# adapted to space used on eos for binaries:
request_disk          = {disk:d}

# adapted to threads parameter in pede options and number of available cores
request_cpus          = {cpus:d}

+JobFlavour           = "{flavour:s}"
"""
    if "bigmem" in resources:
        job_submit_template += """\
+BigMemJob            = True
+AccountingGroup      = "group_u_CMS.e_cms_caf_bigmem"

# automatically remove the job if the submitter has no permissions to run a BigMemJob
periodic_remove       = !regexp("group_u_CMS.e_cms_caf_bigmem", AccountingGroup) && BigMemJob =?= True
"""
    job_submit_template += "\nqueue\n"

    print("Determine number of pede threads...")
    cms_process = mps_tools.get_process_object(os.path.join(Path, mergeCfg))
    pede_options = cms_process.AlignmentProducer.algoConfig.pedeSteerer.options.value()
    n_threads = 1
    for option in pede_options:
        if "threads" in option:
            n_threads = option.replace("threads", "").strip()
            n_threads = max(map(lambda x: int(x), n_threads.split()))
            break
    # HTCondor machines have (currently) 16 cores, i.e. we ensure here that
    # the job would fit core-wise on one machine.
    if n_threads > 16: n_threads = 16

    print("Determine required disk space on remote host...")
    # determine usage by each file instead of whole directory as this is what
    # matters for the specified disk usage:
    spco = subprocess.check_output  # to make the code below less verbose
    opj = os.path.join  # ditto
    cmd = ["du", "--apparent-size"]
    disk_usage = [
        int(item.split()[0])
        for directory in ("binaries", "monitors", "tree_files") for item in
        spco(cmd + glob.glob(opj(lib.mssDir, directory, "*"))).splitlines()
    ]
    disk_usage = sum(disk_usage)
    disk_usage *= 1.1  # reserve 10% additional space

    job_submit_file = os.path.join(Path, "job.submit")
    with open(job_submit_file, "w") as f:
        f.write(
            job_submit_template.format(script=os.path.abspath(script),
                                       jobm=os.path.abspath(path),
                                       pedeMem=lib.pedeMem,
                                       disk=int(disk_usage),
                                       cpus=n_threads,
                                       flavour=job_flavour))

    return job_submit_file
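For orientation, this is roughly how the `format()` call above fills the submit template; the snippet below uses a trimmed-down template and placeholder values, not output from a real run.

# Illustration only: rendering a reduced version of the submit template
# above (all values are hypothetical placeholders).
template = ("executable            = {script:s}\n"
            "request_memory        = {pedeMem:d}M\n"
            "request_cpus          = {cpus:d}\n"
            "+JobFlavour           = \"{flavour:s}\"\n")
print(template.format(script="/abs/path/pede_script.sh",
                      pedeMem=4096, cpus=8, flavour="nextweek"))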
Example #7
        print("Bad merge script file name", args.merge_script)
        sys.exit(1)

if args.mss_dir.strip() != "":
    if ":" in args.mss_dir:
        lib.mssDirPool = args.mss_dir.split(":")
        lib.mssDirPool, args.mss_dir = lib.mssDirPool[0], ":".join(
            lib.mssDirPool[1:])
        lib.mssDir = args.mss_dir

pedeMemMin = 1024  # Minimum memory allocated for pede: 1024MB=1GB

# Try to guess the memory requirements from the pede executable name.
# 2.5GB is used as default otherwise.
# AP - 23.03.2010
cms_process = mps_tools.get_process_object(args.config_template)
pedeMemDef = cms_process.AlignmentProducer.algoConfig.pedeSteerer.pedeCommand.value()
# This is the pede executable (only the file name, eg "pede_4GB").
pedeMemDef = os.path.basename(pedeMemDef)
pedeMemDef = pedeMemDef.split("_")[-1]
pedeMemDef = pedeMemDef.replace("GB", "")
try:
    pedeMemDef = 1024 * float(pedeMemDef)
    if pedeMemDef < pedeMemMin:
        pedeMemDef = pedeMemMin  # pedeMemDef must be >= pedeMemMin.
except ValueError:
    pedeMemDef = int(1024 * 2.5)

# Allocate memory for the pede job.
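The executable-name parsing above turns a suffix like "_4GB" into a memory request in MB, falling back to 2.5 GB if no number can be parsed. A compact, standalone rerun of that logic with a made-up executable name:

# Standalone rerun of the name-based memory guess above
# ("pede_4GB" is an invented example name).
pedeMemMin = 1024
exe = "pede_4GB"
mem = exe.split("_")[-1].replace("GB", "")
try:
    mem = 1024 * float(mem)
    if mem < pedeMemMin:
        mem = pedeMemMin
except ValueError:
    mem = int(1024 * 2.5)
print(mem)  # -> 4096.0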
Example #8
        sys.exit(1)

    tmpFile = re.sub('setupGlobaltag\s*\=\s*[\"\'](.*?)[\"\']',
                     'setupGlobaltag = \"'+globalTag+'\"',
                     tmpFile)
    tmpFile = re.sub('setupCollection\s*\=\s*[\"\'](.*?)[\"\']',
                     'setupCollection = \"'+collection+'\"',
                     tmpFile)
    tmpFile = re.sub(re.compile("setupRunStartGeometry\s*\=\s*.*$", re.M),
                     "setupRunStartGeometry = "+first_run,
                     tmpFile)

    thisCfgTemplate = "tmp.py"
    with open(thisCfgTemplate, "w") as f: f.write(tmpFile)

    cms_process = mps_tools.get_process_object(thisCfgTemplate)

    overrideGT = create_input_db(cms_process, first_run)
    with open(thisCfgTemplate, "a") as f: f.write(overrideGT)

    for setting in pedesettings:
        print
        print "="*60
        if setting is None:
            print "Creating pede job."
        else:
            print "Creating pede jobs using settings from '{0}'.".format(setting)
        for weight_conf in weight_confs:
            print "-"*60
            # blank weights
            handle_process_call(["mps_weight.pl", "-c"])
Example #9
def write_HTCondor_submit_file(path, script, config, lib):
    """Writes 'job.submit' file in `path`.

    Arguments:
    - `path`: job directory
    - `script`: script to be executed
    - `config`: cfg file
    - `lib`: MPS lib object
    """

    resources = lib.get_class("pede").split("_")[1:] # strip off 'htcondor'
    job_flavour = resources[-1]

    job_submit_template="""\
universe              = vanilla
executable            = {script:s}
output                = {jobm:s}/STDOUT
error                 = {jobm:s}/STDOUT
log                   = {jobm:s}/HTCJOB
notification          = Always
transfer_output_files = ""
request_memory        = {pedeMem:d}M

# adapted to space used on eos for binaries:
request_disk          = {disk:d}

# adapted to threads parameter in pede options and number of available cores
request_cpus          = {cpus:d}

+JobFlavour           = "{flavour:s}"
"""
    if "bigmem" in resources:
        job_submit_template += """\
+BigMemJob            = True
+AccountingGroup      = "group_u_CMS.e_cms_caf_bigmem"

# automatically remove the job if the submitter has no permissions to run a BigMemJob
periodic_remove       = !regexp("group_u_CMS.e_cms_caf_bigmem", AccountingGroup) && BigMemJob =?= True
"""
    job_submit_template += "\nqueue\n"

    print("Determine number of pede threads...")
    cms_process = mps_tools.get_process_object(os.path.join(Path, mergeCfg))
    pede_options = cms_process.AlignmentProducer.algoConfig.pedeSteerer.options.value()
    n_threads = 1
    for option in pede_options:
        if "threads" in option:
            n_threads = option.replace("threads", "").strip()
            n_threads = max(map(lambda x: int(x), n_threads.split()))
            break
    if n_threads > 16: n_threads = 16 # HTCondor machines have (currently) 16
                                      # cores, i.e. we ensure here that the job
                                      # would fit core-wise on one machine

    print("Determine required disk space on remote host...")
    # determine usage by each file instead of whole directory as this is what
    # matters for the specified disk usage:
    spco = subprocess.check_output # to make the code below less verbose
    opj = os.path.join             # ditto
    cmd = ["du", "--apparent-size"]
    disk_usage = [int(item.split()[0])
                  for directory in ("binaries", "monitors", "tree_files")
                  for item
                  in spco(cmd+
                          glob.glob(opj(lib.mssDir, directory, "*"))).splitlines()]
    disk_usage = sum(disk_usage)
    disk_usage *= 1.1 # reserve 10% additional space

    job_submit_file = os.path.join(Path, "job.submit")
    with open(job_submit_file, "w") as f:
        f.write(job_submit_template.format(script = os.path.abspath(script),
                                           jobm = os.path.abspath(path),
                                           pedeMem = lib.pedeMem,
                                           disk = int(disk_usage),
                                           cpus = n_threads,
                                           flavour = job_flavour))

    return job_submit_file
Example #10
    def _create_pede_jobs(self):
        """Create pede jobs from the given input."""

        for setting in self._pede_settings:
            print()
            print("="*75)
            if setting is None:
                print("Creating pede job{}.".format(
                    "s" if len(self._pede_settings)*len(self._weight_configs) > 1 else ""))
                print("-"*75)
            else:
                print("Creating pede jobs using settings from '{0}'.".format(setting))
            for weight_conf in self._weight_configs:
                # blank weights
                self._handle_process_call(["mps_weight.pl", "-c"])

                thisCfgTemplate = "tmp.py"
                with open(thisCfgTemplate, "w") as f: f.write(self._config_template)
                if self._override_gt is None:
                    self._cms_process = mps_tools.get_process_object(thisCfgTemplate)
                    self._create_input_db()
                with open(thisCfgTemplate, "a") as f: f.write(self._override_gt)

                for name,weight in weight_conf:
                    self._handle_process_call(["mps_weight.pl", "-N", name, weight], True)

                if not self._first_pede_config:
                    # create new mergejob
                    self._handle_process_call(["mps_setupm.pl"], self._args.verbose)

                # read mps.db to find directory of new mergejob
                lib = mpslib.jobdatabase()
                lib.read_db()

                # short cut for jobm path
                jobm_path = os.path.join("jobData", lib.JOBDIR[-1])

                # delete old merge-config
                command = ["rm", "-f", os.path.join(jobm_path, "alignment_merge.py")]
                self._handle_process_call(command, self._args.verbose)

                # create new merge-config
                command = [
                    "mps_merge.py",
                    "-w", thisCfgTemplate,
                    os.path.join(jobm_path, "alignment_merge.py"),
                    jobm_path,
                    str(lib.nJobs),
                ]
                if setting is not None: command.extend(["-a", setting])
                print("-"*75)
                print(" ".join(command))
                self._handle_process_call(command, self._args.verbose)
                self._create_tracker_tree()
                if self._first_pede_config:
                    os.symlink(self._tracker_tree_path,
                               os.path.abspath(os.path.join(jobm_path,
                                                            ".TrackerTree.root")))
                    self._first_pede_config = False

                # store weights configuration
                with open(os.path.join(jobm_path, ".weights.pkl"), "wb") as f:
                    cPickle.dump(weight_conf, f, 2)
                print("="*75)

        # remove temporary file
        self._handle_process_call(["rm", thisCfgTemplate])
Example #11
    def _create_mille_jobs(self):
        """Create the mille jobs based on the [dataset:<name>] sections."""

        gt_regex = re.compile('setupGlobaltag\s*\=\s*[\"\'](.*?)[\"\']')
        sg_regex = re.compile("setupRunStartGeometry\s*\=\s*.*$", re.M)
        collection_regex = re.compile('setupCollection\s*\=\s*[\"\'](.*?)[\"\']')
        czt_regex = re.compile('setupCosmicsZeroTesla\s*\=\s*.*$', re.M)
        cdm_regex = re.compile('setupCosmicsDecoMode\s*\=\s*.*$', re.M)
        pw_regex = re.compile('setupPrimaryWidth\s*\=\s*.*$', re.M)
        json_regex = re.compile('setupJson\s*\=\s*.*$', re.M)

        first_dataset = True
        for name, dataset in six.iteritems(self._datasets):
            print("="*75)
            # Build config from template/Fill in variables
            try:
                with open(dataset["configTemplate"],"r") as f:
                    tmpFile = f.read()
            except IOError:
                print("The config-template called", end=' ')
                print(dataset["configTemplate"], "cannot be found.")
                sys.exit(1)

            tmpFile = re.sub(gt_regex,
                             'setupGlobaltag = \"'+dataset["globaltag"]+'\"',
                             tmpFile)
            tmpFile = re.sub(sg_regex,
                             "setupRunStartGeometry = "+
                             self._general_options["FirstRunForStartGeometry"], tmpFile)
            tmpFile = re.sub(collection_regex,
                             'setupCollection = \"'+dataset["collection"]+'\"',
                             tmpFile)
            if "ALCARECOTkAlCosmics" in dataset["collection"]:
                if dataset['cosmicsZeroTesla']:
                    tmpFile = re.sub(czt_regex,
                                     'setupCosmicsZeroTesla = True',
                                     tmpFile)
                else:
                    tmpFile = re.sub(czt_regex,
                                     'setupCosmicsZeroTesla = False',
                                     tmpFile)

                if dataset['cosmicsDecoMode']:
                    tmpFile = re.sub(cdm_regex,
                                     'setupCosmicsDecoMode = True',
                                     tmpFile)
                else:
                    tmpFile = re.sub(cdm_regex,
                                     'setupCosmicsDecoMode = False',
                                     tmpFile)

            if dataset['primaryWidth'] > 0.0:
                tmpFile = re.sub(pw_regex,
                                 'setupPrimaryWidth = '+str(dataset["primaryWidth"]),
                                 tmpFile)
            if dataset['json'] != '':
                tmpFile = re.sub(json_regex,
                                 'setupJson = \"'+dataset["json"]+'\"',
                                 tmpFile)

            thisCfgTemplate = "tmp.py"
            with open(thisCfgTemplate, "w") as f:
                f.write(tmpFile)


            # Set mps_setup append option for datasets following the first one
            append = "-a"
            if first_dataset:
                append = ""
                first_dataset = False
                self._config_template = tmpFile
                self._cms_process = mps_tools.get_process_object(thisCfgTemplate)
                self._create_input_db()

            with open(thisCfgTemplate, "a") as f: f.write(self._override_gt)


            # create mps_setup command
            command = ["mps_setup.py",
                       "-m",
                       append,
                       "-M", self._general_options["pedeMem"],
                       "-N", name,
                       self._mille_script,
                       thisCfgTemplate,
                       dataset["inputFileList"],
                       str(dataset["njobs"]),
                       self._general_options["classInf"],
                       self._general_options["jobname"],
                       self._pede_script,
                       "cmscafuser:"******"numberOfEvents"] > 0:
                command.extend(["--max-events", str(dataset["numberOfEvents"])])
            command = [x for x in command if len(x.strip()) > 0]

            # Some output:
            print("Creating jobs for dataset:", name)
            print("-"*75)
            print("Baseconfig:        ", dataset["configTemplate"])
            print("Collection:        ", dataset["collection"])
            if "ALCARECOTkAlCosmics" in dataset["collection"]:
                print("cosmicsDecoMode:   ", dataset["cosmicsDecoMode"])
                print("cosmicsZeroTesla:  ", dataset["cosmicsZeroTesla"])
            print("Globaltag:         ", dataset["globaltag"])
            print("Number of jobs:    ", dataset["njobs"])
            print("Inputfilelist:     ", dataset["inputFileList"])
            if dataset["json"] != "":
                print("Jsonfile:          ", dataset["json"])
            if self._args.verbose:
                print("Pass to mps_setup: ", " ".join(command))

            # call the command and toggle verbose output
            self._handle_process_call(command, self._args.verbose)

            # remove temporary file
            self._handle_process_call(["rm", thisCfgTemplate])
Example #12
    def _create_pede_jobs(self):
        """Create pede jobs from the given input."""

        for setting in self._pede_settings:
            print
            print "="*75
            if setting is None:
                print "Creating pede job{}.".format(
                    "s" if len(self._pede_settings)*len(self._weight_configs) > 1 else "")
                print "-"*75
            else:
                print "Creating pede jobs using settings from '{0}'.".format(setting)
            for weight_conf in self._weight_configs:
                # blank weights
                self._handle_process_call(["mps_weight.pl", "-c"])

                thisCfgTemplate = "tmp.py"
                with open(thisCfgTemplate, "w") as f: f.write(self._config_template)
                if self._override_gt is None:
                    self._cms_process = mps_tools.get_process_object(thisCfgTemplate)
                    self._create_input_db()
                with open(thisCfgTemplate, "a") as f: f.write(self._override_gt)

                for name,weight in weight_conf:
                    self._handle_process_call(["mps_weight.pl", "-N", name, weight], True)

                if not self._first_pede_config:
                    # create new mergejob
                    self._handle_process_call(["mps_setupm.pl"], self._args.verbose)

                # read mps.db to find directory of new mergejob
                lib = mpslib.jobdatabase()
                lib.read_db()

                # short cut for jobm path
                jobm_path = os.path.join("jobData", lib.JOBDIR[-1])

                # delete old merge-config
                command = ["rm", "-f", os.path.join(jobm_path, "alignment_merge.py")]
                self._handle_process_call(command, self._args.verbose)

                # create new merge-config
                command = [
                    "mps_merge.py",
                    "-w", thisCfgTemplate,
                    os.path.join(jobm_path, "alignment_merge.py"),
                    jobm_path,
                    str(lib.nJobs),
                ]
                if setting is not None: command.extend(["-a", setting])
                print "-"*75
                print " ".join(command)
                self._handle_process_call(command, self._args.verbose)
                self._create_tracker_tree()
                if self._first_pede_config:
                    os.symlink(self._tracker_tree_path,
                               os.path.abspath(os.path.join(jobm_path,
                                                            ".TrackerTree.root")))
                    self._first_pede_config = False

                # store weights configuration
                with open(os.path.join(jobm_path, ".weights.pkl"), "wb") as f:
                    cPickle.dump(weight_conf, f, 2)
                print "="*75

        # remove temporary file
        self._handle_process_call(["rm", thisCfgTemplate])
Example #13
    def _create_mille_jobs(self):
        """Create the mille jobs based on the [dataset:<name>] sections."""

        gt_regex = re.compile('setupGlobaltag\s*\=\s*[\"\'](.*?)[\"\']')
        sg_regex = re.compile("setupRunStartGeometry\s*\=\s*.*$", re.M)
        collection_regex = re.compile('setupCollection\s*\=\s*[\"\'](.*?)[\"\']')
        czt_regex = re.compile('setupCosmicsZeroTesla\s*\=\s*.*$', re.M)
        cdm_regex = re.compile('setupCosmicsDecoMode\s*\=\s*.*$', re.M)
        pw_regex = re.compile('setupPrimaryWidth\s*\=\s*.*$', re.M)
        json_regex = re.compile('setupJson\s*\=\s*.*$', re.M)

        first_dataset = True
        for name, dataset in self._datasets.iteritems():
            print "="*75
            # Build config from template/Fill in variables
            try:
                with open(dataset["configTemplate"],"r") as f:
                    tmpFile = f.read()
            except IOError:
                print "The config-template called",
                print dataset["configTemplate"], "cannot be found."
                sys.exit(1)

            tmpFile = re.sub(gt_regex,
                             'setupGlobaltag = \"'+dataset["globaltag"]+'\"',
                             tmpFile)
            tmpFile = re.sub(sg_regex,
                             "setupRunStartGeometry = "+
                             self._general_options["FirstRunForStartGeometry"], tmpFile)
            tmpFile = re.sub(collection_regex,
                             'setupCollection = \"'+dataset["collection"]+'\"',
                             tmpFile)
            if dataset['cosmicsZeroTesla']:
                tmpFile = re.sub(czt_regex,
                                 'setupCosmicsZeroTesla = True',
                                 tmpFile)
            if dataset['cosmicsDecoMode']:
                tmpFile = re.sub(cdm_regex,
                                 'setupCosmicsDecoMode = True',
                                 tmpFile)
            if dataset['primaryWidth'] > 0.0:
                tmpFile = re.sub(pw_regex,
                                 'setupPrimaryWidth = '+str(dataset["primaryWidth"]),
                                 tmpFile)
            if dataset['json'] != '':
                tmpFile = re.sub(json_regex,
                                 'setupJson = \"'+dataset["json"]+'\"',
                                 tmpFile)

            thisCfgTemplate = "tmp.py"
            with open(thisCfgTemplate, "w") as f:
                f.write(tmpFile)


            # Set mps_setup append option for datasets following the first one
            append = "-a"
            if first_dataset:
                append = ""
                first_dataset = False
                self._config_template = tmpFile
                self._cms_process = mps_tools.get_process_object(thisCfgTemplate)
                self._create_input_db()

            with open(thisCfgTemplate, "a") as f: f.write(self._override_gt)


            # create mps_setup command
            command = ["mps_setup.py",
                       "-m",
                       append,
                       "-M", self._general_options["pedeMem"],
                       "-N", name,
                       self._mille_script,
                       thisCfgTemplate,
                       dataset["inputFileList"],
                       str(dataset["njobs"]),
                       self._general_options["classInf"],
                       self._general_options["jobname"],
                       self._pede_script,
                       "cmscafuser:"******"numberOfEvents"] > 0:
                command.extend(["--max-events", str(dataset["numberOfEvents"])])
            command = [x for x in command if len(x.strip()) > 0]

            # Some output:
            print "Creating jobs for dataset:", name
            print "-"*75
            print "Baseconfig:        ", dataset["configTemplate"]
            print "Collection:        ", dataset["collection"]
            if dataset["collection"] in ("ALCARECOTkAlCosmicsCTF0T",
                                         "ALCARECOTkAlCosmicsInCollisions"):
                print "cosmicsDecoMode:   ", dataset["cosmicsDecoMode"]
                print "cosmicsZeroTesla:  ", dataset["cosmicsZeroTesla"]
            print "Globaltag:         ", dataset["globaltag"]
            print "Number of jobs:    ", dataset["njobs"]
            print "Inputfilelist:     ", dataset["inputFileList"]
            if dataset["json"] != "":
                print "Jsonfile:          ", dataset["json"]
            if self._args.verbose:
                print "Pass to mps_setup: ", " ".join(command)

            # call the command and toggle verbose output
            self._handle_process_call(command, self._args.verbose)

            # remove temporary file
            self._handle_process_call(["rm", thisCfgTemplate])
Example #14
    if not os.access(args.merge_script, os.R_OK):
        print "Bad merge script file name", args.merge_script
        sys.exit(1)

if args.mss_dir.strip() != "":
    if ":" in args.mss_dir:
        lib.mssDirPool = args.mss_dir.split(":")
        lib.mssDirPool, args.mss_dir = lib.mssDirPool[0], ":".join(lib.mssDirPool[1:])
        lib.mssDir = args.mss_dir

pedeMemMin = 1024 # Minimum memory allocated for pede: 1024MB=1GB

# Try to guess the memory requirements from the pede executable name.
# 2.5GB is used as default otherwise.
# AP - 23.03.2010
cms_process = mps_tools.get_process_object(args.config_template)
pedeMemDef = cms_process.AlignmentProducer.algoConfig.pedeSteerer.pedeCommand.value()
pedeMemDef = os.path.basename(pedeMemDef) # This is the pede executable (only the file name, eg "pede_4GB").
pedeMemDef = pedeMemDef.split("_")[-1]
pedeMemDef = pedeMemDef.replace("GB", "")
try:
    pedeMemDef = 1024*float(pedeMemDef)
    if pedeMemDef < pedeMemMin: pedeMemDef = pedeMemMin # pedeMemDef must be >= pedeMemMin.
except ValueError:
    pedeMemDef = int(1024*2.5)


# Allocate memory for the pede job.
# The value specified by the user (-M option) prevails on the one evinced from the executable name.
# AP - 23.03.2010
if not args.memory or args.memory < pedeMemMin: