# Example #1
# 0
def submit_all_runCalib_dags(pairs_files, log_dir, append, pu_bins, eta_bins,
                             force_submit):
    """Create and submit DAG runCalibration jobs for all pairs files.

    Parameters
    ----------
    pairs_files : list[str], optional
        List of pairs files to process. Must be full path.

    log_dir : str, optional
        Directory for STDOUT/STDERR/LOG files. Should be on /storage.

    append : str, optional
        String to append to filenames to track various settings (e.g. PU bin).

    pu_bins : list[list[int, int]], optional
        List of PU bin edges.

    eta_bins : list[float], optional
        List of eta bin edges, including upper edge of last bin.

    force_submit : bool, optional
        If True, forces job submission even if proposed output files already exists.
        Otherwise, program quits before submission.
    """
    # Update the matcher script for the worker nodes
    setup_script = 'worker_setup.sh'
    cc.update_setup_script(setup_script, os.environ['CMSSW_VERSION'],
                           os.environ['ROOTSYS'])

    # Update the hadd script for the worker node
    hadd_setup_script = 'cmssw_setup.sh'
    cc.update_hadd_setup_script(hadd_setup_script, os.environ['CMSSW_VERSION'])

    # Additional files to copy across - other modules. etc
    common_input_files = ['runCalibration.py', 'binning.py', 'common_utils.py']
    common_input_files = [
        os.path.join(os.path.dirname(os.getcwd()), f)
        for f in common_input_files
    ]

    status_files = []

    # Submit a DAG for each pairs file
    for pfile in pairs_files:
        print 'Processing', pfile
        sfile = submit_runCalib_dag(pairs_file=pfile,
                                    log_dir=log_dir,
                                    append=append,
                                    pu_bins=pu_bins,
                                    eta_bins=eta_bins,
                                    common_input_files=common_input_files,
                                    force_submit=force_submit)
        status_files.append(sfile)

    if status_files:
        status_files = list(
            chain.from_iterable(status_files))  # flatten the list
        print 'All statuses:'
        print 'DAGstatus.py ', ' '.join(status_files)
def submit_all_resolution_dags(pairs_files, max_l1_pt, log_dir, append,
                               pu_bins, eta_bins, force_submit):
    """Create and submit DAG makeResolutionPlots jobs for all pairs files.

    Parameters
    ----------
    pairs_files : list[str], optional
        List of pairs files to process. Must be full path.

    max_l1_pt : int, optional
        Maximum L1 pt to consider when making plots.

    log_dir : str, optional
        Directory for STDOUT/STDERR/LOG files. Should be on /storage.

    append : str, optional
        String to append to filenames to track various settings (e.g. PU bin).

    pu_bins : list[list[int, int]], optional
        List of PU bin edges.

    eta_bins : list[float], optional
        List of eta bin edges, including upper edge of last bin.

    force_submit : bool, optional
        If True, forces job submission even if proposed output files already exists.
        Otherwise, program quits before submission.
    """
    # Update the matcher script for the worker nodes
    setup_script = 'worker_setup.sh'
    cc.update_setup_script(setup_script, os.environ['CMSSW_VERSION'], os.environ['ROOTSYS'])

    # Update the hadd script for the worker node
    hadd_setup_script = 'cmssw_setup.sh'
    cc.update_hadd_setup_script(hadd_setup_script, os.environ['CMSSW_VERSION'])

    # Additional files to copy across - other modules. etc
    common_input_files = ['makeResolutionPlots.py', 'binning.py', 'common_utils.py']
    common_input_files = [os.path.join(os.path.dirname(os.getcwd()), f) for f in common_input_files]

    status_files = []

    # Submit a DAG for each pairs file
    for pfile in pairs_files:
        print 'Processing', pfile
        sfile = submit_resolution_dag(pairs_file=pfile, max_l1_pt=max_l1_pt,
                                      log_dir=log_dir, append=append,
                                      pu_bins=pu_bins, eta_bins=eta_bins,
                                      common_input_files=common_input_files,
                                      force_submit=force_submit)
        status_files.append(sfile)

    if len(status_files) > 0:
        if not isinstance(status_files[0], str):
            # flatten the list
            status_files = list(chain.from_iterable(status_files))
        print 'All statuses:'
        print 'DAGstatus.py ', ' '.join(status_files)
def submit_all_resolution_dags(pairs_files, max_l1_pt, log_dir, append,
                               pu_bins, eta_bins, force_submit):
    """Create and submit DAG makeResolutionPlots jobs for all pairs files.

    Parameters
    ----------
    pairs_files : list[str], optional
        List of pairs files to process. Must be full path.

    max_l1_pt : int, optional
        Maximum L1 pt to consider when making plots.

    log_dir : str, optional
        Directory for STDOUT/STDERR/LOG files. Should be on /storage.

    append : str, optional
        String to append to filenames to track various settings (e.g. PU bin).

    pu_bins : list[list[int, int]], optional
        List of PU bin edges.

    eta_bins : list[float], optional
        List of eta bin edges, including upper edge of last bin.

    force_submit : bool, optional
        If True, forces job submission even if proposed output files already exists.
        Otherwise, program quits before submission.
    """
    # Update the matcher script for the worker nodes
    setup_script = 'worker_setup.sh'
    cc.update_setup_script(setup_script, os.environ['CMSSW_VERSION'],
                           os.environ['ROOTSYS'])

    # Update the hadd script for the worker node
    hadd_setup_script = 'cmssw_setup.sh'
    cc.update_hadd_setup_script(hadd_setup_script, os.environ['CMSSW_VERSION'])

    # Additional files to copy across - other modules. etc
    common_input_files = [
        'makeResolutionPlots.py', 'binning.py', 'common_utils.py'
    ]
    common_input_files = [
        os.path.join(os.path.dirname(os.getcwd()), f)
        for f in common_input_files
    ]

    # Submit a DAG for each pairs file
    for pfile in pairs_files:
        print 'Processing', pfile
        submit_resolution_dag(pairs_file=pfile,
                              max_l1_pt=max_l1_pt,
                              log_dir=log_dir,
                              append=append,
                              pu_bins=pu_bins,
                              eta_bins=eta_bins,
                              common_input_files=common_input_files,
                              force_submit=force_submit)
def submit_all_matcher_dags(exe, ntuple_dirs, log_dir, append,
                            l1_dir, ref_dir, deltaR, ref_min_pt, cleaning_cut,
                            force_submit):
    """Create and submit DAG checkCalibration jobs for all pairs files.

    Parameters
    ----------
    exe : str
        Name of executable.

    ntuple_dirs : list[str]
        List of directories with L1Ntuples to run over.

    log_dir : str, optional
        Directory for STDOUT/STDERR/LOG files. Should be on /storage.

    append : str, optional
        String to append to filenames to track various settings (e.g. deltaR cut).

    l1_dir : str
        Name of TDirectory in Ntuple that holds L1 jets.

    ref_dir : str
        Name of TDirectory in Ntuple that holds reference jets.

    deltaR : float
        Maximum deltaR(L1, Ref) for a match.

    ref_min_pt : float
        Minimum pT cut on reference jets to be considered for matching.

    cleaning_cut : str
        Cleaning cut to be applied. If '' or None, no cut applied.
        Other options include "TIGHTLEPVETO", "TIGHT", and "LOOSE".
        Also requires events to pass CSC filter & HBHE noise filters.

    force_submit : bool, optional
        If True, forces job submission even if proposed output files
        already exists.
        Oherwise, program quits before submission.
    """
    # Update the matcher script for the worker nodes
    setup_script = 'worker_setup.sh'
    cc.update_setup_script(setup_script, os.environ['CMSSW_VERSION'], os.environ['ROOTSYS'])

    # Update the hadd script for the worker node
    hadd_setup_script = 'cmssw_setup.sh'
    cc.update_hadd_setup_script(hadd_setup_script, os.environ['CMSSW_VERSION'])

    status_files = []

    # Submit a DAG for each pairs file
    for ndir in ntuple_dirs:
        print '>>> Processing', ndir
        sfile = submit_matcher_dag(exe=exe, ntuple_dir=ndir, log_dir=log_dir,
                                   l1_dir=l1_dir, ref_dir=ref_dir,
                                   deltaR=deltaR, ref_min_pt=ref_min_pt,
                                   cleaning_cut=cleaning_cut,
                                   append=append, force_submit=force_submit)
        status_files.append(sfile)

    if status_files:
        print 'All statuses:'
        print 'DAGstatus.py ', ' '.join(status_files)
def submit_all_matcher_dags(exe, ntuple_dirs, log_dir, append, l1_dir, ref_dir,
                            deltaR, ref_min_pt, cleaning_cut, force_submit):
    """Create and submit DAG checkCalibration jobs for all pairs files.

    Parameters
    ----------
    exe : str
        Name of executable.

    ntuple_dirs : list[str]
        List of directories with L1Ntuples to run over.

    log_dir : str, optional
        Directory for STDOUT/STDERR/LOG files. Should be on /storage.

    append : str, optional
        String to append to filenames to track various settings (e.g. deltaR cut).

    l1_dir : str
        Name of TDirectory in Ntuple that holds L1 jets.

    ref_dir : str
        Name of TDirectory in Ntuple that holds reference jets.

    deltaR : float
        Maximum deltaR(L1, Ref) for a match.

    ref_min_pt : float
        Minimum pT cut on reference jets to be considered for matching.

    cleaning_cut : str
        Cleaning cut to be applied. If '' or None, no cut applied.
        Other options include "TIGHTLEPVETO", "TIGHT", and "LOOSE".
        Also requires events to pass CSC filter & HBHE noise filters.

    force_submit : bool, optional
        If True, forces job submission even if proposed output files
        already exists.
        Oherwise, program quits before submission.
    """
    # Update the matcher script for the worker nodes
    setup_script = 'worker_setup.sh'
    cc.update_setup_script(setup_script, os.environ['CMSSW_VERSION'],
                           os.environ['ROOTSYS'])

    # Update the hadd script for the worker node
    hadd_setup_script = 'cmssw_setup.sh'
    cc.update_hadd_setup_script(hadd_setup_script, os.environ['CMSSW_VERSION'])

    status_files = []

    # Submit a DAG for each pairs file
    for ndir in ntuple_dirs:
        print '>>> Processing', ndir
        sfile = submit_matcher_dag(exe=exe,
                                   ntuple_dir=ndir,
                                   log_dir=log_dir,
                                   l1_dir=l1_dir,
                                   ref_dir=ref_dir,
                                   deltaR=deltaR,
                                   ref_min_pt=ref_min_pt,
                                   cleaning_cut=cleaning_cut,
                                   append=append,
                                   force_submit=force_submit)
        status_files.append(sfile)

    if status_files:
        print 'All statuses:'
        print 'DAGstatus.py ', ' '.join(status_files)