Example #1
    """
    The possible file creation options
  """
    all = 0,
    ConfigFiles = 1,
    CrossValidFile = 2,
    ppFile = 3


tuningJobFileParser = ArgumentParser(
    add_help=False, description='Create files used by TuningJob.')
tuningJobFileParser.add_argument(
    'fileType',
    choices=get_attributes(JobFileTypeCreation,
                           onlyVars=True,
                           getProtected=False),
    nargs='+',
    help="""Which kind of files to create. You can choose one
                     or more of the available choices; just don't combine 'all'
                     with the other available choices.""")
tuningJobFileParser.add_argument('--compress',
                                 type=BooleanStr,
                                 help="Whether to compress files or not.")
################################################################################
jobConfig = tuningJobFileParser.add_argument_group(
    "JobConfig Files Creation Options", """Change configuration for
                                       job config files creation.""")
jobConfig.add_argument('-oJConf',
                       '--jobConfiFilesOutputFolder',
                       default=NotSet,
                       help="The job config files output folder.")
Example #2
#!/usr/bin/env python
from RingerCore import Logger, LoggerNamespace, ArgumentParser, emptyArgumentsPrintHelp

parser    = ArgumentParser()

parser.add_argument('-o', '--output',action='store', default="merge.root", 
                    help='output merged file')
parser.add_argument('-n', '--nFilesPerMerge', type=int, default=2, 
                    help='Number of files per merge')
parser.add_argument('-i', '--input' ,action='store', nargs='+',required=True, 
                    help='input file')


mainLogger = Logger.getModuleLogger(__name__)

#***************************** Main ******************************
emptyArgumentsPrintHelp( parser )

args = parser.parse_args( namespace = LoggerNamespace() )
mainLogger.info('Starting merging files...')
poutput = args.output.replace('.root','')

import numpy as np
mainLogger.info(('Trying to merge %d files')%(len(args.input)))

files = args.input
files.sort()
imerge=0

all_merged_files=str()
while len(files) > 0:
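    # --- Hedged sketch only: the original loop body is truncated in this
    # --- excerpt. A plausible body, assuming ROOT's `hadd` is available on
    # --- the PATH, merges the files in chunks of args.nFilesPerMerge:
    import os
    chunk = files[:args.nFilesPerMerge]
    files = files[args.nFilesPerMerge:]
    merged_file = ('%s_%d.root') % (poutput, imerge)
    mainLogger.info(('Merging %d files into %s') % (len(chunk), merged_file))
    os.system('hadd -f %s %s' % (merged_file, ' '.join(chunk)))
    all_merged_files += merged_file + ' '
    imerge += 1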
Example #3
#        projCollection{et,eta,sort} = A % your matrix projection here!
#     end
#   end
# end
# save('your pp file name', 'nEtaBin', 'nEtBin', 'projCollection')
#

from RingerCore import Logger, LoggingLevel, save, load, ArgumentParser, emptyArgumentsPrintHelp
from TuningTools import fixPPCol
import numpy as np
import sys, os

mainParser = ArgumentParser()

mainParser.add_argument('-i','--input', action='store',  required = True,
                        help = "The input files [.mat] that will be used to generate a extract the preproc proj matrix.\
                            Your matlab file must be inside:  projCollection = [[[]]], nEtaBin = int, nEtBin = int, \
                            nSort = int")
mainParser.add_argument('-o','--output', action='store',  required = False, default = 'ppCol',
                        help = "The output file")

emptyArgumentsPrintHelp( mainParser )

mainLogger = Logger.getModuleLogger( __name__, LoggingLevel.INFO )
args=mainParser.parse_args()


import scipy.io
mainLogger.info(('Loading matlab file: %s')%(args.input))
rawData = scipy.io.loadmat(args.input)
collections = rawData['projCollection']
nEta = rawData['nEtaBin'][0][0]
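
# --- Hedged continuation (the excerpt is truncated here): the remaining
# --- Matlab keys mirror the 'nEtaBin' read above and match the names listed
# --- in the --input help text; how fixPPCol and save are then applied is not
# --- shown in the original.
nEt = rawData['nEtBin'][0][0]
nSort = rawData['nSort'][0][0]
mainLogger.info(('Found %d eta bin(s), %d et bin(s) and %d sort(s)') % (nEta, nEt, nSort))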
Example #4
def TuningJobFileParser():
    tuningJobFileParser = ArgumentParser(
        add_help=False, description='Create files used by TuningJob.')
    tuningJobFileParser.add_argument(
        'fileType',
        choices=get_attributes(JobFileTypeCreation,
                               onlyVars=True,
                               getProtected=False),
        nargs='+',
        help="""Which kind of files to create. You can choose one
                       or more of the available choices; just don't combine 'all'
                       with the other available choices.""")
    tuningJobFileParser.add_argument('--compress',
                                     type=BooleanStr,
                                     help="Whether to compress files or not.")
    ################################################################################
    jobConfig = tuningJobFileParser.add_argument_group(
        "JobConfig Files Creation Options", """Change configuration for
                                         job config files creation.""")
    jobConfig.add_argument('-oJConf',
                           '--jobConfiFilesOutputFolder',
                           default=NotSet,
                           help="The job config files output folder.")
    jobConfig.add_argument('--neuronBounds',
                           nargs='+',
                           type=int,
                           default=NotSet,
                           help="""
                              Input a sequential bounded list to be used as the
                              neuron job range; the arguments should have the
                              same format as the seq unix command or as Matlab
                              ranges. If not specified, the range will start
                              from 1. E.g. 5 2 9 leads to [5 7 9] and 50 leads
                              to 1:50.
                                 """)
    jobConfig.add_argument('--sortBounds',
                           nargs='+',
                           type=int,
                           default=NotSet,
                           help="""
                            Input a sequential bounded list using seq format to
                            be used as the sort job range, but the last bound
                            will be open, just as happens with the python
                            range function. If not specified, the range will
                            start from 0. E.g. 5 2 9 leads to [5 7] and 50
                            leads to range(50).
                                """)
    jobConfig.add_argument(
        '--nInits',
        nargs='?',
        type=int,
        default=NotSet,
        help="The number of initilizations of the discriminator.")
    jobConfig.add_argument('--nNeuronsPerJob',
                           type=int,
                           default=NotSet,
                           help="The number of hidden layer neurons per job.")
    jobConfig.add_argument('--nSortsPerJob',
                           type=int,
                           default=NotSet,
                           help="The number of sorts per job.")
    jobConfig.add_argument('--nInitsPerJob',
                           type=int,
                           default=NotSet,
                           help="The number of initializations per job.")
    ################################################################################
    crossConfig = tuningJobFileParser.add_argument_group(
        "CrossValid File Creation Options",
        """Change configuration for CrossValid
                                           file creation.""")
    crossConfig.add_argument('-outCross',
                             '--crossValidOutputFile',
                             default='crossValid',
                             help="The cross validation output file.")
    crossConfig.add_argument('-m',
                             '--method',
                             default=NotSet,
                             type=CrossValidMethod,
                             help="The Cross-Validation method.")
    crossConfig.add_argument(
        '-ns',
        '--nSorts',
        type=int,
        default=NotSet,
        help="""The number of sort used by cross validation
                                  configuration.""")
    crossConfig.add_argument(
        '-nb',
        '--nBoxes',
        type=int,
        default=NotSet,
        help="""The number of boxes used by cross validation
                                   configuration.""")
    crossConfig.add_argument('-ntr',
                             '--nTrain',
                             type=int,
                             default=NotSet,
                             help="""The number of train boxes used by cross
                                  validation.""")
    crossConfig.add_argument('-nval',
                             '--nValid',
                             type=int,
                             default=NotSet,
                             help="""The number of valid boxes used by cross
                                  validation.""")
    crossConfig.add_argument('-ntst',
                             '--nTest',
                             type=int,
                             default=NotSet,
                             help="""The number of test boxes used by cross
                                  validation.""")
    crossConfig.add_argument(
        '-seed',
        type=int,
        default=NotSet,
        help="The seed value for generating CrossValid object.")
    ################################################################################
    ppConfig = tuningJobFileParser.add_argument_group(
        "PreProc File Creation Options",
        """Change configuration for pre-processing 
                                        file creation. These options will only
                                        be taken into account if job fileType is
                                        set to "ppFile" or "all".""")
    ppConfig.add_argument('-outPP',
                          '--preProcOutputFile',
                          default='ppFile',
                          help="The pre-processing validation output file")
    ppConfig.add_argument('-ppCol',
                          type=str,
                          default='[[Norm1()]]',
                          help="""The pre-processing collection to apply. The
                               string will be parsed by python and created using
                               the available pre-processings on
                               TuningTools.PreProc.py file.
                               
                               This string can have classes from the PreProc
                               module initialized with determined values. E.g.:

                               -ppCol "[[[Norm1(),MapStd()],[RingerRp(2.,1.3)],[MapStd]],[[Norm1(),MapStd],[Norm1],[MapStd]],[[Norm1,MapStd],[Norm1({'level' : 'VERBOSE'})],[MapStd({'d' : {'level' : 'VERBOSE'}})]]]"

                               Using () explicitly or omitting it makes no
                               difference; the class instance is initialized
                               either way.

                               Also, a special syntax needs to be used when
                               passing keyword arguments, as shown below:

                               MapStd({'level' : 'VERBOSE'}) (equivalent in python) => MapStd( level = VERBOSE )

                               MapStd({'d' : {'level' : 'VERBOSE'}}) => MapStd( d = { level : VERBOSE } )
                               """)
    ppConfig.add_argument('-pp_ns',
                          '--pp_nSorts',
                          default=NotSet,
                          type=int,
                          help="""The number of sort used by cross validation
                               configuration. Import from nSorts if not set."""
                          )
    ppConfig.add_argument('-pp_nEt',
                          '--pp_nEtBins',
                          default=NotSet,
                          type=int,
                          help="""The number of et bins.""")
    ppConfig.add_argument('-pp_nEta',
                          '--pp_nEtaBins',
                          default=NotSet,
                          type=int,
                          help="""The number of eta bins.""")
    ################################################################################
    return tuningJobFileParser
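
# --- Minimal usage sketch (not part of the original excerpt): build the parser
# --- returned above and parse the command line arguments it defines.
if __name__ == '__main__':
    tuningJobFileParser = TuningJobFileParser()
    args = tuningJobFileParser.parse_args()
    print('Creating files of type(s): %s' % ', '.join(args.fileType))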
Example #5
#!/usr/bin/env python

from RingerCore import (ArgumentParser, BooleanStr, OMP_NUM_THREADS, grouper,
                        Logger, LoggingLevel)
from itertools import repeat
import sys, os, tempfile
from shutil import rmtree
from subprocess import call, PIPE

hadd_chunk = ArgumentParser(add_help=False, description='')
hadd_chunk.add_argument('target', action='store', help="Target file")
hadd_chunk.add_argument('files', action='store', nargs='+', help="Input files")
hadd_chunk.add_argument('--chunk-size',
                        action='store',
                        type=int,
                        default=50,
                        help="Chunk size of the hadd jobs")
hadd_chunk.add_argument('--divide-by-run',
                        action='store_true',
                        help="""Try to divide using run counts equally.""")
args, argv = hadd_chunk.parse_known_args()
argv = list(argv)

mainLogger = Logger.getModuleLogger(__name__, LoggingLevel.INFO)


def lcall(inputs):
    target = inputs[0]
    files = list(filter(lambda x: x is not None, inputs[1]))
    with open(os.devnull, "w") as w:
        if not call(['hadd'] + argv + [target] + files, stdout=w):
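            # --- Hedged sketch only: the original branch body is truncated in
            # --- this excerpt. `call` returns 0 on success, so a plausible
            # --- body simply reports the merged target:
            mainLogger.info(('Merged %d file(s) into %s') % (len(files), target))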
Example #6
defaultTrigList = [
    'e24_medium_L1EM18VH',
    'e24_lhmedium_L1EM18VH',
    'e24_tight_L1EM20VH',
    #'e24_vloose_L1EM20VH',
    #'e5_loose_idperf',
    #'e5_lhloose_idperf',
    #'e5_tight_idperf',
    #'e5_lhtight_idperf',
    'e24_medium_idperf_L1EM20VH',
    'e24_lhmedium_idperf_L1EM20VH'
]

parser = ArgumentParser()
parser.add_argument('--inFolderList',
                    nargs='+',
                    required=True,
                    help="Input container to retrieve data")
parser.add_argument('--signalDS',
                    action='store_true',
                    help="Whether the dataset contains TPNtuple")
parser.add_argument('--outfile',
                    action='store',
                    default="mergedOutput.root",
                    help="Name of the output file")
parser.add_argument('--triggerList',
                    nargs='+',
                    default=defaultTrigList,
                    help="Trigger list to keep on the filtered file.")
args = parser.parse_args()

mainLogger = Logger.getModuleLogger(__name__, LoggingLevel.INFO)
Example #7
    def save(self):
        self._logger.info(('Saving file %s') % (self._outputDS))
        self._file.Close()


######################### __main__ ############################
from RingerCore import expandFolders, csvStr2List, ArgumentParser
from pprint import pprint

mainFilterParser = ArgumentParser()

mainFilterParser.add_argument(
    '-i',
    '--inputFiles',
    action='store',
    metavar='InputFiles',
    required=True,
    nargs='+',
    help="The input files that will be used to generate a extract file")

mainFilterParser.add_argument(
    '-t',
    '--trigger',
    action='store',
    default='e0_perf_L1EM15',
    required=True,
    help="Trigger list to keep on the filtered file.")

mainFilterParser.add_argument('--basepath',
                              action='store',
                              default='HLT/Egamma/Expert',
Example #8
defaultTrigList = [
    'e24_medium_L1EM18VH',
    'e24_lhmedium_L1EM18VH',
    'e24_tight_L1EM20VH',
    #'e24_vloose_L1EM20VH',
    #'e5_loose_idperf',
    #'e5_lhloose_idperf',
    #'e5_tight_idperf',
    #'e5_lhtight_idperf',
    'e24_medium_idperf_L1EM20VH',
    'e24_lhmedium_idperf_L1EM20VH'
]

parser = ArgumentParser()
parser.add_argument('--inDS',
                    action='store',
                    required=True,
                    help="Input container to retrieve data")
parser.add_argument('--outFolder', action='store', default="dumpOutput")
parser.add_argument('--triggerList', nargs='+', default=defaultTrigList)
parser.add_argument('--numberOfSamplesPerPackage', type=int, default=50)
args = parser.parse_args()

mainLogger = Logger.getModuleLogger(__name__, LoggingLevel.INFO)

if os.path.exists('dq2_ls.txt'):
    os.system('rm dq2_ls.txt')

if args.inDS[-1] != '/':
    args.inDS += '/'

if args.outFolder[-1] != '/':
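    # --- The body is truncated in this excerpt; by analogy with the inDS
    # --- handling above, it presumably just appends the trailing slash:
    args.outFolder += '/'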