# -----------------------------------------------------------------------------
# Command-line options for creating TuningTool data files from PhysVal ntuples.
# Defines `createDataParser` (add_help disabled so it can be used as a parent
# parser) with a "Required arguments" group (-s/-b signal & background inputs,
# -op operation point, -t tree path) and a "Configuration extra arguments"
# group (--reference dataset filter).
# NOTE(review): this chunk is collapsed onto one physical line and the final
# optCreateData.add_argument('--reference', ...) call continues past this
# view, so the statement below is intentionally left byte-identical.
# NOTE(review): the mid-line `from TuningTools.ReadData import ...` and the
# `\ +` sequences are artifacts of the line collapse — presumably backslash
# continuations in the original file; verify against the upstream source.
############################################################################### createDataParser = argparse.ArgumentParser(add_help = False, description = 'Create TuningTool data from PhysVal.') from TuningTools.ReadData import Reference, Detector mainCreateData = createDataParser.add_argument_group( "Required arguments", "") mainCreateData.add_argument('-s','--sgnInputFiles', action='store', metavar='SignalInputFiles', required = True, nargs='+', help = "The signal files that will be used to tune the discriminators") mainCreateData.add_argument('-b','--bkgInputFiles', action='store', metavar='BackgroundInputFiles', required = True, nargs='+', help = "The background files that will be used to tune the discriminators") mainCreateData.add_argument('-op','--operation', default = NotSet, help = """The Ringer operation determining in each Trigger level or what is the offline operation point reference. Possible options are: """ \ + str(get_attributes( RingerOperation, onlyVars = True, getProtected = False)) ) mainCreateData.add_argument('-t','--treePath', metavar='TreePath', action = 'store', default = NotSet, type=str, nargs='+', help = """The Tree path to be filtered on the files. It can be a value for each dataset.""") optCreateData = createDataParser.add_argument_group( "Configuration extra arguments", "") optCreateData.add_argument('--reference', action='store', nargs='+', default = NotSet, help = """ The reference used for filtering datasets. It needs to be set to a value on the Reference enumeration on ReadData file. You can set only one value to be used for both datasets, or one value first for the Signal dataset and the second for the Background dataset. Possible options are: """ \ + str( get_attributes( Reference, onlyVars = True, getProtected = False) ),
# -----------------------------------------------------------------------------
# JobFileTypeCreation: enumeration-style class (EnumStringification base is
# project-defined) listing what kind of TuningJob input files may be created.
# NOTE(review): the assignments `all = 0,` / `ConfigFiles = 1,` /
# `CrossValidFile = 2,` carry trailing commas, so these attributes bind
# one-element tuples ((0,), (1,), (2,)) rather than plain ints — presumably
# unintended; confirm against EnumStringification's value handling.
# Below the class, `tuningJobFileParser` is set up with the positional
# `fileType` choice (choices taken from the class attribute names via
# get_attributes(..., onlyVars=True)) and a `--compress` flag.
# NOTE(review): this chunk is collapsed onto one physical line and the final
# jobConfig.add_argument('-oJConf', ...) call continues past this view, so
# the statement below is left byte-identical.
class JobFileTypeCreation(EnumStringification): """ The possible file creation options """ all = 0, ConfigFiles = 1, CrossValidFile = 2, ppFile = 3 tuningJobFileParser = ArgumentParser( add_help=False, description='Create files used by TuningJob.') tuningJobFileParser.add_argument( 'fileType', choices=get_attributes(JobFileTypeCreation, onlyVars=True, getProtected=False), nargs='+', help="""Which kind of files to create. You can choose one or more of the available choices, just don't use all with the other available choices.""") tuningJobFileParser.add_argument('--compress', type=BooleanStr, help="Whether to compress files or not.") ################################################################################ jobConfig = tuningJobFileParser.add_argument_group( "JobConfig Files Creation Options", """Change configuration for job config files creation.""") jobConfig.add_argument('-oJConf', '--jobConfiFilesOutputFolder', default=NotSet,
# -----------------------------------------------------------------------------
# Command-line options for merging several tuning output files into one.
# Defines `mainParser` with a "Required arguments" group (-i input files,
# -o output file, -wm write method, --allowTmpFiles) and an "Optional
# arguments" group (--binFilters to group input files by bin).
# NOTE(review): this chunk is collapsed onto one physical line and ends inside
# the open triple-quoted --binFilters help string (cut at "...and group001"),
# so the statement below is left byte-identical.
# NOTE(review): "Possibles method are" and "speficied" are typos in
# user-visible help text — fixing them requires editing runtime strings, out
# of scope for a comment-only pass.
from TuningTools import GridJobFilter mainParser = argparse.ArgumentParser(description = 'Merge files into unique file.', add_help = False) mainMergeParser = mainParser.add_argument_group( "Required arguments", "") mainMergeParser.add_argument('-i','--inputFiles', action='store', metavar='InputFiles', required = True, nargs='+', help = "The input files that will be used to generate a unique file") mainMergeParser.add_argument('-o','--outputFile', action='store', metavar='OutputFile', required = True, help = "The output file generated") mainMergeParser.add_argument('-wm','--writeMethod', action='store', default = "ShUtil", help = "The write method to use. Possibles method are: " \ + str(get_attributes( WriteMethod, onlyVars = True, getProtected = False)) ) mainMergeParser.add_argument('--allowTmpFiles', action='store', default = "True", help = "When reading .pic files, whether the creation of temporary files is enabled." \ + str(get_attributes( BooleanStr, onlyVars = True, getProtected = False)) ) optMergeParser = mainParser.add_argument_group( "Optional arguments", "") optMergeParser.add_argument('--binFilters', action='store', default = NotSet, help = """This option filter the files types from each job. It can be a string with the name of a class defined on python/CrossValidStat dedicated to automatically separate the files or a comma separated list of patterns that identify unique group of files for each bin. A python list can also be speficied. E.g.: You can specify 'group001,group002' if you have file001.group001.pic, file002.group001, file001.group002, file002.group002 available and group001
def TuningJobFileParser():
  """Assemble and return the ArgumentParser used to create TuningJob files.

  The returned parser (add_help disabled so it can be composed as a parent
  parser) carries the positional ``fileType`` choice, a ``--compress`` flag,
  and three option groups: job-config, cross-validation and pre-processing
  file creation options.
  """
  parser = ArgumentParser(add_help=False,
                          description='Create files used by TuningJob.')
  # Positional argument: which kind of file(s) this invocation creates; the
  # allowed choices are the attribute names of JobFileTypeCreation.
  parser.add_argument('fileType',
                      choices=get_attributes(JobFileTypeCreation, onlyVars=True, getProtected=False),
                      nargs='+',
                      help="""Which kind of files to create. You can choose one or more of the available choices, just don't use all with the other available choices.""")
  parser.add_argument('--compress', type=BooleanStr,
                      help="Whether to compress files or not.")

  # ---- Job configuration file creation options ------------------------------
  job_group = parser.add_argument_group(
      "JobConfig Files Creation Options",
      """Change configuration for job config files creation.""")
  job_group.add_argument('-oJConf', '--jobConfiFilesOutputFolder',
                         default=NotSet,
                         help="The job config files output folder.")
  job_group.add_argument('--neuronBounds', nargs='+', type=int, default=NotSet,
                         help=""" Input a sequential bounded list to be used as the neuron job range, the arguments should have the same format from the seq unix command or as the Matlab format. If not specified, the range will start from 1. I.e. 5 2 9 leads to [5 7 9] and 50 leads to 1:50 """)
  job_group.add_argument('--sortBounds', nargs='+', type=int, default=NotSet,
                         help=""" Input a sequential bounded list using seq format to be used as the sort job range, but the last bound will be opened just as happens when using python range function. If not specified, the range will start from 0. I.e. 
5 2 9 leads to [5 7] and 50 leads to range(50) """)
  job_group.add_argument('--nInits', nargs='?', type=int, default=NotSet,
                         help="The number of initilizations of the discriminator.")
  job_group.add_argument('--nNeuronsPerJob', type=int, default=NotSet,
                         help="The number of hidden layer neurons per job.")
  job_group.add_argument('--nSortsPerJob', type=int, default=NotSet,
                         help="The number of sorts per job.")
  job_group.add_argument('--nInitsPerJob', type=int, default=NotSet,
                         help="The number of initializations per job.")

  # ---- Cross-validation file creation options -------------------------------
  cross_group = parser.add_argument_group(
      "CrossValid File Creation Options",
      """Change configuration for CrossValid file creation.""")
  cross_group.add_argument('-outCross', '--crossValidOutputFile',
                           default='crossValid',
                           help="The cross validation output file.")
  cross_group.add_argument('-m', '--method', default=NotSet,
                           type=CrossValidMethod,
                           help="The Cross-Validation method.")
  cross_group.add_argument('-ns', '--nSorts', type=int, default=NotSet,
                           help="""The number of sort used by cross validation configuration.""")
  cross_group.add_argument('-nb', '--nBoxes', type=int, default=NotSet,
                           help="""The number of boxes used by cross validation configuration.""")
  cross_group.add_argument('-ntr', '--nTrain', type=int, default=NotSet,
                           help="""The number of train boxes used by cross validation.""")
  cross_group.add_argument('-nval', '--nValid', type=int, default=NotSet,
                           help="""The number of valid boxes used by cross validation.""")
  cross_group.add_argument('-ntst', '--nTest', type=int, default=NotSet,
                           help="""The number of test boxes used by cross validation.""")
  cross_group.add_argument('-seed', type=int, default=NotSet,
                           help="The seed value for generating CrossValid object.")

  # ---- Pre-processing file creation options ---------------------------------
  pp_group = parser.add_argument_group(
      "PreProc File Creation Options",
      """Change configuration for 
pre-processing file creation. These options will only be taken into account if job fileType is set to "ppFile" or "all".""")
  pp_group.add_argument('-outPP', '--preProcOutputFile', default='ppFile',
                        help="The pre-processing validation output file")
  pp_group.add_argument('-ppCol', type=str, default='[[Norm1()]]',
                        help="""The pre-processing collection to apply. The string will be parsed by python and created using the available pre-processings on TuningTools.PreProc.py file. This string can have classes from the PreProc module initialized with determined values. E.g.: -ppCol "[[[Norm1(),MapStd()],[RingerRp(2.,1.3)],[MapStd]],[[Norm1(),MapStd],[Norm1],[MapStd]],[[Norm1,MapStd],[Norm1({'level' : 'VERBOSE'})],[MapStd({'d' : {'level' : 'VERBOSE'}})]]]" The explicit usage of () or not will make no difference resulting in the class instance initialization. Also, a special syntax need to be used when passing keyword arguments as specified in: MapStd({'level' : 'VERBOSE'}) (equivalent in python) => MapStd( level = VERBOSE ) MapStd({'d' : {'level' : 'VERBOSE'}}) => MapStd( d = { level : VERBOSE } ) """)
  pp_group.add_argument('-pp_ns', '--pp_nSorts', default=NotSet, type=int,
                        help="""The number of sort used by cross validation configuration. Import from nSorts if not set.""")
  pp_group.add_argument('-pp_nEt', '--pp_nEtBins', default=NotSet, type=int,
                        help="""The number of et bins.""")
  pp_group.add_argument('-pp_nEta', '--pp_nEtaBins', default=NotSet, type=int,
                        help="""The number of eta bins.""")
  return parser
# -----------------------------------------------------------------------------
# Command-line options for running a tuning job on previously created
# TuningTool data. Defines `tuningJobParser` (add_help disabled,
# conflict_handler='resolve' so parent parsers may override options) with:
# a "Required arguments" group (-d data file), an "Optional arguments" group
# (--outputFileBase, -op operation point), a "Cross-validation configuration"
# group (-x cross-validation file) and a "Looping configuration" group
# (-c config-file list, --neuronBounds).
# NOTE(review): this chunk is collapsed onto one physical line and ends inside
# the open triple-quoted --neuronBounds help string, so the statement below
# is left byte-identical. Do not append anything after it.
################################################################################ tuningJobParser = argparse.ArgumentParser(add_help = False, description = 'Tune discriminator for a specific TuningTool data.', conflict_handler = 'resolve') tuningDataArgs = tuningJobParser.add_argument_group( "Required arguments", "") tuningDataArgs.add_argument('-d', '--data', action='store', metavar='data', required = True, help = "The data file that will be used to tune the discriminators") tuningOptArgs = tuningJobParser.add_argument_group( "Optional arguments", "") tuningOptArgs.add_argument('--outputFileBase', action='store', default = NotSet, help = """Base name for the output file.""") tuningOptArgs.add_argument('-op','--operation', default = None, help = """The Ringer operation determining in each Trigger level or what is the offline operation point reference. Possible options are: """ \ + str(get_attributes( RingerOperation, onlyVars = True, getProtected = False)) ) tuningCrossVars = tuningJobParser.add_argument_group( "Cross-validation configuration", "") tuningCrossVars.add_argument('-x', '--crossFile', action='store', default = NotSet, help = """The cross-validation file path, pointing to a file created with the create tuning job files""") tuningLoopVars = tuningJobParser.add_argument_group( "Looping configuration", "") tuningLoopVars.add_argument('-c','--confFileList', nargs='+', default = None, help = """A python list or a comma separated list of the root files containing the configuration to run the jobs. The files can be generated using a CreateConfFiles instance which can be accessed via command line using the createTuningJobFiles.py script.""") tuningLoopVars.add_argument('--neuronBounds', nargs='+', type=int, default = None, help = """ Input a sequential bounded list to be used as the neuron job range, the arguments should have the same format from the seq unix command or as the
""", ) optArgs.add_argument( "-idx", "--binFilterIdx", default=None, nargs="+", type=int, help="""The index of the bin job to run. e.g. two bins, idx will be: 0 and 1""", ) optArgs.add_argument( "--doMonitoring", default=NotSet, dest="_doMonitoring", help="Enable or disable monitoring file creation. Allowed options: " + str(get_attributes(BooleanStr, onlyVars=True, getProtected=False)), ) optArgs.add_argument( "--doMatlab", default=NotSet, dest="_doMatlab", help="Enable or disable matlab file creation. Allowed options: " + str(get_attributes(BooleanStr, onlyVars=True, getProtected=False)), ) optArgs.add_argument( "--doCompress", default=NotSet, dest="_doCompress", help="Enable or disable raw output file compression. Allowed options: " + str(get_attributes(BooleanStr, onlyVars=True, getProtected=False)), )
# ^ NOTE(review): the chunk above starts mid-statement — the leading `""", )`
# closes a help string and add_argument(...) call opened on an earlier line
# outside this view — so it is kept byte-identical and comments are appended
# here rather than prepended. It adds optional arguments to a pre-existing
# `optArgs` group: -idx/--binFilterIdx (bin indices to run) plus three
# BooleanStr-style toggles stored under private dest names (_doMonitoring,
# _doMatlab, _doCompress), presumably exposed via properties on the consumer
# object — verify against the class that reads these namespaces.
from TuningTools.CrossValid import CrossValidMethod
################################################################################
# Create tuningJob file related objects
################################################################################

class JobFileTypeCreation( EnumStringification ):
  """
    The possible file creation options
  """
  # FIX(review): the original assignments ended with trailing commas
  # ("all = 0," etc.), which silently bound one-element tuples ((0,), (1,),
  # (2,)) instead of the plain integers an enumeration clearly intends.
  # The commas are removed; attribute NAMES (used as parser choices via
  # get_attributes(..., onlyVars=True)) are unchanged.
  all = 0
  ConfigFiles = 1
  CrossValidFile = 2
  ppFile = 3

# Parser used to create TuningJob input files; add_help is disabled so it can
# be composed as a parent parser.
tuningJobFileParser = argparse.ArgumentParser( add_help = False,
                        description = 'Create files used by TuningJob.' )
# Positional argument: one or more file kinds to create, drawn from the
# JobFileTypeCreation attribute names.
tuningJobFileParser.add_argument('fileType',
    choices = get_attributes(JobFileTypeCreation, onlyVars = True, getProtected = False),
    nargs='+',
    help = """Which kind of files to create. You can choose one or more of the available choices, just don't use all with the other available choices.""")
# Stored under the private dest "_compress"; the string default is parsed as
# a BooleanStr by the consumer.
tuningJobFileParser.add_argument('--compress', default='True', dest = '_compress',
    help = "Whether to compress files or not. Allowed options: " + \
           str( get_attributes( BooleanStr, onlyVars = True, getProtected = False ) ) )
################################################################################
jobConfig = tuningJobFileParser.add_argument_group( "JobConfig Files Creation Options",
    """Change configuration for job config files creation.""")
# NOTE(review): "--jobConfiFilesOutputFolder" looks like a typo for
# "--jobConfigFilesOutputFolder", but the flag is public CLI surface and is
# kept unchanged for backward compatibility.
jobConfig.add_argument('-oJConf', '--jobConfiFilesOutputFolder', default = NotSet,
    help = "The job config files output folder.")