def addDAODArguments(parser, mergerTrf=True):
    """Register DAOD file arguments for every known DAOD derivation type.

    With mergerTrf=True (the default) each type gets a paired
    --input<DAOD>File / --output<DAOD>_MRGFile option for merge jobs;
    otherwise only the --output<DAOD>File derivation outputs are added.
    """
    daodTypes = knownDAODTypes()
    if mergerTrf:
        parser.defineArgGroup('Input DAOD', 'Input DAOD files to be merged')
        parser.defineArgGroup('Output DAOD', 'Output merged DAOD files')
        for derivName in daodTypes:
            parser.add_argument(
                "--input" + derivName + "File",
                nargs="+",
                group="Input DAOD",
                help="Input DAOD file of " + derivName + " derivation",
                type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile,
                                              io="input",
                                              type="AOD",
                                              subtype=derivName))
            parser.add_argument(
                "--output" + derivName + "_MRGFile",
                group="Output DAOD",
                help="Output merged DAOD file of " + derivName + " derivation",
                type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile,
                                              io="output",
                                              type="AOD",
                                              subtype=derivName))
    else:
        parser.defineArgGroup('Output DAOD', 'Output derivation DAOD files')
        for derivName in daodTypes:
            parser.add_argument(
                "--output" + derivName + "File",
                group="Output DAOD",
                help="Output DAOD file of " + derivName + " derivation",
                type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile,
                                              io="output",
                                              type="AOD",
                                              subtype=derivName))
def addMyArgs(parser):
    """Add AODMerge_tf specific arguments.

    Uses an arggroup so these arguments appear in their own --help
    sub-section.
    """
    parser.defineArgGroup('AODMerge_tf', 'AOD merge job specific options')
    parser.add_argument('--inputAODFile', nargs='+',
                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile,
                                                      io='input', type='aod'),
                        help='Input AOD file', group='AODMerge_tf')
    parser.add_argument('--inputAOD_MRGFile', nargs='+',
                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile,
                                                      io='input', type='aod'),
                        # Fixed help-text typo: "premarged" -> "premerged"
                        help='Input (premerged) AOD file', group='AODMerge_tf')
    parser.add_argument('--outputAOD_MRGFile', '--outputAODFile',
                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile,
                                                      io='output', type='aod'),
                        help='Output merged AOD file', group='AODMerge_tf')
    parser.add_argument('--outputTAGFile',
                        type=trfArgClasses.argFactory(trfArgClasses.argTAGFile,
                                                      io='output', type='tag'),
                        help='Output TAG file', group='AODMerge_tf')
    # NOTE(review): no group= here in the original, so this lands in the
    # default section of --help; possibly an oversight — confirm before adding.
    parser.add_argument('--fastPoolMerge',
                        type=trfArgClasses.argFactory(trfArgClasses.argBool),
                        help='Hybrid POOL merging switch (default True)')
def addMyArgs(parser):
    """Add Archiver/Unarchiver specific arguments in their own --help groups."""
    parser.defineArgGroup('Archiver', 'Options')
    parser.defineArgGroup('Unarchiver', 'Options')
    parser.add_argument('--inputDataFile', '--inputFile',
                        nargs='+',
                        group='Archiver',
                        help='Input file(s)',
                        type=trfArgClasses.argFactory(trfArgClasses.argFile,
                                                      io='input', type='misc'))
    parser.add_argument('--inputArchFile',
                        group='Unarchiver',
                        help='Input archive file',
                        type=trfArgClasses.argFactory(trfArgClasses.argFile,
                                                      io='input', type='misc'))
    parser.add_argument('--path',
                        group='Unarchiver',
                        default='.',
                        help='Specifies a different directory to extract to. The default is the current working directory')
    parser.add_argument('--outputArchFile', '--outputFile',
                        group='Archiver',
                        help='Output archive file',
                        type=trfArgClasses.argFactory(trfArgClasses.argFile,
                                                      io='output', type='misc'))
def addFCS_NtupArgs(parser):
    """Add FCS_Ntup_tf specific arguments (ESD in, NTUP_FCS out, plus flags)."""
    parser.defineArgGroup('FCS_Ntup_tf', 'FCS_Ntup_tf specific options')
    parser.add_argument('--inputESDFile',
                        nargs='+',
                        group='FCS_Ntup_tf',
                        help='Input ESD files',
                        type=argFactory(argPOOLFile, io='input'))
    parser.add_argument('--outputNTUP_FCSFile',
                        nargs='+',
                        group='FCS_Ntup_tf',
                        help='Output NTUP_FCS files',
                        type=argFactory(argFile, io='output'))
    parser.add_argument('--outputGeoFileName',
                        group='FCS_Ntup_tf',
                        help='Optional geometry file',
                        type=argFactory(argString))
    parser.add_argument('--doG4Hits',
                        group='FCS_Ntup_tf',
                        help='Flag to turn on the writing of G4 hits',
                        type=argFactory(argBool))
    parser.add_argument('--saveAllBranches',
                        group='FCS_Ntup_tf',
                        help='Flag to turn on extra branches for Hits and G4hits',
                        type=argFactory(argBool))
def addMyArgs(parser):
    """Add RAWMerge_tf specific arguments in their own --help sub-section."""
    parser.defineArgGroup('RAWMerge_tf', 'RAWMerge specific options')
    parser.add_argument('--inputBSFile',
                        nargs='+',
                        group='RAWMerge_tf',
                        help='Input BS file(s)',
                        type=trfArgClasses.argFactory(trfArgClasses.argBSFile,
                                                      io='input'))
    parser.add_argument('--outputBS_MRGFile', '--outputBSFile',
                        group='RAWMerge_tf',
                        help='Output merged BS file (best if this file ends in ._0001.data, but see allowRename option below)',
                        type=trfArgClasses.argFactory(trfArgClasses.argBSFile,
                                                      io='output'))
    parser.add_argument('--maskEmptyInputs',
                        group='RAWMerge_tf',
                        default=trfArgClasses.argBool('True'),
                        help='If true then empty BS files are not included in the merge (default True)',
                        type=trfArgClasses.argFactory(trfArgClasses.argBool))
    parser.add_argument('--allowRename',
                        group='RAWMerge_tf',
                        default=trfArgClasses.argBool('True'),
                        help='If true merged BS file will be forcibly renamed to the value of "outputBSFile" (default True)',
                        type=trfArgClasses.argFactory(trfArgClasses.argBool))
def addValidationArguments(parser):
    """Add standard job validation switches (logfile scanning, event counts)."""
    parser.defineArgGroup('Validation', 'Standard job validation switches')
    parser.add_argument(
        '--ignoreFiles', '--ignoreFilters',
        group='Validation',
        type=argFactory(trfArgClasses.argList, splitter=',', runarg=False),
        help='Files containing error patterns to be ignored during logfile scans (will split on commas; use "None" to disable the standard "atlas_error_mask.db")',
        nargs='+')
    parser.add_argument(
        '--ignorePatterns',
        group='Validation',
        type=argFactory(trfArgClasses.argList, splitter=',', runarg=False),
        help='Regexp error patterns to be ignored during logfile scans (will be applied as a search against the whole logfile line)',
        nargs='+')
    parser.add_argument(
        '--ignoreErrors',
        type=argFactory(trfArgClasses.argBool, runarg=False),
        metavar="BOOL",
        group='Validation',
        # Fixed help-text typo: "prefered" -> "preferred"
        help='Ignore ERROR lines in logfiles (use with care this can mask serious problems; --ignorePatterns is preferred)')
    parser.add_argument(
        '--checkEventCount',
        type=trfArgClasses.argFactory(trfArgClasses.argSubstepBool,
                                      defaultSubstep='all', runarg=False),
        help='Enable check of output events against input events (default: True)',
        group='Validation',
        metavar="BOOL")
def addCommonRecTrfArgs(parser):
    """Add reconstruction options shared by the reco transforms."""
    parser.defineArgGroup('Common Reco', 'Common Reconstruction Options')
    parser.add_argument('--autoConfiguration',
                        nargs='+',
                        metavar='AUTOCONFKEY',
                        group='Common Reco',
                        help='Autoconfiguration settings (whitespace or comma separated)',
                        type=trfArgClasses.argFactory(trfArgClasses.argList))
    parser.add_argument('--trigStream',
                        group='Common Reco',
                        help='Trigger stream setting',
                        type=trfArgClasses.argFactory(trfArgClasses.argList))
    parser.add_argument('--topOptions',
                        nargs="+",
                        metavar="substep:TOPOPTIONS",
                        group='Common Reco',
                        help='Alternative top options file for reconstruction (can be substep specific)',
                        type=trfArgClasses.argFactory(trfArgClasses.argSubstep))
    parser.add_argument('--valid',
                        metavar='BOOL',
                        group='Common Reco',
                        help='Enable decorations for AOD that allow for enhanced physics validation',
                        type=trfArgClasses.argFactory(trfArgClasses.argBool))
def addTestBeamArgs(parser):
    """Add TestBeam_tf specific options (configuration name and beam geometry)."""
    parser.defineArgGroup('TestBeam', 'TestBeam_tf specific options')
    parser.add_argument('--testBeamConfig',
                        metavar='CONFIGNAME',
                        group='TestBeam',
                        help='Specify a named test beam configuration. E.g. ctb, tbtile',
                        type=argFactory(argString))
    parser.add_argument('--Eta',
                        group='TestBeam',
                        help='Only set Eta if you want to simulate an eta-projective scan',
                        type=argFactory(argFloat))
    parser.add_argument('--Theta',
                        group='TestBeam',
                        help='For 90 degrees scans put Theta=+/-90. Positive theta means the beam enters from positive eta side (as defined in CTB setup!)',
                        type=argFactory(argFloat))
    parser.add_argument('--Z',
                        group='TestBeam',
                        help='Z coordinate is the distance from ATLAS center to the desired impact point. Sensitive part starts at Z=2300, ends at Z=2300+3*100+3*130+3*150+2*190=3820',
                        type=argFactory(argFloat))
def addMyArgs(parser):
    """Add EVNTMerge_tf specific arguments in their own --help sub-section."""
    parser.defineArgGroup('EVNTMerge_tf', 'EVNT merge job specific options')
    parser.add_argument('--inputEVNTFile',
                        nargs='+',
                        group='EVNTMerge_tf',
                        help='Input EVNT file',
                        type=trfArgClasses.argFactory(trfArgClasses.argEVNTFile,
                                                      io='input',
                                                      runarg=True,
                                                      type='evnt'))
    parser.add_argument('--outputEVNT_MRGFile', '--outputEVNTFile',
                        group='EVNTMerge_tf',
                        help='Output merged EVNT file',
                        type=trfArgClasses.argFactory(trfArgClasses.argEVNTFile,
                                                      io='output',
                                                      runarg=True,
                                                      type='evnt'))
    parser.add_argument('--eventService',
                        metavar="BOOL",
                        group='EVNTMerge_tf',
                        help='Switch AthenaMP to the Event Service configuration',
                        type=trfArgClasses.argFactory(trfArgClasses.argBool))
def addMetadataArguments(parser):
    """Add metadata arguments (AMI configuration, task/job identifiers)."""
    parser.defineArgGroup(
        'Metadata',
        'Metadata arguments that will be passed into the transform')
    parser.add_argument('--AMIConfig', '--AMI',
                        group="Metadata",
                        help='Configure transform with AMI tag parameters',
                        type=argFactory(trfArgClasses.argString, runarg=False))
    parser.add_argument('--AMITag',
                        metavar='TAG',
                        group="Metadata",
                        help='AMI tag from which this job was defined - this option simply writes the '
                             'relevant AMI tag value into the output metadata, it does not configure the job (use --AMIConfig for that)',
                        type=argFactory(trfArgClasses.argString))
    parser.add_argument('--taskid',
                        group="Metadata",
                        help="Task identification number",
                        type=argFactory(trfArgClasses.argString, runarg=False))
    parser.add_argument('--jobid',
                        group="Metadata",
                        help="Job identification number",
                        type=argFactory(trfArgClasses.argString, runarg=False))
    parser.add_argument('--attempt',
                        group="Metadata",
                        help="Job attempt number",
                        type=argFactory(trfArgClasses.argString, runarg=False))
def addDetectorArguments(parser):
    """Add general detector configuration options (simulation and reco)."""
    parser.defineArgGroup(
        'Detector',
        'General detector configuration options, for simulation and reconstruction'
    )
    parser.add_argument(
        '--DBRelease',
        group='Detector',
        type=argFactory(trfArgClasses.argSubstep, runarg=False),
        metavar='substep:DBRelease',
        nargs='+',
        # Fixed help text: closing parenthesis was missing after "current"
        help='Use DBRelease instead of ORACLE. Give either a DBRelease tarball file (e.g., DBRelease-21.7.1.tar.gz) or cvmfs DBRelease directory (e.g., 21.7.1 or current)'
    )
    parser.add_argument('--conditionsTag',
                        group='Detector',
                        type=argFactory(trfArgClasses.argSubstepConditions),
                        metavar='substep:CondTag',
                        nargs='+',
                        help='Conditions tag to set')
    parser.add_argument('--geometryVersion',
                        group='Detector',
                        type=argFactory(trfArgClasses.argSubstep),
                        metavar='substep:GeoVersion',
                        nargs='+',
                        help='ATLAS geometry version tag')
    parser.add_argument('--beamType',
                        group='Detector',
                        type=argFactory(trfArgClasses.argString),
                        help='Manual beam type setting')
    parser.add_argument('--runNumber', '--RunNumber',
                        group='Detector',
                        type=argFactory(trfArgClasses.argInt),
                        help='Manual run number setting')
def addReductionArguments(parser, transform=None):
    """Add reduced-filetype (derivation framework) options.

    The transform argument is accepted for interface compatibility but is
    not used here.
    """
    parser.defineArgGroup('Reduction', 'Reduced Filetype Options')
    parser.add_argument('--inputNTUP_COMMONFile',
                        type=argFactory(trfArgClasses.argNTUPFile,
                                        io='input', treeNames=['physics']),
                        group='Reduction',
                        help='NTUP common input file',
                        nargs='+')
    parser.add_argument('--outputDNTUPFile',
                        type=argFactory(trfArgClasses.argNTUPFile,
                                        treeNames=None),
                        group='Reduction',
                        help='Reduced NTUP output filename base')
    parser.add_argument('--outputDAODFile',
                        type=argFactory(trfArgClasses.argPOOLFile,
                                        io='output', type='aod'),
                        help='Output reduced AOD filename base',
                        group='Reduction')
    parser.add_argument('--reductionConf',
                        type=argFactory(trfArgClasses.argList),
                        nargs='+',
                        help='Configuration of reduced stream outputs',
                        group='Reduction')
    parser.add_argument(
        '--passThrough',
        type=argFactory(trfArgClasses.argBool),
        metavar="True/False",
        # Added group='Reduction' for consistency with every sibling argument
        # in this function (previously landed in the default --help section).
        group='Reduction',
        help='Run the derivation framework in a pass-through mode, needed for some MC samples. Needs to be implemented in derivation JOs'
    )
def addHITSMergeArgs(parser):
    """Add HITSMerge_tf specific arguments in their own --help sub-section."""
    parser.defineArgGroup('HITSMerge_tf', 'HITSMerge_tf specific options')
    parser.add_argument('--inputHITSFile', '--inputHitsFile',
                        nargs='+',
                        group='HITSMerge_tf',
                        help='Input HITS files',
                        type=argFactory(argPOOLFile,
                                        io='input', runarg=True, type='hits'))
    parser.add_argument('--outputHITS_MRGFile', '--outputHits_MRGFile',
                        '--outputHITSFile', '--outputHitsFile',
                        nargs='+',
                        group='HITSMerge_tf',
                        help='Output HITS file',
                        type=argFactory(argPOOLFile,
                                        io='output', runarg=True, type='hits'))
    # FIXME need to add code to do the log file merging.
    parser.add_argument('--inputLogsFile',
                        nargs='+',
                        group='HITSMerge_tf',
                        help='Input Log files',
                        type=argFactory(argFile,
                                        io='input', runarg=True, type='log'))
def addFileValidationArguments(parser):
    """Add standard input/output file validation switches."""
    parser.defineArgGroup('File Validation',
                          'Standard file validation switches')
    parser.add_argument('--fileValidation',
                        metavar='BOOL',
                        group='File Validation',
                        help='If FALSE skip both input and output file validation (default TRUE; warning - do not use this option in production jobs!)',
                        type=argFactory(trfArgClasses.argBool))
    parser.add_argument('--inputFileValidation',
                        metavar='BOOL',
                        group='File Validation',
                        help='If FALSE skip input file validation (default TRUE; warning - do not use this option in production jobs!)',
                        type=argFactory(trfArgClasses.argBool))
    parser.add_argument('--outputFileValidation',
                        metavar='BOOL',
                        group='File Validation',
                        help='If FALSE skip output file validation (default TRUE; warning - do not use this option in production jobs!)',
                        type=argFactory(trfArgClasses.argBool))
    parser.add_argument('--parallelFileValidation',
                        metavar='BOOL',
                        group='File Validation',
                        help='Parallelise file validation if True',
                        type=argFactory(trfArgClasses.argBool))
def add_RDOtoBS_args(parser):
    """Add RDO-to-ByteStream specific options (RDO in, BS out, trigger config)."""
    # Local import mirrors the original: keeps trfArgClasses available even if
    # the module top level does not import it.
    import PyJobTransforms.trfArgClasses as trfArgClasses
    parser.defineArgGroup('RDOtoBS_trf', 'RDO to BS specific options')
    parser.add_argument('--inputRDOFile',
                        nargs='+',
                        group='RDOtoBS_trf',
                        help='Input RDO file',
                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile,
                                                      io='input',
                                                      runarg=True,
                                                      type='rdo'))
    parser.add_argument('--outputBSFile',
                        group='RDOtoBS_trf',
                        help='Output BS file',
                        type=trfArgClasses.argFactory(trfArgClasses.argBSFile,
                                                      io='output',
                                                      runarg=True,
                                                      type='bs'))
    parser.add_argument('--triggerConfig',
                        group='RDOtoBS_trf',
                        help='Trigger config to be used',
                        type=trfArgClasses.argFactory(trfArgClasses.argString,
                                                      runarg=True))
def addMyArgs(parser):
    """Add FilterHit_tf specific arguments in their own --help sub-section."""
    parser.defineArgGroup('FilterHit_tf', 'FilterHit_tf specific options')
    parser.add_argument('--inputHITSFile', '--inputHitsFile',
                        nargs='+',
                        group='FilterHit_tf',
                        help='Input HITS file',
                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile,
                                                      io='input',
                                                      runarg=True,
                                                      type='hits'))
    parser.add_argument('--outputHITS_FILTFile', '--outputHits_FILTFile',
                        '--outputHITSFile', '--outputHitsFile',
                        nargs='+',
                        group='FilterHit_tf',
                        help='Output HITS file',
                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile,
                                                      io='output',
                                                      runarg=True,
                                                      type='hits'))
    parser.add_argument('--TruthReductionScheme',
                        group='FilterHit_tf',
                        help='Truth reduction method to be used.',
                        type=trfArgClasses.argFactory(trfArgClasses.argString,
                                                      runarg=True))
def addPhysValidationMergeFiles(parser):
    """Add physics validation NTUP merge file options.

    Better to somehow auto-import this from
    PhysicsAnalysis/PhysicsValidation/PhysValMonitoring.
    """
    parser.defineArgGroup('Validation Files',
                          'Physics validation file options')
    parser.add_argument('--inputNTUP_PHYSVALFile',
                        nargs='+',
                        group='Validation Files',
                        help='Input physics validation file',
                        type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile,
                                                      io='input'))
    parser.add_argument('--outputNTUP_PHYSVAL_MRGFile',
                        group='Validation Files',
                        help='Output merged physics validation file',
                        type=trfArgClasses.argFactory(trfArgClasses.argNTUPFile,
                                                      io='output'))
def addDebugArgs(parser):
    """Add trigger debug-stream recovery options in their own --help group."""
    parser.defineArgGroup('Debug',
                          'Specific options related to the trigger debug recovery')
    parser.add_argument('--debug_stream',
                        group='Debug',
                        help='Runs debug_stream analysis',
                        type=trfArgClasses.argFactory(trfArgClasses.argBool,
                                                      runarg=True))
    parser.add_argument('--outputHIST_DEBUGSTREAMMONFile',
                        nargs='+',
                        group='Debug',
                        help='Output DEBUGSTREAMMON file',
                        type=trfArgClasses.argFactory(trfArgClasses.argHISTFile,
                                                      io='output',
                                                      runarg=True,
                                                      countable=False))
def addEI_Basic_tfArgs(parser):
    """Add basic Event Index transform options.

    NOTE(review): the 'Event Index' group is not defined in this function;
    presumably defineArgGroup is called elsewhere — confirm.
    """
    parser.add_argument("--eidsname",
                        group='Event Index',
                        help="Overrides input file dataset name (default: read from job environment variable INDS)",
                        type=argFactory(argString))
    parser.add_argument("--trigger",
                        group='Event Index',
                        help="Include trigger information (default: true)",
                        type=argFactory(argBool))
    parser.add_argument("--provenance",
                        group='Event Index',
                        help="Include provenance information (default: true)",
                        type=argFactory(argBool))
    parser.add_argument("--sendtobroker",
                        group='Event Index',
                        help="Send event index to message broker (default: false)",
                        type=argFactory(argBool))
    parser.add_argument("--testbrk",
                        group='Event Index',
                        help="Use test message broker (default: false)",
                        type=argFactory(argBool))
    parser.add_argument("--eifmt",
                        group='Event Index',
                        help="0: produce old PKL format, 1: produce new SPB format, 2: both formats. (default: 0)",
                        type=argFactory(argInt))
    # Internal options for Tier-0 jobs only.
    parser.add_argument("--_taskid",
                        group='Event Index',
                        help="TaskID (for T0 jobs usage)",
                        type=argFactory(argString))
    parser.add_argument("--_jobid",
                        group='Event Index',
                        help="JobID (for T0 jobs usage)",
                        type=argFactory(argString))
    parser.add_argument("--_attempt",
                        group='Event Index',
                        help="Attempt number (for T0 jobs usage)",
                        type=argFactory(argString))
def addMyArgs(parser):
    """Add ESDMerge_tf specific arguments in their own --help sub-section."""
    parser.defineArgGroup('ESDMerge_tf', 'ESD merge job specific options')
    parser.add_argument('--inputESDFile',
                        nargs='+',
                        group='ESDMerge_tf',
                        help='Input ESD file',
                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile,
                                                      io='input'))
    parser.add_argument('--outputESD_MRGFile', '--outputESDFile',
                        group='ESDMerge_tf',
                        help='Output merged ESD file',
                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile,
                                                      io='output'))
    # NOTE(review): no group= in the original, so this appears in the default
    # --help section — confirm whether that is intentional.
    parser.add_argument('--fastPoolMerge',
                        help='Hybrid POOL merging switch',
                        type=trfArgClasses.argFactory(trfArgClasses.argBool))
def addRDOValidArgs(parser):
    """Add DigiValid_tf specific options (RDO input, HIST_DIGI output)."""
    parser.defineArgGroup('DigiValid_tf', 'DigiValid_tf specific options')
    parser.add_argument('--inputRDOFile',
                        nargs='+',
                        type=argFactory(argPOOLFile, io='input'),
                        help='Input RDO files',
                        group='DigiValid_tf')
    parser.add_argument('--outputHIST_DIGIFile',
                        nargs='+',
                        type=argFactory(argFile, io='output'),
                        # Fixed stray leading space in help text
                        help='Output HIST_DIGI files',
                        group='DigiValid_tf')
def addMyArgs(parser):
    """Add ExeWrap_trf specific arguments in their own --help sub-section."""
    parser.defineArgGroup('ExeWrap_trf', 'ExeWrap_trf specific options')
    parser.add_argument('--exe',
                        type=trfArgClasses.argFactory(trfArgClasses.argString),
                        group='ExeWrap_trf',
                        help='Executable to invoke')
    parser.add_argument('--args',
                        type=trfArgClasses.argFactory(trfArgClasses.argList),
                        nargs='+',
                        group='ExeWrap_trf',
                        # Fixed help-text typo: "Additonal" -> "Additional"
                        help='Additional parameters to the executor')
def addHITSValidArgs(parser):
    """Add SimValid_tf specific options (HITS input, HIST_SIM output)."""
    parser.defineArgGroup('SimValid_tf', 'SimValid_tf specific options')
    parser.add_argument('--inputHITSFile',
                        nargs='+',
                        type=argFactory(argPOOLFile, io='input'),
                        help='Input HITS files',
                        group='SimValid_tf')
    parser.add_argument('--outputHIST_SIMFile',
                        nargs='+',
                        type=argFactory(argFile, io='output'),
                        # Fixed stray leading space in help text
                        help='Output HIST_SIM files',
                        group='SimValid_tf')
def addD3PDArguments(parser,
                     pick=None,
                     transform=None,
                     multipleOK=False,
                     addD3PDMRGtypes=False):
    """Add D3PD NTUP file arguments for all known D3PD types.

    Parameters:
      parser           -- transform argument parser to extend
      pick             -- optional collection of D3PD names to restrict to
                          (None means add all known types)
      transform        -- optional transform whose executors get the D3PD
                          names registered as output data
      multipleOK       -- passed through to the output file argument factory
      addD3PDMRGtypes  -- if True add input/merged-output argument pairs
                          instead of plain output arguments
    """
    parser.defineArgGroup('D3PD NTUPs', 'D3PD File Options')
    # listAllKnownD3PD is a list of D3PD JobProperty type objects
    try:
        from D3PDMakerConfig.D3PDProdFlags import listAllKnownD3PD
        for dpdWriter in listAllKnownD3PD:
            dpdName = dpdWriter.StreamName.replace('Stream', '')
            # Fixed idiom: compare to None with "is", not "==" (PEP 8)
            if pick is None or dpdName in pick:
                if addD3PDMRGtypes:
                    parser.add_argument(
                        '--input' + dpdName + 'File',
                        type=argFactory(trfArgClasses.argNTUPFile,
                                        treeNames=dpdWriter.TreeNames,
                                        io='input'),
                        group='D3PD NTUPs',
                        metavar=dpdName.upper(),
                        help='D3PD input {0} file )'.format(dpdName),
                        nargs='+')
                    parser.add_argument(
                        '--output' + dpdName + '_MRGFile',
                        type=argFactory(trfArgClasses.argNTUPFile,
                                        treeNames=dpdWriter.TreeNames),
                        group='D3PD NTUPs',
                        metavar=dpdName.upper(),
                        help='D3PD merged output {0} file )'.format(dpdName))
                else:
                    parser.add_argument(
                        '--output' + dpdName + 'File',
                        type=argFactory(trfArgClasses.argNTUPFile,
                                        treeNames=dpdWriter.TreeNames,
                                        multipleOK=multipleOK),
                        group='D3PD NTUPs',
                        metavar=dpdName.upper(),
                        help='D3PD output {0} file (can be made in substeps {1})'
                        .format(dpdName, ','.join(dpdWriter.SubSteps)))
                    # Automatically add D3PDs as data arguments of their
                    # relevant executors
                    if transform:
                        for executor in transform.executors:
                            if hasattr(
                                    executor, 'substep'
                            ) and executor.substep in dpdWriter.SubSteps:
                                executor.outDataUpdate([dpdName])
                            if executor.name in dpdWriter.SubSteps:
                                executor.outDataUpdate([dpdName])
    except ImportError:
        msg.warning('D3PDProdFlags not available - cannot add D3PD arguments')
def addCommonSimDigTrfArgs(parser):
    """Add options common to simulation and digitisation transforms."""
    parser.defineArgGroup('SimDigi', 'Common Simulation Options')
    parser.add_argument('--DataRunNumber',
                        group='SimDigi',
                        help='Override existing run number with this value - deprecated?',
                        type=argFactory(argInt))
    parser.add_argument('--jobNumber',
                        group='SimDigi',
                        help='The number of this job in the current RunDependentSimulation task.',
                        type=argFactory(argInt))
def addOverlayTrfArgs(parser):
    """Add common overlay command-line parser arguments."""
    parser.defineArgGroup('Overlay', 'Common Overlay Options')
    parser.add_argument('--detectors',
                        nargs='*',
                        group='Overlay',
                        help='Detectors autoconfiguration string',
                        type=argFactory(argString))
    parser.add_argument('--outputRDO_SGNLFile',
                        nargs='+',
                        group='Overlay',
                        help='The output RDO file of the MC signal alone',
                        type=argFactory(argRDOFile, io='output'))
def addDataOverlayTrfArgs(parser):
    """Add data overlay command-line parser arguments."""
    parser.defineArgGroup('DataOverlay', 'Data Overlay Options')
    parser.add_argument('--inputBS_SKIMFile',
                        nargs='+',
                        group='DataOverlay',
                        help='Input skimmed RAW BS for pileup overlay',
                        type=argFactory(argBSFile, io='input'))
    parser.add_argument('--fSampltag',
                        group='DataOverlay',
                        help='The cool tag for /LAR/ElecCalib/fSampl/Symmetry, see https://twiki.cern.ch/twiki/bin/viewauth/Atlas/LArCalibMCPoolCool',
                        type=argFactory(argSubstep, defaultSubstep='overlay'))
def addTriggerDBArgs(parser):
    """Add trigger-DB specific options in their own --help sub-section."""
    parser.defineArgGroup('TriggerDB',
                          'Specific options related to the trigger DB')
    parser.add_argument('--useDB',
                        group='TriggerDB',
                        help='read from DB for athenaHLT aka use-database',
                        type=trfArgClasses.argFactory(trfArgClasses.argBool,
                                                      runarg=True))
    parser.add_argument('--DBserver',
                        group='TriggerDB',
                        help='DB name: aka athenaHLT db-server',
                        type=trfArgClasses.argFactory(trfArgClasses.argString,
                                                      runarg=True))
    parser.add_argument('--DBsmkey',
                        group='TriggerDB',
                        help='DB SMK',
                        type=trfArgClasses.argFactory(trfArgClasses.argString,
                                                      runarg=True))
    parser.add_argument('--DBhltpskey',
                        group='TriggerDB',
                        help='DB hltpskey',
                        type=trfArgClasses.argFactory(trfArgClasses.argString,
                                                      runarg=True))
    parser.add_argument('--DBl1pskey',
                        group='TriggerDB',
                        help='DB l1pskey',
                        type=trfArgClasses.argFactory(trfArgClasses.argString,
                                                      runarg=True))
def addTrigSimArguments(parser):
    """Add legacy trigger simulation options (BS/RDO files and BS versions)."""
    parser.defineArgGroup('LegacyTrigger',
                          'Legacy Trigger Simulation Related Options')
    # Input and output files
    parser.add_argument('--outputBS_OLDFile',
                        group='LegacyTrigger',
                        help='Output BS file after transforming to legacy payload',
                        type=argFactory(argBSFile, io='output'))
    parser.add_argument('--inputBS_OLDFile',
                        group='LegacyTrigger',
                        help='Input BS file with legacy payload',
                        type=argFactory(argBSFile, io='input'))
    parser.add_argument('--outputBS_TRIG_OLDFile',
                        group='LegacyTrigger',
                        help='Output BS file after legacy trigger simulation',
                        type=argFactory(argBSFile, io='output'))
    parser.add_argument('--inputBS_TRIG_OLDFile',
                        group='LegacyTrigger',
                        help='Input BS file with legacy trigger data',
                        type=argFactory(argBSFile, io='input'))
    parser.add_argument('--outputRDO_TRIG_OLDFile',
                        group='LegacyTrigger',
                        help='Output RDO file after legacy trigger simulation and merging with RDO',
                        type=argFactory(argPOOLFile, io='output'))
    parser.add_argument('--copyJO',
                        group='LegacyTrigger',
                        nargs='+',
                        metavar='substep:PATH,PATH,...',
                        help='Specifies one or more JO files to copy from current release, '
                             'can be used with steps specifying different release',
                        type=argFactory(argSubstepList, splitter=','))
    parser.add_argument('--outputBSEformatVersion',
                        group='LegacyTrigger',
                        nargs='+',
                        metavar='substep:VERSION',
                        help='Specifies eformat version name for ByteStream output, use "run1" to '
                             'produce run1-compatible output.',
                        type=argFactory(argSubstep))
    parser.add_argument('--outputBSEventStorageVersion',
                        group='LegacyTrigger',
                        nargs='+',
                        metavar='substep:VERSION',
                        help='Specifies EventStorage version name for ByteStream output, use "run1" to '
                             'produce run1-compatible output.',
                        type=argFactory(argSubstep))
def addMyArgs(parser):
    """Add TAGMerge_tf specific arguments in their own --help sub-section."""
    parser.defineArgGroup('TAGMerge_tf', 'TAG merging specific options')
    parser.add_argument('--inputTAGFile',
                        nargs='+',
                        group='TAGMerge_tf',
                        help='Input TAG file(s)',
                        type=trfArgClasses.argFactory(trfArgClasses.argTAGFile,
                                                      io='input'))
    parser.add_argument('--outputTAG_MRGFile', '--outputTAGFile',
                        group='TAGMerge_tf',
                        help='Output merged TAG file',
                        type=trfArgClasses.argFactory(trfArgClasses.argTAGFile,
                                                      io='output'))