Example #1
def main():
    parser = optparse.OptionParser(usage="usage: %prog [options] arguments")
    groupRun = optparse.OptionGroup(parser, "Run mode Options")
    groupRun.add_option("-p", "--prepare", action="store_true", help="Prepare television tasks (default)", default=True)
    groupRun.add_option("-m", "--merge", action="store_true", help="Merge output files", default=False)
    parser.add_option_group(groupRun)

    groupCommon = optparse.OptionGroup(parser, "Common Options")
    parser.add_option_group(groupCommon)

    groupMerge = optparse.OptionGroup(parser, "Merge Options","Specify the television directories as arguments.")
    parser.add_option_group(groupMerge)

    groupPrepare = optparse.OptionGroup(parser, "Prepare Options","Specify one config file as argument")
    groupPrepare.add_option("-d", "--directory", action="store", help="Main television directory", default="./")
    groupPrepare.add_option("--section", action="append", help="Only prepare the following section. May be specified multiple times for multiple sections. If not specified, all sections are prepared.", default=[])
    groupPrepare.add_option("--test", action="store_true", help="Run one task per section with one small job only.", default=False)
    groupPrepare.add_option("--prepareConfigs", default = 'None',
                            choices=['None','MUSiC'],
                            help="Create config Files on the fly for given config style")
    #groupPrepare.add_option("--local", action="store_true", help="Run the tasks on local computer.", default=False)
    #groupPrepare.add_option("--testlocal", action="store_true", help="Run only one task with one small job locally.", default=False)
    groupPrepare.add_option("-s", "--skipcheck", action="store_true", help="Skip check if grid pack is outdated.", default=False)
    parser.add_option_group(groupPrepare)
    (options, args) = parser.parse_args()
    gridFunctions.checkAndRenewVomsProxy()
    if options.merge:
        merge(options, args)
    elif options.prepare:
        prepare(options, args)
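The following is a minimal, self-contained sketch (not part of the original script) of the same pattern: option groups on an optparse parser plus the merge/prepare dispatch. The merge() and prepare() bodies here are hypothetical placeholders for the real functions referenced above.

import optparse

def merge(options, args):      # placeholder for the real merge step
    print("merging", args)

def prepare(options, args):    # placeholder for the real prepare step
    print("preparing", args, "in", options.directory)

def main(argv=None):
    parser = optparse.OptionParser(usage="usage: %prog [options] arguments")

    # Grouped options show up as their own section in --help output.
    run = optparse.OptionGroup(parser, "Run mode Options")
    run.add_option("-p", "--prepare", action="store_true", default=True,
                   help="Prepare television tasks (default)")
    run.add_option("-m", "--merge", action="store_true", default=False,
                   help="Merge output files")
    parser.add_option_group(run)

    prep = optparse.OptionGroup(parser, "Prepare Options")
    prep.add_option("-d", "--directory", default="./",
                    help="Main television directory")
    parser.add_option_group(prep)

    options, args = parser.parse_args(argv)
    # --merge takes precedence over the always-true --prepare default,
    # mirroring the dispatch order in the example above.
    if options.merge:
        merge(options, args)
    elif options.prepare:
        prepare(options, args)

if __name__ == "__main__":
    main()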
Example #2
def commandline_parsing():
    parser = optparse.OptionParser( description='Watchfrog helps you to care for your jobs',  usage='usage: %prog [options]' )
    parser.add_option( '-o', '--only', metavar='PATTERNS', default=None,
                       help='Only check samples matching PATTERNS (bash-like ' \
                            'patterns only, comma separated values. ' \
                            'E.g. --only QCD* ). [default: %default]' )
    parser.add_option( '-u','--user', help='Alternative username [default is HN-username]')
    parser.add_option( '--workingArea',metavar='DIR',help='The area (full or relative path) where the CRAB project directories are saved. ' \
                     'Defaults to the current working directory.'       )
    parser.add_option( '--updateInterval', default=600,help='Time between two updates for crab tasks in seconds.')
    parser.add_option( '--nCores', default=multiprocessing.cpu_count(),help='Number of cores to use [default: %default]')



    #~ parsingController = crabFunctions.CrabController(logger = mylogger)
    parsingController = crabFunctions.CrabController()
    # we need to add the parser options from other modules
    #get crab command line options
    parsingController.commandlineOptions(parser)

    (options, args ) = parser.parse_args()
    now = datetime.datetime.now()
    isodatetime = now.strftime( "%Y-%m-%d_%H.%M.%S" )
    options.isodatetime = isodatetime

    if options.workingArea:
        options.workingArea = os.path.abspath(options.workingArea)
    else:
        options.workingArea = os.path.abspath(os.getcwd())

    options.runServer = True
    # get pass before starting

    options.dblink = createDBlink()
    # check if user has valid proxy
    import gridFunctions
    import getpass
    proxytime = gridFunctions.checkVomsProxy()
    if not proxytime:
        passphrase = getpass.getpass('Please enter your GRID pass phrase:')
        gridFunctions.checkAndRenewVomsProxy( passphrase = passphrase)




    #get current user HNname
    if not options.user:
        options.user = parsingController.checkusername()

    return (options, args )
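Below is a self-contained sketch of the generic post-processing done in this example: stamping the parsed options with a timestamp and normalizing --workingArea to an absolute path. The grid/CRAB helpers (crabFunctions, gridFunctions, createDBlink) are project-specific and are deliberately omitted; parse_args here is a hypothetical stand-in, not the original function.

import datetime
import optparse
import os

def parse_args(argv=None):
    parser = optparse.OptionParser(usage='usage: %prog [options]')
    parser.add_option('--workingArea', metavar='DIR',
                      help='Directory holding the CRAB project directories '
                           '(defaults to the current working directory).')
    parser.add_option('--updateInterval', type='int', default=600,
                      help='Seconds between two status updates [default: %default]')
    options, args = parser.parse_args(argv)

    # Attach an ISO-like timestamp, as the original function does.
    options.isodatetime = datetime.datetime.now().strftime("%Y-%m-%d_%H.%M.%S")

    # Normalize the working area to an absolute path, falling back to cwd.
    if options.workingArea:
        options.workingArea = os.path.abspath(options.workingArea)
    else:
        options.workingArea = os.path.abspath(os.getcwd())
    return options, args

if __name__ == "__main__":
    opts, _ = parse_args([])
    print(opts.isodatetime, opts.workingArea)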
Example #3
def commandline_parsing( parsingController ):
    ##parse user input
    ####################################
    # The following options
    # were already present in music_crab
    ####################################
    skimmer_dir = os.path.join( os.environ[ 'CMSSW_BASE' ], 'src/PxlSkimmer/Skimming' )
    lumi_dir    = os.path.join( skimmer_dir, 'test/lumi' )
    config_dir  = os.path.join( skimmer_dir, 'test/configs' )
    parser = optparse.OptionParser( description='Submit MUSiCSkimmer jobs for all samples listed in DATASET_FILE',  usage='usage: %prog [options] DATASET_FILE' )
    music_crabOpts = optparse.OptionGroup(parser, "Options for music_crab3")

    music_crabOpts.add_option( '-c', '--config', metavar='FILE', help='Use FILE as the CMSSW config file, instead of the one declared in DATASET_FILE. Corresponds to the crab3 option JobType.psetName.' )
    music_crabOpts.add_option( '--ana-dir', metavar='ANADIR', default=skimmer_dir,
                       help='Directory containing the analysis. If set, ANADIR is used '\
                            'as the base directory for CONFDIR and LUMIDIR. [default: '\
                            '%default]' )
    music_crabOpts.add_option( '--config-dir', metavar='CONFDIR', default=config_dir,
                       help='Directory containing CMSSW configs. Overwrites input from '\
                            'ANADIR. [default: %default]' )
    music_crabOpts.add_option( '--lumi-dir', metavar='LUMIDIR', default=lumi_dir,
                       help='Directory containing luminosity-masks. Overwrites input '\
                            'from ANADIR. [default: %default]' )
    music_crabOpts.add_option( '-o', '--only', metavar='PATTERNS', default=None,
                       help='Only submit samples matching PATTERNS (bash-like ' \
                            'patterns only, comma separated values. ' \
                            'E.g. --only QCD* ). [default: %default]' )
    music_crabOpts.add_option( '-S', '--submit', action='store_true', default=False,
                       help='Force the submission of jobs, even if a CRAB task with the given process name already exists. [default: %default]' )
    music_crabOpts.add_option( '--dry-run', action='store_true', default=False, help='Do everything except calling CRAB or registering samples to the database.' )
    music_crabOpts.add_option( '--debug', metavar='LEVEL', default='INFO', choices=log_choices,
                       help='Set the debug level. Allowed values: ' + ', '.join( log_choices ) + ' [default: %default]' )
    #~ music_crabOpts.add_option( '--noTag', action='store_true', default=False,
    music_crabOpts.add_option( '--noTag', action='store_true', default=False,help="Do not create a tag in the skimmer repository. [default: %default]" )
    music_crabOpts.add_option( '--overrideTag', default="noTag",help="Same as noTag but with custom string replacement for the tag name. [default: %default]" )

    music_crabOpts.add_option( '-D', '--db', action='store_true', default=False,
                       help="Register all datasets at the database: 'https://cern.ch/aix3adb/'. [default: %default]" )
    #///////////////////////////////
    #// new options since crab3
    #//////////////////////////////

    # new feature alternative username
    music_crabOpts.add_option( '-u', '--user', metavar='USERNAME', help='Alternative username [default: HN-username]' )
    music_crabOpts.add_option( '-g','--globalTag', help='Override globalTag from pset')
    music_crabOpts.add_option( '--resubmit',action='store_true', default=False, help='Try to resubmit jobs instead of submit')
    music_crabOpts.add_option( '--force',action='store_true', default=False, help='Delete existing crab folder and resubmit tasks')
    music_crabOpts.add_option( '--notInDB',action='store_true', default=False, help='Only submit samples if not in aix3aDB')
    parser.add_option_group(music_crabOpts)
    ###########################################
    # new  options for General section in pset
    ##########################################
    generalOpts = optparse.OptionGroup(parser, "\n SECTION General - Options for crab3 config section General ")
    generalOpts.add_option( '--workingArea',metavar='DIR',default=os.getcwd(),help='The area (full or relative path) where to create the CRAB project directory. '
                             'If the area doesn\'t exist, CRAB will try to create it using the mkdir command' \
                             ' (without -p option). Defaults to the current working directory.'       )
    generalOpts.add_option( '-t', '--transferOutputs', action='store_true',default=True,help="Whether to transfer the output to the storage site "
                                                    'or leave it at the runtime site. (Not transferring the output might'\
                                                    ' be useful for example to avoid filling up the storage area with'\
                                                    ' useless files when the user is just doing some test.) ' )
    generalOpts.add_option( '--nolog', action='store_true',default=False,help='Whether or not to copy the cmsRun stdout/'\
                                                    'stderr to the storage site. If set to False, the last 1 MB'\
                                                    ' of each job are still available through the monitoring in '\
                                                    'the job logs files and the full logs can be retrieved from the runtime site with')
    generalOpts.add_option( '--failureLimit', help='The number of jobs that may fail permanently before the entire task is cancelled. '\
                                                'Defaults to 10% of the jobs in the task. ')
    parser.add_option_group( generalOpts )
    ########################################
    # new options for JobType in pset
    ########################################
    jobTypeOpts = optparse.OptionGroup(parser, "\n SECTION JobType - Options for crab3 config section JobType ")
    jobTypeOpts.add_option('--pyCfgParams',default=None, help="List of parameters to pass to the CMSSW parameter-set configuration file, as explained in the CRAB3 documentation. For example, if set to "\
    "[\'myOption\',\'param1=value1\',\'param2=value2\'], then the jobs will execute cmsRun JobType.psetName myOption param1=value1 param2=value2. ")
    jobTypeOpts.add_option('--inputFiles',help='List of private input files needed by the jobs. ')
    jobTypeOpts.add_option('--outputFiles',help='List of output files that need to be collected, besides those already specified in the output'\
                                                ' modules or TFileService of the CMSSW parameter-set configuration file.  ')
    jobTypeOpts.add_option( '--allowUndistributedCMSSW', action='store_true', default=False,
                       help='Allow using a CMSSW release potentially not available at sites. [default: %default]' )
    jobTypeOpts.add_option('--maxmemory',help=' Maximum amount of memory (in MB) a job is allowed to use. ')
    jobTypeOpts.add_option('--maxJobRuntimeMin',help="Overwrite maxJobRuntimeMin if it is present in the sample file [default: 72] (set by CRAB)" )
    jobTypeOpts.add_option('--numcores', help="Number of requested cores per job. [default: 1]" )
    jobTypeOpts.add_option('--priority', help='Task priority among the user\'s own tasks. Higher priority tasks will be processed before lower priority.'\
                                                    ' Two tasks of equal priority will have their jobs start in an undefined order. The first five jobs in a'\
                                                    ' task are given a priority boost of 10. [default: 10]' )
    jobTypeOpts.add_option('-n','--name', default="PxlSkim" ,
                      help="Name for this analysis run (E.g. Skim Campaign Name) [default: %default]")
    jobTypeOpts.add_option('--publish', action='store_true', default=False, help="Switch to turn on publication of a processed sample [default: %default]")
    parser.add_option_group( jobTypeOpts )

    ####################################
    # new options for Data in pset
    ####################################
    dataOpts = optparse.OptionGroup(parser, "\n SECTION Data - Options for crab3 config section Data")
    dataOpts.add_option('--eventsPerJob',default=10000,help="Number of Events per Job for MC [default: %default]")
    dataOpts.add_option( '-d', '--inputDBS', metavar='inputDBS',default='global', help='Set DBS instance URL to use (e.g. for privately produced samples published in a local DBS).' )
    parser.add_option_group( dataOpts )

    ####################################
    # new options for Site in pset
    ####################################
    siteOpts = optparse.OptionGroup(parser, "\n SECTION Site - Options for crab3 config section Site ")
    siteOpts.add_option( '--outLFNDirBase', metavar='OUTLFNDIRBASE', default=None,
                       help="Set dCache directory for crab output to '/store/user/USERNAME/"\
                            "OUTLFNDIRBASE'. [default: '/store/user/USERNAME/PxlSkim/git-tag/']" )
    siteOpts.add_option( '-w', '--whitelist', metavar='SITES', help="Whitelist SITES in a comma separated list, e.g. 'T2_DE_RWTH,T2_US_Purdue'." )
    siteOpts.add_option( '-b', '--blacklist', metavar='SITES', help="Blacklist SITES (comma separated) in addition to T0 and T1, e.g. 'T2_DE_RWTH,T2_US_Purdue'." )
    siteOpts.add_option('--unitsPerJob',default="20",help="Suggests (but does not impose) how many units (i.e. files, luminosity sections or events, depending on the splitting mode) to include in each job. [default: %default]")
    siteOpts.add_option('--ignoreLocality',action='store_true',default=False,help="Set to True to allow jobs to run at any site, "
                                                        "regardless of whether the dataset is located at that site or not. "\
                                                        "Remote file access is done using Xrootd. The parameters Site.whitelist"\
                                                        " and Site.blacklist are still respected. This parameter is useful to allow "\
                                                        "jobs to run on other sites when for example a dataset is available on only one "\
                                                        "or a few sites which are very busy with jobs. It is strongly recommended "\
                                                        "to provide a whitelist of sites physically close to the input dataset's host "\
                                                        "site. This helps reduce file access latency. [default: %default]" )
    parser.add_option_group( siteOpts )

    # we need to add the parser options from other modules
    #get crab command line options
    parsingController.commandlineOptions(parser)

    (options, args ) = parser.parse_args()
    now = datetime.datetime.now()
    isodatetime = now.strftime( "%Y-%m-%d_%H.%M.%S" )
    options.isodatetime = isodatetime

    # check if user has valid proxy
    import gridFunctions
    gridFunctions.checkAndRenewVomsProxy()

    #get current user HNname
    if not options.user:
        options.user = parsingController.checkusername()

    # Set CONFDIR and LUMIDIR relative to ANADIR if ANADIR is set
    # but the other two are not.
    if options.ana_dir != skimmer_dir:
        # ANADIR was set (it is not at its default value).
        if options.lumi_dir == lumi_dir:
            # LUMIDIR was not set (it is at its default value).
            options.lumi_dir = os.path.join( options.ana_dir, 'test/lumi' )
        if options.config_dir == config_dir:
            # CONFDIR was not set (it is at its default value).
            options.config_dir = os.path.join( options.ana_dir, 'test/configs' )

    return (options, args )
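The directory fallback at the end of this example can be isolated into a small helper. Here is a minimal sketch of that rule: explicitly given LUMIDIR/CONFDIR values win, otherwise they are re-derived from --ana-dir. The resolve_dirs name and the paths used in the demonstration are illustrative only, not part of the original script.

import optparse
import os

def resolve_dirs(options, default_ana, default_lumi, default_config):
    # Only re-derive LUMIDIR/CONFDIR when ANADIR was changed from its default
    # and the respective directory option was left at its own default.
    if options.ana_dir != default_ana:
        if options.lumi_dir == default_lumi:
            options.lumi_dir = os.path.join(options.ana_dir, 'test/lumi')
        if options.config_dir == default_config:
            options.config_dir = os.path.join(options.ana_dir, 'test/configs')
    return options

# Illustrative use with an optparse.Values object standing in for parsed options.
opts = optparse.Values({'ana_dir': '/my/analysis',
                        'lumi_dir': '/cmssw/test/lumi',
                        'config_dir': '/cmssw/test/configs'})
resolve_dirs(opts, '/cmssw', '/cmssw/test/lumi', '/cmssw/test/configs')
print(opts.lumi_dir, opts.config_dir)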