Code Example #1
File: debugTool.py Project: nils-braun/grid-control
# |
# | Unless required by applicable law or agreed to in writing, software
# | distributed under the License is distributed on an "AS IS" BASIS,
# | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# | See the License for the specific language governing permissions and
# | limitations under the License.

import os, sys, logging
from gcSupport import Job, JobSelector, Options, Plugin, getConfig, scriptOptions
from grid_control import utils
from grid_control.datasets import DataProvider, DataSplitter
from python_compat import BytesBuffer, imap, irange, lmap, lzip

parser = Options()
parser.section('back', 'Backend debugging', '%s [<backend specifier>] ...')
parser.addBool('back', '', 'backend-list-nodes',     default=False, help='List backend nodes')
parser.addBool('back', '', 'backend-list-queues',    default=False, help='List backend queues')

parser.section('part', 'Dataset Partition debugging', '%s <path to partition file> ...')
parser.addText('part', '', 'partition-list',         default=None,  help='Select dataset partition information to display')
parser.addBool('part', '', 'partition-list-invalid', default=False, help='List invalidated dataset partitions')
parser.addBool('part', '', 'partition-check',        default=None,  help='Check dataset partition in specified work directory')

parser.section('jobs', 'Jobs debugging', '%s <config file / job file> ... ')
parser.addText('jobs', '', 'job-selector',           default='',    help='Display jobs matching selector')
parser.addBool('jobs', '', 'job-reset-attempts',     default=False, help='Reset the attempt counter')
parser.addText('jobs', '', 'job-force-state',        default='',    help='Force new job state')
parser.addText('jobs', '', 'job-show-jdl',           default='',    help='Show JDL file if available')

parser.section('data', 'Dataset debugging', '%s <dataset file> <dataset file> ...')
parser.addText('data', '', 'dataset-show-diff',      default='',    help='Show difference between datasets')
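
The excerpt above only declares the debugging options. A minimal sketch of how such a parser is typically consumed, based on the scriptOptions()/options.opts pattern visible in the later examples in this listing (the dispatch body itself is illustrative, not taken from debugTool.py):

options = scriptOptions(parser)
# Long option names are read back with dashes mapped to underscores,
# e.g. --backend-list-nodes becomes options.opts.backend_list_nodes
# (same pattern as options.opts.report_list in the report.py examples below).
if options.opts.backend_list_nodes:
	pass  # hypothetical dispatch: query the backend and list its nodes here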
Code Example #2
File: downloadFromSE.py Project: Fra-nk/grid-control
def parse_cmd_line():
	help_msg = '\n\nDEFAULT: The default is to download the SE files and check them with MD5 hashes.'
	help_msg += '\n * In case all files are transferred successfully, the job is marked'
	help_msg += '\n   as already downloaded, so that the files are not copied again.'
	help_msg += '\n * Failed transfer attempts will mark the job as failed, so that it'
	help_msg += '\n   can be resubmitted.'
	parser = Options(usage = '%s [OPTIONS] <config file>' + help_msg)

	def addBoolOpt(group, short_pair, option_base, help_base, default = False,
			option_prefix_pair = ('', 'no'), help_prefix_pair = ('', 'do not '), dest = None):
		def create_opt(idx):
			return str.join('-', option_prefix_pair[idx].split() + option_base.split())
		def create_help(idx):
			help_def = ''
			if (default and (idx == 0)) or ((not default) and (idx == 1)):
				help_def = ' [Default]'
			return help_prefix_pair[idx] + help_base + help_def
		parser.addFlag(group, short_pair, (create_opt(0), create_opt(1)), default = default, dest = dest,
			help_pair = (create_help(0), create_help(1)))

	addBoolOpt(None, 'v ', 'verify-md5',        default = True,  help_base = 'MD5 verification of SE files',
		help_prefix_pair = ('enable ', 'disable '))
	addBoolOpt(None, 'l ', 'loop',              default = False, help_base = 'loop over jobs until all files are successfully processed')
	addBoolOpt(None, 'L ', 'infinite',          default = False, help_base = 'process jobs in an infinite loop')
	addBoolOpt(None, '  ', 'shuffle',           default = False, help_base = 'shuffle download order')
	addBoolOpt(None, '  ', '',                  default = False, help_base = 'files which are already on local disk',
		option_prefix_pair = ('skip-existing', 'overwrite'), help_prefix_pair = ('skip ', 'overwrite '), dest = 'skip_existing')

	parser.section('jobs', 'Job state / flag handling')
	addBoolOpt('jobs', '  ', 'mark-dl',         default = True,  help_base = 'mark successfully downloaded jobs as such')
	addBoolOpt('jobs', '  ', 'mark-dl',         default = False, help_base = 'mark about successfully downloaded jobs',
		option_prefix_pair = ('ignore', 'use'), help_prefix_pair = ('ignore ', 'use '), dest = 'mark_ignore_dl')
	addBoolOpt('jobs', '  ', 'mark-fail',       default = True,  help_base = 'mark jobs failing verification as such')
	addBoolOpt('jobs', '  ', 'mark-empty-fail', default = False, help_base = 'mark jobs without any files as failed')

	parser.section('file', 'Local / SE file handling')
	for (option, help_base) in [
			('local-ok',   'files of successful jobs in local directory'),
			('local-fail', 'files of failed jobs in local directory'),
			('se-ok',      'files of successful jobs on SE'),
			('se-fail',    'files of failed jobs on the SE'),
		]:
		addBoolOpt('file', '  ', option, default = False, help_base = help_base,
			option_prefix_pair = ('rm', 'keep'), help_prefix_pair = ('remove ', 'keep '))

	parser.addText(None, 'o', 'output',    default = None,
		help = 'specify the local output directory')
	parser.addText(None, 'T', 'token',     default = 'VomsProxy',
		help = 'specify the access token used to determine ability to download - VomsProxy or TrivialAccessToken')
	parser.addList(None, 'S', 'selectSE',  default = None,
		help = 'specify the SE paths to process')
	parser.addText(None, 'r', 'retry',
		help = 'how often should a transfer be attempted [Default: 0]')
	parser.addText(None, 't', 'threads',   default = 0,
		help = 'how many parallel download threads should be used to download files [Default: no multithreading]')
	parser.addText(None, ' ', 'slowdown',  default = 2,
		help = 'specify time between downloads [Default: 2 sec]')
	parser.addBool(None, ' ', 'show-host', default = False,
		help = 'show SE hostname during download')

	parser.section('short', 'Shortcuts')
	parser.addFSet('short', 'm', 'move',        help = 'Move files from SE - shorthand for:'.ljust(100) + '%s',
		flag_set = '--verify-md5 --overwrite --mark-dl --use-mark-dl --mark-fail --rm-se-fail --rm-local-fail --rm-se-ok --keep-local-ok')
	parser.addFSet('short', 'c', 'copy',        help = 'Copy files from SE - shorthand for:'.ljust(100) + '%s',
		flag_set = '--verify-md5 --overwrite --mark-dl --use-mark-dl --mark-fail --rm-se-fail --rm-local-fail --keep-se-ok --keep-local-ok')
	parser.addFSet('short', 'j', 'just-copy',   help = 'Just copy files from SE - shorthand for:'.ljust(100) + '%s',
		flag_set = '--verify-md5 --skip-existing --no-mark-dl --ignore-mark-dl --no-mark-fail --keep-se-fail --keep-local-fail --keep-se-ok --keep-local-ok')
	parser.addFSet('short', 's', 'smart-copy',
		help = 'Copy correct files from SE, but remember already downloaded files and delete corrupt files - shorthand for: '.ljust(100) + '%s',
		flag_set = '--verify-md5 --mark-dl --mark-fail --rm-se-fail --rm-local-fail --keep-se-ok --keep-local-ok')
	parser.addFSet('short', 'V', 'just-verify', help = 'Just verify files on SE - shorthand for:'.ljust(100) + '%s',
		flag_set = '--verify-md5 --no-mark-dl --keep-se-fail --rm-local-fail --keep-se-ok --rm-local-ok --ignore-mark-dl')
	parser.addFSet('short', 'D', 'just-delete', help = 'Just delete all finished files on SE - shorthand for:'.ljust(100) + '%s',
		flag_set = '--skip-existing --rm-se-fail --rm-se-ok --rm-local-fail --keep-local-ok --no-mark-dl --ignore-mark-dl')

	return parser.parse()
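
The addBoolOpt helper above builds a pair of complementary long options (e.g. --loop / --no-loop, or --skip-existing / --overwrite) from option_prefix_pair. A small self-contained sketch of the same name-building logic, independent of the Options class (the helper name paired_names is only for illustration):

def paired_names(option_base, option_prefix_pair=('', 'no')):
	# mirrors create_opt() above: join a non-empty prefix and the base with '-'
	return tuple(str.join('-', prefix.split() + option_base.split())
		for prefix in option_prefix_pair)

print(paired_names('loop'))                              # ('loop', 'no-loop')
print(paired_names('', ('skip-existing', 'overwrite')))  # ('skip-existing', 'overwrite')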
Code Example #3
File: datasetDBS3Add.py Project: Fra-nk/grid-control
def setup_parser():
	parser = Options(usage = '%s [OPTIONS] <config file / work directory>')
	parser.section('disc', 'Discovery options - ignored in case dbs input file is specified')
	# options that are used as config settings for InfoScanners
	parser.addText('disc', 'n', 'dataset-name-pattern', default = '',
		help = 'Specify dbs path name - Example: DataSet_@NICK@_@VAR@')
	parser.addText('disc', 'H', 'dataset-hash-keys',    default = '',
		help = 'Include additional variables in dataset hash calculation')
	parser.addText('disc', 'J', 'source-job-selector',  default = '',
		help = 'Specify dataset(s) to process')
	parser.addBool('disc', 'm', 'merge-parents',        default = False,
		help = 'Merge output files from different parent blocks into a single block [Default: Keep boundaries]')
	parser.addText('disc', 'P', 'parent-source',        default = '',
		help = 'Override parent information source - to bootstrap a reprocessing on local files')
	# options directly used by this script
	parser.addText('disc', 'T', 'datatype',             default = None,
		help = 'Supply dataset type in case cmssw report did not specify it - valid values: "mc" or "data"')
	parser.addBool('disc', 'j', 'jobhash',              default = False,
		help = 'Use hash of all config files in job for dataset key calculation')
	parser.addBool('disc', 'u', 'unique-cfg',           default = False,
		help = 'Circumvent edmConfigHash collisions so each dataset is stored with unique config information')
	parser.addText('disc', 'G', 'globaltag',          default = 'crab2_tag',
		help = 'Specify global tag')

	parser.section('proc', 'Processing mode')
	parser.addBool('proc', 'd', 'discovery',            default = False,
		help = 'Enable discovery mode - just collect file information and exit')
	parser.addText('proc', ' ', 'tempdir',              default = '',
		help = 'Override temp directory')
	parser.addBool('proc', 'i', 'no-import',            default = True, dest = 'do_import',
		help = 'Disable import of new datasets into target DBS instance - only temporary json files are created')
	parser.addBool('proc', 'I', 'incremental',          default = False,
		help = 'Skip import of existing files - Warning: this destroys coherent block structure!')
	parser.addBool('proc', 'o', 'open-blocks',          default = True, dest = 'do_close_blocks',
		help = 'Keep blocks open for addition of further files [Default: Close blocks]')
#	parser.addBool('proc', 'b', 'batch',                default = False,
#		help = 'Enable non-interactive batch mode [Default: Interactive mode]')

	parser.section('dbsi', 'DBS instance handling')
	parser.addText('dbsi', 't', 'target-instance',      default = 'https://cmsweb.cern.ch/dbs/prod/phys03',
		help = 'Specify target dbs instance url')
	parser.addText('dbsi', 's', 'source-instance',      default = 'https://cmsweb.cern.ch/dbs/prod/global',
		help = 'Specify source dbs instance url(s), where parent datasets are taken from')

	parser.addText(None, 'F', 'input-file',         default = None,
		help = 'Specify dbs input file to use instead of scanning job output')
	parser.addBool(None, 'c', 'continue-migration', default = False,
		help = 'Continue an already started migration')
#	parser.addText(None, 'D', 'display-dataset',    default = None,
#		help = 'Display information associated with dataset key(s) (accepts "all")')
#	parser.addText(None, 'C', 'display-config',     default = None,
#		help = 'Display information associated with config hash(es) (accepts "all")')
#	parser.addText(None, 'k', 'dataset-key-select', default = '',
#		help = 'Specify dataset keys to process')

	return scriptOptions(parser)
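
A hedged sketch of how the options returned by setup_parser() are typically consumed; the attribute access pattern (dashes mapped to underscores on options.opts) is taken from the other examples in this listing, while the discovery check itself is only illustrative:

options = setup_parser()
# e.g. --target-instance is read back as options.opts.target_instance,
# --discovery as options.opts.discovery (assumption based on the option list above)
if options.opts.discovery:
	pass  # discovery mode: only collect file information and exit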
Code Example #4
def setup_parser():
    parser = Options(usage='%s [OPTIONS] <config file / work directory>')
    parser.section(
        'disc',
        'Discovery options - ignored in case dbs input file is specified')
    # options that are used as config settings for InfoScanners
    parser.addText(
        'disc',
        'n',
        'dataset-name-pattern',
        default='',
        help='Specify dbs path name - Example: DataSet_@NICK@_@VAR@')
    parser.addText(
        'disc',
        'H',
        'dataset-hash-keys',
        default='',
        help='Include additional variables in dataset hash calculation')
    parser.addText('disc',
                   'J',
                   'source-job-selector',
                   default='',
                   help='Specify dataset(s) to process')
    parser.addBool(
        'disc',
        'm',
        'merge-parents',
        default=False,
        help=
        'Merge output files from different parent blocks into a single block [Default: Keep boundaries]'
    )
    parser.addText(
        'disc',
        'P',
        'parent-source',
        default='',
        help=
        'Override parent information source - to bootstrap a reprocessing on local files'
    )
    # options directly used by this script
    parser.addText(
        'disc',
        'T',
        'datatype',
        default=None,
        help=
        'Supply dataset type in case cmssw report did not specify it - valid values: "mc" or "data"'
    )
    parser.addBool(
        'disc',
        'j',
        'jobhash',
        default=False,
        help='Use hash of all config files in job for dataset key calculation')
    parser.addBool(
        'disc',
        'u',
        'unique-cfg',
        default=False,
        help=
        'Circumvent edmConfigHash collisions so each dataset is stored with unique config information'
    )
    parser.addText('disc',
                   'G',
                   'globaltag',
                   default='crab2_tag',
                   help='Specify global tag')

    parser.section('proc', 'Processing mode')
    parser.addBool(
        'proc',
        'd',
        'discovery',
        default=False,
        help='Enable discovery mode - just collect file information and exit')
    parser.addText('proc',
                   ' ',
                   'tempdir',
                   default='',
                   help='Override temp directory')
    parser.addBool(
        'proc',
        'i',
        'no-import',
        default=True,
        dest='do_import',
        help=
        'Disable import of new datasets into target DBS instance - only temporary json files are created'
    )
    parser.addBool(
        'proc',
        'I',
        'incremental',
        default=False,
        help=
        'Skip import of existing files - Warning: this destroys coherent block structure!'
    )
    parser.addBool(
        'proc',
        'o',
        'open-blocks',
        default=True,
        dest='do_close_blocks',
        help=
        'Keep blocks open for addition of further files [Default: Close blocks]'
    )
    #	parser.addBool('proc', 'b', 'batch',                default = False,
    #		help = 'Enable non-interactive batch mode [Default: Interactive mode]')

    parser.section('dbsi', 'DBS instance handling')
    parser.addText('dbsi',
                   't',
                   'target-instance',
                   default='https://cmsweb.cern.ch/dbs/prod/phys03',
                   help='Specify target dbs instance url')
    parser.addText(
        'dbsi',
        's',
        'source-instance',
        default='https://cmsweb.cern.ch/dbs/prod/global',
        help=
        'Specify source dbs instance url(s), where parent datasets are taken from'
    )

    parser.addText(
        None,
        'F',
        'input-file',
        default=None,
        help='Specify dbs input file to use instead of scanning job output')
    parser.addBool(None,
                   'c',
                   'continue-migration',
                   default=False,
                   help='Continue an already started migration')
    #	parser.addText(None, 'D', 'display-dataset',    default = None,
    #		help = 'Display information associated with dataset key(s) (accepts "all")')
    #	parser.addText(None, 'C', 'display-config',     default = None,
    #		help = 'Display information associated with config hash(es) (accepts "all")')
    #	parser.addText(None, 'k', 'dataset-key-select', default = '',
    #		help = 'Specify dataset keys to process')

    return scriptOptions(parser)
Code Example #5
# | limitations under the License.

import sys
from datasetListFromX import addDatasetListOptions, discoverDataset
from gcSupport import Options, scriptOptions, utils

parser = Options(usage = '%s [OPTIONS] <config file / work directory>')
parser.addText(None, 'J', 'job-selector', dest = 'external job selector', default = '',
	help = 'Specify which jobs to process')
parser.addText(None, 'i', 'info-scanner',
	help = 'Specify which info scanner to run')
parser.addText(None, 'm', 'event-mode',   dest = 'mode',                  default = 'CMSSW-Out',
	help = 'Specify how to determine events - available: [CMSSW-Out], CMSSW-In, DataMod')
parser.addText(None, 'l', 'lfn',          dest = 'lfn marker',            default = '/store/',
	help = 'Assume everything starting with marker to be a logical file name')
parser.addBool(None, 'c', 'config',       dest = 'include config infos',  default = False,
	help = 'CMSSW specific: Add configuration data to metadata')
parser.addBool(None, 'p', 'parents',      dest = 'include parent infos',  default = False,
	help = 'CMSSW specific: Add parent infos to metadata')
addDatasetListOptions(parser)
options = scriptOptions(parser, arg_keys = ['dataset'])

# Positional parameters override options
if len(options.args) == 0:
	utils.exitWithUsage(parser.usage())
tmp = {'cmssw-out': 'CMSSW_EVENTS_WRITE', 'cmssw-in': 'CMSSW_EVENTS_READ', 'datamod': 'MAX_EVENTS'}
if options.opts.info_scanner:
	options.config_dict['scanner'] = options.opts.info_scanner.replace(',', ' ')
options.config_dict['events key'] = tmp.get(options.config_dict['mode'].lower(), '')
sys.exit(discoverDataset('GCProvider', options.config_dict))
Code Example #6
File: parameterList.py Project: whahmad/grid-control
import os, sys, random
from gcSupport import Options, getConfig, scriptOptions, utils
from grid_control.datasets import DataSplitter
from grid_control.parameters import ParameterAdapter, ParameterInfo, ParameterMetadata, ParameterSource
from python_compat import ifilter, imap, izip, lfilter, lmap, md5_hex, set, sorted

random.seed(0)

parser = Options(usage='%s [OPTIONS] <parameter definition>')
parser.addAccu(None,
               'c',
               'collapse',
               default=0,
               help='Do not collapse dataset infos in display')
parser.addBool(None, 'a', 'active', default=False, help='Show activity state')
parser.addBool(None,
               'd',
               'disabled',
               default=False,
               help='Show disabled parameter sets')
parser.addBool(None,
               'f',
               'force-intervention',
               default=False,
               help='Simulate dataset intervention')
parser.addBool(None,
               'I',
               'intervention',
               default=False,
               help='Display intervention tasks')
Code Example #7
# |
# | Unless required by applicable law or agreed to in writing, software
# | distributed under the License is distributed on an "AS IS" BASIS,
# | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# | See the License for the specific language governing permissions and
# | limitations under the License.

import os, sys
from gcSupport import ClassSelector, FileInfoProcessor, JobClass, Options, getCMSSWInfo, initGC, scriptOptions, utils
from grid_control.datasets import DataSplitter
from grid_control_cms.lumi_tools import formatLumi, mergeLumi, parseLumiFilter
from python_compat import imap, irange, lmap, set, sorted

parser = Options()
parser.section('expr', 'Manipulate lumi filter expressions', '%s <lumi filter expression>')
parser.addBool('expr', 'G', 'gc',            default = False, help = 'Output grid-control compatible lumi expression')
parser.addBool('expr', 'J', 'json',          default = False, help = 'Output JSON file with lumi expression')
parser.addBool('expr', 'F', 'full',          default = False, help = 'Output JSON file with full expression')

parser.section('calc', 'Options which allow luminosity related calculations', '%s <config file>')
parser.addText('calc', 'O', 'output-dir',    default = None,  help = 'Set output directory (default: work directory)')
parser.addBool('calc', 'g', 'job-gc',        default = False, help = 'Output grid-control compatible lumi expression for processed lumi sections')
parser.addBool('calc', 'j', 'job-json',      default = False, help = 'Output JSON file with processed lumi sections')
parser.addBool('calc', 'e', 'job-events',    default = False, help = 'Get number of events processed')
parser.addBool('calc', 'p', 'parameterized', default = False, help = 'Use output file name to categorize output (useful for parameterized tasks)')
parser.addText('calc', ' ', 'replace',   default = 'job_%d_', help = 'Pattern to replace for parameterized jobs (default: job_%%d_)')
options = scriptOptions(parser)

def outputGC(lumis, stream = sys.stdout):
	stream.write('%s\n' % utils.wrapList(formatLumi(lumis), 60, ',\n'))
Code Example #8
File: report.py Project: nils-braun/grid-control
# |
# |     http://www.apache.org/licenses/LICENSE-2.0
# |
# | Unless required by applicable law or agreed to in writing, software
# | distributed under the License is distributed on an "AS IS" BASIS,
# | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# | See the License for the specific language governing permissions and
# | limitations under the License.

import sys
from gcSupport import Activity, JobSelector, Options, Plugin, displayPluginList, getConfig, getPluginList, scriptOptions, utils

parser = Options(usage='%s [OPTIONS] <config file>')
parser.addBool(None,
               'L',
               'report-list',
               default=False,
               help='List available report classes')
parser.addBool(None,
               'T',
               'use-task',
               default=False,
               help='Forward task information to report')
parser.addText(None, 'R', 'report', default='GUIReport')
parser.addText(None, 'J', 'job-selector', default=None)
parser.addText(None, ' ', 'string', default='')
options = scriptOptions(parser)

Report = Plugin.getClass('Report')

if options.opts.report_list:
Code Example #9
File: report.py Project: Fra-nk/grid-control
# | you may not use this file except in compliance with the License.
# | You may obtain a copy of the License at
# |
# |     http://www.apache.org/licenses/LICENSE-2.0
# |
# | Unless required by applicable law or agreed to in writing, software
# | distributed under the License is distributed on an "AS IS" BASIS,
# | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# | See the License for the specific language governing permissions and
# | limitations under the License.

import sys
from gcSupport import Activity, JobSelector, Options, Plugin, displayPluginList, getConfig, getPluginList, scriptOptions, utils

parser = Options(usage = '%s [OPTIONS] <config file>')
parser.addBool(None, 'L', 'report-list',  default = False, help = 'List available report classes')
parser.addBool(None, 'T', 'use-task',     default = False, help = 'Forward task information to report')
parser.addText(None, 'R', 'report',       default = 'GUIReport')
parser.addText(None, 'J', 'job-selector', default = None)
parser.addText(None, ' ', 'string',       default = '')
options = scriptOptions(parser)

Report = Plugin.getClass('Report')

if options.opts.report_list:
	sys.stderr.write('Available report classes:\n')
	displayPluginList(getPluginList('Report'))

if len(options.args) != 1:
	utils.exitWithUsage(parser.usage())
Code Example #10
# | Unless required by applicable law or agreed to in writing, software
# | distributed under the License is distributed on an "AS IS" BASIS,
# | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# | See the License for the specific language governing permissions and
# | limitations under the License.

import sys
from datasetListFromX import addDatasetListOptions, discoverDataset
from gcSupport import Options, scriptOptions, utils

parser = Options(usage = '%s [OPTIONS] <config file / work directory>')
parser.addText(None, 'J', 'job-selector', dest = 'external job selector', default = '',
	help = 'Specify which jobs to process')
parser.addText(None, 'm', 'event-mode',   dest = 'mode',                  default = 'CMSSW-Out',
	help = 'Specify how to determine events - available: [CMSSW-Out], CMSSW-In, DataMod')
parser.addText(None, 'l', 'lfn',          dest = 'lfn marker',            default = '/store/',
	help = 'Assume everything starting with marker to be a logical file name')
parser.addBool(None, 'c', 'config',       dest = 'include config infos',  default = False,
	help = 'CMSSW specific: Add configuration data to metadata')
parser.addBool(None, 'p', 'parents',      dest = 'include parent infos',  default = False,
	help = 'CMSSW specific: Add parent infos to metadata')
addDatasetListOptions(parser)
options = scriptOptions(parser, arg_keys = ['dataset'])

# Positional parameters override options
if len(options.args) == 0:
	utils.exitWithUsage(parser.usage())
tmp = {'cmssw-out': 'CMSSW_EVENTS_WRITE', 'cmssw-in': 'CMSSW_EVENTS_READ', 'datamod': 'MAX_EVENTS'}
options.config_dict['events key'] = tmp.get(options.config_dict['mode'].lower(), '')
sys.exit(discoverDataset('GCProvider', options.config_dict))
Code Example #11
File: lumiInfo.py Project: whahmad/grid-control
# | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# | See the License for the specific language governing permissions and
# | limitations under the License.

import os, sys
from gcSupport import ClassSelector, FileInfoProcessor, JobClass, Options, getCMSSWInfo, initGC, scriptOptions, utils
from grid_control.datasets import DataSplitter
from grid_control_cms.lumi_tools import formatLumi, mergeLumi, parseLumiFilter
from python_compat import imap, irange, lmap, set, sorted

parser = Options()
parser.section('expr', 'Manipulate lumi filter expressions',
               '%s <lumi filter expression>')
parser.addBool('expr',
               'G',
               'gc',
               default=False,
               help='Output grid-control compatible lumi expression')
parser.addBool('expr',
               'J',
               'json',
               default=False,
               help='Output JSON file with lumi expression')
parser.addBool('expr',
               'F',
               'full',
               default=False,
               help='Output JSON file with full expression')

parser.section('calc', 'Options which allow luminosity related calculations',
               '%s <config file>')
Code Example #12
# | distributed under the License is distributed on an "AS IS" BASIS,
# | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# | See the License for the specific language governing permissions and
# | limitations under the License.

import os, sys, random
from gcSupport import Options, getConfig, scriptOptions, utils
from grid_control.datasets import DataSplitter
from grid_control.parameters import ParameterInfo, ParameterMetadata, ParameterSource
from python_compat import ifilter, imap, irange, izip, lfilter, lmap, md5_hex, set, sorted

random.seed(0)

parser = Options(usage = '%s [OPTIONS] <parameter definition>')
parser.addAccu(None, 'c', 'collapse',           default = 0,     help = 'Do not collapse dataset infos in display')
parser.addBool(None, 'a', 'active',             default = False, help = 'Show activity state')
parser.addBool(None, 'd', 'disabled',           default = False, help = 'Show disabled parameter sets')
parser.addBool(None, 'f', 'force-intervention', default = False, help = 'Simulate dataset intervention')
parser.addBool(None, 'I', 'intervention',       default = False, help = 'Display intervention tasks')
parser.addBool(None, 'l', 'list-parameters',    default = False, help = 'Display parameter list')
parser.addBool(None, 'L', 'show-sources',       default = False, help = 'Show parameter sources')
parser.addBool(None, 's', 'static',             default = False, help = 'Assume a static parameterset')
parser.addBool(None, 't', 'untracked',          default = False, help = 'Display untracked variables')
parser.addBool(None, 'T', 'persistent',         default = False, help = 'Work with persistent parameters')
parser.addList(None, 'p', 'parameter',          default = [],    help = 'Specify parameters')
parser.addText(None, 'D', 'dataset',            default = '',    help = 'Add dataset splitting (use "True" to simulate a dataset)')
parser.addText(None, 'M', 'manager',            default = None,  help = 'Select parameter source manager')
parser.addText(None, 'o', 'output',             default = '',    help = 'Show only specified parameters')
parser.addText(None, 'S', 'save',               default = '',    help = 'Saves information to specified file')
parser.addText(None, 'V', 'visible',            default = '',    help = 'Set visible variables')
options = scriptOptions(parser)
Code Example #13
def parse_cmd_line():
    help_msg = '\n\nDEFAULT: The default is to download the SE files and check them with MD5 hashes.'
    help_msg += '\n * In case all files are transferred successfully, the job is marked'
    help_msg += '\n   as already downloaded, so that the files are not copied again.'
    help_msg += '\n * Failed transfer attempts will mark the job as failed, so that it'
    help_msg += '\n   can be resubmitted.'
    parser = Options(usage='%s [OPTIONS] <config file>' + help_msg)

    def addBoolOpt(group,
                   short_pair,
                   option_base,
                   help_base,
                   default=False,
                   option_prefix_pair=('', 'no'),
                   help_prefix_pair=('', 'do not '),
                   dest=None):
        def create_opt(idx):
            return str.join(
                '-', option_prefix_pair[idx].split() + option_base.split())

        def create_help(idx):
            help_def = ''
            if (default and (idx == 0)) or ((not default) and (idx == 1)):
                help_def = ' [Default]'
            return help_prefix_pair[idx] + help_base + help_def

        parser.addFlag(group,
                       short_pair, (create_opt(0), create_opt(1)),
                       default=default,
                       dest=dest,
                       help_pair=(create_help(0), create_help(1)))

    addBoolOpt(None,
               'v ',
               'verify-md5',
               default=True,
               help_base='MD5 verification of SE files',
               help_prefix_pair=('enable ', 'disable '))
    addBoolOpt(
        None,
        'l ',
        'loop',
        default=False,
        help_base='loop over jobs until all files are successfully processed')
    addBoolOpt(None,
               'L ',
               'infinite',
               default=False,
               help_base='process jobs in an infinite loop')
    addBoolOpt(None,
               '  ',
               'shuffle',
               default=False,
               help_base='shuffle download order')
    addBoolOpt(None,
               '  ',
               '',
               default=False,
               help_base='files which are already on local disk',
               option_prefix_pair=('skip-existing', 'overwrite'),
               help_prefix_pair=('skip ', 'overwrite '),
               dest='skip_existing')

    parser.section('jobs', 'Job state / flag handling')
    addBoolOpt('jobs',
               '  ',
               'mark-dl',
               default=True,
               help_base='mark successfully downloaded jobs as such')
    addBoolOpt('jobs',
               '  ',
               'mark-dl',
               default=False,
               help_base='mark about successfully downloaded jobs',
               option_prefix_pair=('ignore', 'use'),
               help_prefix_pair=('ignore ', 'use '),
               dest='mark_ignore_dl')
    addBoolOpt('jobs',
               '  ',
               'mark-fail',
               default=True,
               help_base='mark jobs failing verification as such')
    addBoolOpt('jobs',
               '  ',
               'mark-empty-fail',
               default=False,
               help_base='mark jobs without any files as failed')

    parser.section('file', 'Local / SE file handling')
    for (option, help_base) in [
        ('local-ok', 'files of successful jobs in local directory'),
        ('local-fail', 'files of failed jobs in local directory'),
        ('se-ok', 'files of successful jobs on SE'),
        ('se-fail', 'files of failed jobs on the SE'),
    ]:
        addBoolOpt('file',
                   '  ',
                   option,
                   default=False,
                   help_base=help_base,
                   option_prefix_pair=('rm', 'keep'),
                   help_prefix_pair=('remove ', 'keep '))

    parser.addText(None,
                   'o',
                   'output',
                   default=None,
                   help='specify the local output directory')
    parser.addText(
        None,
        'T',
        'token',
        default='VomsProxy',
        help=
        'specify the access token used to determine ability to download - VomsProxy or TrivialAccessToken'
    )
    parser.addList(None,
                   'S',
                   'selectSE',
                   default=None,
                   help='specify the SE paths to process')
    parser.addText(
        None,
        'r',
        'retry',
        help='how often should a transfer be attempted [Default: 0]')
    parser.addText(
        None,
        't',
        'threads',
        default=0,
        help=
        'how many parallel download threads should be used to download files [Default: no multithreading]'
    )
    parser.addText(None,
                   ' ',
                   'slowdown',
                   default=2,
                   help='specify time between downloads [Default: 2 sec]')
    parser.addBool(None,
                   ' ',
                   'show-host',
                   default=False,
                   help='show SE hostname during download')

    parser.section('short', 'Shortcuts')
    parser.addFSet(
        'short',
        'm',
        'move',
        help='Move files from SE - shorthand for:'.ljust(100) + '%s',
        flag_set=
        '--verify-md5 --overwrite --mark-dl --use-mark-dl --mark-fail --rm-se-fail --rm-local-fail --rm-se-ok --keep-local-ok'
    )
    parser.addFSet(
        'short',
        'c',
        'copy',
        help='Copy files from SE - shorthand for:'.ljust(100) + '%s',
        flag_set=
        '--verify-md5 --overwrite --mark-dl --use-mark-dl --mark-fail --rm-se-fail --rm-local-fail --keep-se-ok --keep-local-ok'
    )
    parser.addFSet(
        'short',
        'j',
        'just-copy',
        help='Just copy files from SE - shorthand for:'.ljust(100) + '%s',
        flag_set=
        '--verify-md5 --skip-existing --no-mark-dl --ignore-mark-dl --no-mark-fail --keep-se-fail --keep-local-fail --keep-se-ok --keep-local-ok'
    )
    parser.addFSet(
        'short',
        's',
        'smart-copy',
        help=
        'Copy correct files from SE, but remember already downloaded files and delete corrupt files - shorthand for: '
        .ljust(100) + '%s',
        flag_set=
        '--verify-md5 --mark-dl --mark-fail --rm-se-fail --rm-local-fail --keep-se-ok --keep-local-ok'
    )
    parser.addFSet(
        'short',
        'V',
        'just-verify',
        help='Just verify files on SE - shorthand for:'.ljust(100) + '%s',
        flag_set=
        '--verify-md5 --no-mark-dl --keep-se-fail --rm-local-fail --keep-se-ok --rm-local-ok --ignore-mark-dl'
    )
    parser.addFSet(
        'short',
        'D',
        'just-delete',
        help='Just delete all finished files on SE - shorthand for:'.ljust(100)
        + '%s',
        flag_set=
        '--skip-existing --rm-se-fail --rm-se-ok --rm-local-fail --keep-local-ok --no-mark-dl --ignore-mark-dl'
    )

    return parser.parse()
Code Example #14
# |
# | Unless required by applicable law or agreed to in writing, software
# | distributed under the License is distributed on an "AS IS" BASIS,
# | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# | See the License for the specific language governing permissions and
# | limitations under the License.

import os, sys
from gcSupport import Options, getConfig, scriptOptions, utils
from grid_control.datasets import DataProvider, DatasetError
from grid_control.utils import thread_tools
from python_compat import imap, itemgetter, izip, lmap, lzip, set, sort_inplace, sorted

usage = '%s [OPTIONS] <DBS dataset path> | <dataset cache file>' % sys.argv[0]
parser = Options(usage)
parser.addBool(None, 'l', 'list-datasets',  default = False, help = 'Show list of all datasets in query / file')
parser.addBool(None, 'b', 'list-blocks',    default = False, help = 'Show list of blocks of the dataset(s)')
parser.addBool(None, 'f', 'list-files',     default = False, help = 'Show list of all files grouped according to blocks')
parser.addBool(None, 's', 'list-storage',   default = False, help = 'Show list of locations where data is stored')
parser.addBool(None, 'm', 'metadata',       default = False, help = 'Get metadata information of dataset files')
parser.addBool(None, 'M', 'block-metadata', default = False, help = 'Get common metadata information of dataset blocks')
parser.addBool(None, 'O', 'ordered',        default = False, help = 'Sort dataset blocks and files')
parser.addText(None, 'p', 'provider',       default = '',    help = 'Default dataset provider')
parser.addText(None, 'C', 'settings',       default = '',    help = 'Specify config file as source of detailed dataset settings')
parser.addText(None, 'S', 'save',           default = '',    help = 'Saves dataset information to specified file')
parser.addBool(None, 'i', 'info',           default = False, help = 'Gives machine readable info of given dataset(s)')
parser.addBool(None, 'c', 'config-entry',   default = False, help = 'Gives config file entries to run over given dataset(s)')
parser.addBool(None, 'n', 'config-nick',    default = False, help = 'Use dataset path to derive nickname in case it is undefined')
parser.addText(None, 'L', 'location',  default = 'hostname', help = 'Format of location information')
options = scriptOptions(parser)
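
A sketch of the argument check that typically follows; the len(options.args) / utils.exitWithUsage(parser.usage()) pattern appears verbatim in the report.py example above, but whether this script uses exactly this check is an assumption:

if len(options.args) != 1:
	utils.exitWithUsage(parser.usage())
# the single positional argument (DBS dataset path or dataset cache file)
# would then be combined with options.opts.provider and options.opts.settings
# to query a DataProvider (assumption based on the imports and options above)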