def __init__(self, config, name):
	"""Initialize the light-weight CMSSW task.

	Resolves the run script shipped with the grid_control_cms package and
	uses it both as the sandbox script file info and as the source of the
	error-code -> message mapping.
	"""
	super(CMSSWLight, self).__init__(config, name)
	# Resolve the shared run script path only once (was resolved twice before)
	script_path = get_path_share('gc-run.cmssw-light.sh', pkg='grid_control_cms')
	self._script_fpi = Result(path_rel='gc-run.cmssw-light.sh', path_abs=script_path)
	# Parse error code -> message mapping from the run script
	self._update_map_error_code2msg(script_path)
def get_file_list(self):
	"""Yield the absolute paths of all dashboard monitoring files."""
	# The wrapper script lives in the package share directory
	yield get_path_share('mon.dashboard.sh', pkg='grid_control_cms')
	# The DashboardAPI python modules are shipped next to the share directory
	api_module_list = ('DashboardAPI.py', 'Logger.py', 'apmon.py', 'report.py')
	for api_module in api_module_list:
		yield get_path_share('..', 'DashboardAPI', api_module, pkg='grid_control_cms')
def _se_runcmd(cmd, *urls, **kwargs):
	"""Start the storage tool helper process for the given command and URLs."""
	# Normalize each URL and map the non-standard 'dir://' scheme to 'file://'
	url_list = []
	for url in urls:
		url_list.append(_norm_se_path(url).replace('dir://', 'file://'))
	return LocalProcess(get_path_share('gc-storage-tool'), cmd, *url_list, **kwargs)
def _get_sandbox_file_list(self, task, sm_list):
	"""Collect all files that have to be packed into the job sandbox.

	Combines monitoring files, dependency environment scripts, the task's
	(wildcard expanded) input files and two generated virtual files with
	the task configuration and the variable alias map.
	"""
	# Prepare all input files
	# Dependency names declared by the task and the storage managers
	dep_list = set(ichain(imap(lambda x: x.get_dependency_list(), [task] + sm_list)))
	# Resolve each dependency to an 'env.<name>.sh' script in any package share dir
	dep_fn_list = lmap(lambda dep: resolve_path('env.%s.sh' % dep,
		lmap(lambda pkg: get_path_share('', pkg=pkg), os.listdir(get_path_pkg()))), dep_list)
	# Merge monitoring environment with task / storage manager dictionaries
	task_config_dict = dict_union(self._remote_event_handler.get_mon_env_dict(),
		*imap(lambda x: x.get_task_dict(), [task] + sm_list))
	task_config_dict.update({'GC_DEPFILES': str.join(' ', dep_list),
		'GC_USERNAME': self._token.get_user_name(), 'GC_WMS_NAME': self._name})
	task_config_str_list = DictFormat(escape_strings=True).format(
		task_config_dict, format='export %s%s%s\n')
	# Identity aliases for the monitoring variables, extended by task aliases
	vn_alias_dict = dict(izip(self._remote_event_handler.get_mon_env_dict().keys(),
		self._remote_event_handler.get_mon_env_dict().keys()))
	vn_alias_dict.update(task.get_var_alias_map())
	# note: 'delimeter' is the spelling used by the DictFormat API
	vn_alias_str_list = DictFormat(delimeter=' ').format(vn_alias_dict, format='%s%s%s\n')

	# Resolve wildcards in task input files
	def _get_task_fn_list():
		for fpi in task.get_sb_in_fpi_list():
			matched = glob.glob(fpi.path_abs)
			if matched != []:
				for match in matched:
					yield match
			else:
				# no wildcard match - pass the path through unchanged
				yield fpi.path_abs
	return lchain([self._remote_event_handler.get_file_list(), dep_fn_list, _get_task_fn_list(), [
		VirtualFile('_config.sh', sorted(task_config_str_list)),
		VirtualFile('_varmap.dat', sorted(vn_alias_str_list))]])
def __init__(self, config, name):
	"""Task for running executables from a ROOT installation.

	The ROOT directory is taken from the 'root path' option, defaulting to
	the ROOTSYS environment variable; executables found in <root>/bin are
	treated as built-ins that are not sent with the sandbox.
	"""
	# Determine ROOT path from previous settings / environment / config file
	def _check_root_dn(loc, obj):
		# validator: accept only existing directories
		if os.path.isdir(obj):
			return obj
		raise ConfigError(
			'Either set environment variable "ROOTSYS" or set option "root path"!'
		)
	self._root_dn = config.get_dn('root path', os.environ.get('ROOTSYS', ''),
		persistent=True, on_change=TriggerInit('sandbox'), on_valid=_check_root_dn)
	logging.getLogger('task').info('Using the following ROOT path: %s', self._root_dn)
	# Special handling for executables bundled with ROOT
	self._executable = config.get('executable', on_change=TriggerInit('sandbox'))
	exe_full = os.path.join(self._root_dn, 'bin', self._executable.lstrip('/'))
	self._is_builtin = os.path.exists(exe_full)
	if self._is_builtin:
		# built-in executables are expected to exist on the worker node
		config.set('send executable', 'False')
		# TODO(review): store resolved built-in executable path?
	# Apply default handling from UserTask
	UserTask.__init__(self, config, name)
	self._update_map_error_code2msg(get_path_share('gc-run.root.sh'))
	# TODO: Collect lib files needed by executable
	self._lib_fn_list = []
def _get_sandbox_file_list(self, task, sm_list):
	"""Collect all files that have to be packed into the job sandbox.

	Combines monitoring files, dependency environment scripts, the task's
	(wildcard expanded) input files and two generated virtual files with
	the task configuration and the variable alias map.
	"""
	# Prepare all input files
	# Dependency names declared by the task and the storage managers
	dep_list = set(ichain(imap(lambda x: x.get_dependency_list(), [task] + sm_list)))
	# Resolve each dependency to an 'env.<name>.sh' script in any package share dir
	dep_fn_list = lmap(lambda dep: resolve_path('env.%s.sh' % dep,
		lmap(lambda pkg: get_path_share('', pkg=pkg), os.listdir(get_path_pkg()))), dep_list)
	# Merge monitoring environment with task / storage manager dictionaries
	task_config_dict = dict_union(self._remote_event_handler.get_mon_env_dict(),
		*imap(lambda x: x.get_task_dict(), [task] + sm_list))
	task_config_dict.update({'GC_DEPFILES': str.join(' ', dep_list),
		'GC_USERNAME': self._token.get_user_name(), 'GC_WMS_NAME': self._name})
	task_config_str_list = DictFormat(escape_strings=True).format(
		task_config_dict, format='export %s%s%s\n')
	# Identity aliases for the monitoring variables, extended by task aliases
	vn_alias_dict = dict(izip(self._remote_event_handler.get_mon_env_dict().keys(),
		self._remote_event_handler.get_mon_env_dict().keys()))
	vn_alias_dict.update(task.get_var_alias_map())
	# note: 'delimeter' is the spelling used by the DictFormat API
	vn_alias_str_list = DictFormat(delimeter=' ').format(vn_alias_dict, format='%s%s%s\n')

	# Resolve wildcards in task input files
	def _get_task_fn_list():
		for fpi in task.get_sb_in_fpi_list():
			matched = glob.glob(fpi.path_abs)
			if matched != []:
				for match in matched:
					yield match
			else:
				# no wildcard match - pass the path through unchanged
				yield fpi.path_abs
	return lchain([self._remote_event_handler.get_file_list(), dep_fn_list, _get_task_fn_list(), [
		VirtualFile('_config.sh', sorted(task_config_str_list)),
		VirtualFile('_varmap.dat', sorted(vn_alias_str_list))]])
def _parse_cmd_line(cmd_line_args):
	"""Parse the grid-control command line into (opts, args).

	Registers all supported options (including undocumented debug and
	deprecated report options), validates that exactly one config file
	argument is given and configures the preliminary log level.
	"""
	# grid-control command line parser
	parser = Options(usage='%s [OPTIONS] <config file>', add_help_option=False)
	parser.add_bool(None, ' ', 'debug', default=False)
	parser.add_bool(None, ' ', 'help-conf', default=False)
	parser.add_bool(None, ' ', 'help-confmin', default=False)
	parser.add_bool(None, 'c', 'continuous', default=False)
	parser.add_bool(None, 'h', 'help', default=False)
	parser.add_bool(None, 'i', 'init', default=False)
	parser.add_bool(None, 'q', 'resync', default=False)
	# '-s' disables submission, hence default True on the 'submission' dest
	parser.add_bool(None, 's', 'no-submission', default=True, dest='submission')
	parser.add_bool(None, 'G', 'gui', default=False, dest='gui_ansi')
	parser.add_accu(None, 'v', 'verbose')
	parser.add_list(None, 'l', 'logging')
	parser.add_list(None, 'o', 'override')
	parser.add_text(None, 'a', 'action')
	parser.add_text(None, 'd', 'delete')
	parser.add_text(None, 'C', 'cancel')
	parser.add_text(None, 'J', 'job-selector')
	parser.add_text(None, 'n', 'jobs')
	parser.add_text(None, 'm', 'max-retry')
	parser.add_text(None, ' ', 'reset')
	parser.add_bool(None, ' ', 'debug-console', False)  # undocumented debug option
	parser.add_list(None, ' ', 'debug-trace')  # undocumented debug option
	# Deprecated options - refer to new report script instead
	for (sopt, lopt) in [('-r', 'report'), ('-R', 'site-report'), ('-T', 'time-report'),
			('-M', 'task-report'), ('-D', 'detail-report'), ('', 'help-vars')]:
		parser.add_bool(None, sopt, lopt, default=False, dest='old_report')
	(opts, args, _) = parser.parse(args=cmd_line_args)
	opts.gui = None
	if opts.gui_ansi:
		opts.gui = 'ANSIGUI'
	opts.continuous = opts.continuous or None  # either True or None
	# Display help
	if opts.help:
		parser.exit_with_usage(msg=SafeFile(
			get_path_share('help.txt')).read_close(), show_help=False)
	# Require single config file argument
	if len(args) == 0:
		parser.exit_with_usage(msg='Config file not specified!')
	elif len(args) > 1:
		parser.exit_with_usage(msg='Invalid command line arguments: %r' % cmd_line_args)
	# Warn about deprecated report options
	if opts.old_report:
		deprecated(
			'Please use the more versatile report tool in the scripts directory!'
		)
	# Configure preliminary logging
	logging.getLogger().setLevel(max(1, logging.DEFAULT - opts.verbose))
	return (opts, args)
def __init__(self, config, name):
	"""Set up the host-based local WMS with its submit/check/cancel helpers."""
	submit_script = get_path_share('gc-host.sh')
	# Wrap the plain job check with a filter that handles jobs missing a state
	job_check = CheckJobsMissingState(config, HostCheckJobs(config))
	job_cancel = HostCancelJobs(config)
	LocalWMS.__init__(self, config, name, submit_exec=submit_script,
		check_executor=job_check, cancel_executor=job_cancel)
def _get_submit_proc(self, jobnum, sandbox, job_name, reqs):
	"""Build the local process that submits the given job to the batch system."""
	stdout_fn = os.path.join(sandbox, 'gc.stdout')
	stderr_fn = os.path.join(sandbox, 'gc.stderr')
	# Assemble the argument list: configured options, batch submit arguments,
	# the wrapper script and finally the job specific arguments
	arg_list = list(self._submit_opt_list)
	arg_list += shlex.split(self._get_submit_arguments(jobnum, job_name, reqs, sandbox, stdout_fn, stderr_fn))
	arg_list += [get_path_share('gc-local.sh')]
	arg_list += shlex.split(self._get_job_arguments(jobnum, sandbox))
	return LocalProcess(self._submit_exec, *arg_list)
def __init__(self, config, name, check_executor, cancel_executor):
	"""Base setup for batch-system backends.

	Prepares the job check/cancel executors, writes the version-stamped
	gc-run.lib runtime library into the work directory and initializes
	the storage managers and the access token.
	"""
	WMS.__init__(self, config, name)
	for executor in [check_executor, cancel_executor]:
		executor.setup(self._log)
	(self._check_executor, self._cancel_executor) = (check_executor, cancel_executor)
	if self._name != self.__class__.__name__.upper():
		self._log.info('Using batch system: %s (%s)', self.__class__.__name__, self._name)
	else:
		self._log.info('Using batch system: %s', self._name)
	# Copy gc-run.lib into the work dir, substituting the version placeholder
	self._runlib = config.get_work_path('gc-run.lib')
	fp = SafeFile(self._runlib, 'w')
	content = SafeFile(get_path_share('gc-run.lib')).read()
	fp.write(
		content.replace('__GC_VERSION__', __import__('grid_control').__version__))
	fp.close()
	self._path_output = config.get_work_path('output')
	self._path_file_cache = config.get_work_path('files')
	ensure_dir_exists(self._path_output, 'output directory')
	self._path_fail = config.get_work_path('fail')
	# Initialise access token and storage managers
	# UI -> SE -> WN
	self._sm_se_in = config.get_plugin('se input manager', 'SEStorageManager',
		cls=StorageManager, bind_kwargs={'tags': [self]}, pargs=('se', 'se input', 'SE_INPUT'))
	self._sm_sb_in = config.get_plugin('sb input manager', 'LocalSBStorageManager',
		cls=StorageManager, bind_kwargs={'tags': [self]}, pargs=('sandbox', 'sandbox', 'SB_INPUT'))
	# UI <- SE <- WN
	self._sm_se_out = config.get_plugin('se output manager', 'SEStorageManager',
		cls=StorageManager, bind_kwargs={'tags': [self]}, pargs=('se', 'se output', 'SE_OUTPUT'))
	self._sm_sb_out = None
	self._token = config.get_composited_plugin(['proxy', 'access token'],
		'TrivialAccessToken', 'MultiAccessToken', cls=AccessToken,
		bind_kwargs={'inherit': True, 'tags': [self]})
	self._output_fn_list = None
def _get_submit_proc(self, jobnum, sandbox, job_name, reqs):
	"""Build the local process that submits the given job to the batch system."""
	stdout_fn = os.path.join(sandbox, 'gc.stdout')
	stderr_fn = os.path.join(sandbox, 'gc.stderr')
	# Assemble the argument list: configured options, batch submit arguments,
	# the wrapper script and finally the job specific arguments
	arg_list = list(self._submit_opt_list)
	arg_list += shlex.split(self._get_submit_arguments(jobnum, job_name, reqs, sandbox, stdout_fn, stderr_fn))
	arg_list += [get_path_share('gc-local.sh')]
	arg_list += shlex.split(self._get_job_arguments(jobnum, sandbox))
	return LocalProcess(self._submit_exec, *arg_list)
def _parse_cmd_line(cmd_line_args):
	"""Parse the grid-control command line into (opts, args).

	Registers all supported options (including undocumented debug and
	deprecated report options), validates that exactly one config file
	argument is given and configures the preliminary log level.
	"""
	# grid-control command line parser
	parser = Options(usage='%s [OPTIONS] <config file>', add_help_option=False)
	parser.add_bool(None, ' ', 'debug', default=False)
	parser.add_bool(None, ' ', 'help-conf', default=False)
	parser.add_bool(None, ' ', 'help-confmin', default=False)
	parser.add_bool(None, 'c', 'continuous', default=False)
	parser.add_bool(None, 'h', 'help', default=False)
	parser.add_bool(None, 'i', 'init', default=False)
	parser.add_bool(None, 'q', 'resync', default=False)
	# '-s' disables submission, hence default True on the 'submission' dest
	parser.add_bool(None, 's', 'no-submission', default=True, dest='submission')
	parser.add_bool(None, 'G', 'gui', default=False, dest='gui_ansi')
	parser.add_accu(None, 'v', 'verbose')
	parser.add_list(None, 'l', 'logging')
	parser.add_list(None, 'o', 'override')
	parser.add_text(None, 'a', 'action')
	parser.add_text(None, 'd', 'delete')
	parser.add_text(None, 'C', 'cancel')
	parser.add_text(None, 'J', 'job-selector')
	parser.add_text(None, 'n', 'jobs')
	parser.add_text(None, 'm', 'max-retry')
	parser.add_text(None, ' ', 'reset')
	parser.add_bool(None, ' ', 'debug-console', False)  # undocumented debug option
	parser.add_list(None, ' ', 'debug-trace')  # undocumented debug option
	# Deprecated options - refer to new report script instead
	for (sopt, lopt) in [('-r', 'report'), ('-R', 'site-report'), ('-T', 'time-report'),
			('-M', 'task-report'), ('-D', 'detail-report'), ('', 'help-vars')]:
		parser.add_bool(None, sopt, lopt, default=False, dest='old_report')
	(opts, args, _) = parser.parse(args=cmd_line_args)
	opts.gui = None
	if opts.gui_ansi:
		opts.gui = 'ANSIGUI'
	opts.continuous = opts.continuous or None  # either True or None
	# Display help
	if opts.help:
		parser.exit_with_usage(msg=SafeFile(get_path_share('help.txt')).read_close(),
			show_help=False)
	# Require single config file argument
	if len(args) == 0:
		parser.exit_with_usage(msg='Config file not specified!')
	elif len(args) > 1:
		parser.exit_with_usage(msg='Invalid command line arguments: %r' % cmd_line_args)
	# Warn about deprecated report options
	if opts.old_report:
		deprecated('Please use the more versatile report tool in the scripts directory!')
	# Configure preliminary logging
	logging.getLogger().setLevel(max(1, logging.DEFAULT - opts.verbose))
	return (opts, args)
def _match_sites(self, endpoint):
	"""Query the given WMS endpoint for matching sites.

	Runs the job-list-match executable against a null JDL and collects the
	reported sites. If the process does not finish within the timeout, the
	per-endpoint failure counter is incremented and the endpoint is dropped
	from the candidate list after 10 failures.
	"""
	activity = Activity('Discovering available WMS services - testing %s' % endpoint)
	check_arg_list = ['-a']
	if endpoint:
		check_arg_list.extend(['-e', endpoint])
	check_arg_list.append(get_path_share('null.jdl'))
	proc = LocalProcess(self._job_list_match_exec, *check_arg_list)
	result = []
	for line in proc.stdout.iter(timeout=3):
		if line.startswith(' - '):
			result.append(line[3:].strip())
	activity.finish()
	if proc.status(timeout=0) is None:
		# Process timed out - compute the failure count once instead of
		# reading the dictionary three times
		timeout_count = self._wms_timeout_dict.get(endpoint, 0) + 1
		self._wms_timeout_dict[endpoint] = timeout_count
		if timeout_count > 10:  # remove endpoints after 10 failures
			self._wms_list_all.remove(endpoint)
		return []
	return result
def _get_script_and_fn_list(self, task):
	"""Split the input transfers into the run script command and sandbox files.

	Remote pool types (SSH / GSISSH) reference the transferred target files
	in the remote output directory, local pool types use the source paths.
	"""
	script_cmd = ''
	sb_in_fn_list = []
	use_remote = self._remote_type in (PoolType.SSH, PoolType.GSISSH)
	for (_, source, target) in self._get_in_transfer_info_list(task):
		if use_remote:
			marker = target
			fn = os.path.join(self._get_remote_output_dn(), target)
		else:
			marker = source
			fn = source
		# handle gc executable separately
		if 'gc-run.sh' in marker:
			script_cmd = fn
		else:
			sb_in_fn_list.append(fn)
	if self._universe.lower() == 'docker':
		script_cmd = './gc-run.sh'
		sb_in_fn_list.append(get_path_share('gc-run.sh'))
	return (script_cmd, sb_in_fn_list)
def gc_run(args=None, intro=True):
	"""Main entry point: run grid-control with the given command line arguments.

	Displays the logo/version banner (unless disabled via the
	GC_DISABLE_INTRO environment variable or intro=False), then delegates
	to _gc_run and maps uncaught exceptions to a software error exit code.
	"""
	# display the 'grid-control' logo and version
	if intro and not os.environ.get('GC_DISABLE_INTRO'):
		sys.stdout.write(SafeFile(get_path_share('logo.txt'), 'r').read_close())
		sys.stdout.write('Revision: %s\n' % get_version())
	pyver = (sys.version_info[0], sys.version_info[1])
	if pyver < (2, 3):
		deprecated('This python version (%d.%d) is not supported anymore!' % pyver)
	atexit.register(lambda: sys.stdout.write('\n'))  # ensure output ends with a newline
	# main try... except block to catch exceptions and show error message
	try:
		return _gc_run(args)
	except SystemExit:  # avoid getting caught for Python < 2.5
		abort(True)
		raise
	except Exception:  # coverage overrides sys.excepthook
		abort(True)
		gc_excepthook(*sys.exc_info())
		# os.EX_SOFTWARE only exists on POSIX - fall back to its value (70)
		sys.exit(getattr(os, 'EX_SOFTWARE', 70))
def gc_run(args=None, intro=True):
	"""Main entry point: run grid-control with the given command line arguments.

	Displays the logo/version banner (unless disabled via the
	GC_DISABLE_INTRO environment variable or intro=False), then delegates
	to _gc_run and maps uncaught exceptions to a software error exit code.
	"""
	# display the 'grid-control' logo and version
	if intro and not os.environ.get('GC_DISABLE_INTRO'):
		sys.stdout.write(SafeFile(get_path_share('logo.txt'), 'r').read_close())
		sys.stdout.write('Revision: %s\n' % get_version())
	pyver = (sys.version_info[0], sys.version_info[1])
	if pyver < (2, 3):
		deprecated('This python version (%d.%d) is not supported anymore!' % pyver)
	atexit.register(lambda: sys.stdout.write('\n'))  # ensure output ends with a newline
	# main try... except block to catch exceptions and show error message
	try:
		return _gc_run(args)
	except SystemExit:  # avoid getting caught for Python < 2.5
		abort(True)
		raise
	except Exception:  # coverage overrides sys.excepthook
		abort(True)
		gc_excepthook(*sys.exc_info())
		# os.EX_SOFTWARE only exists on POSIX - fall back to its value (70)
		sys.exit(getattr(os, 'EX_SOFTWARE', 70))
def _match_sites(self, endpoint):
	"""Query the given WMS endpoint for matching sites.

	Runs the job-list-match executable against a null JDL and collects the
	reported sites. If the process does not finish within the timeout, the
	per-endpoint failure counter is incremented and the endpoint is dropped
	from the candidate list after 10 failures.
	"""
	activity = Activity('Discovering available WMS services - testing %s' % endpoint)
	check_arg_list = ['-a']
	if endpoint:
		check_arg_list.extend(['-e', endpoint])
	check_arg_list.append(get_path_share('null.jdl'))
	proc = LocalProcess(self._job_list_match_exec, *check_arg_list)
	result = []
	for line in proc.stdout.iter(timeout=3):
		if line.startswith(' - '):
			result.append(line[3:].strip())
	activity.finish()
	if proc.status(timeout=0) is None:
		# Process timed out - compute the failure count once instead of
		# reading the dictionary three times
		timeout_count = self._wms_timeout_dict.get(endpoint, 0) + 1
		self._wms_timeout_dict[endpoint] = timeout_count
		if timeout_count > 10:  # remove endpoints after 10 failures
			self._wms_list_all.remove(endpoint)
		return []
	return result
def __init__(self, config, name, check_executor, cancel_executor):
	"""Base setup for batch-system backends.

	Prepares the job check/cancel executors, writes the version-stamped
	gc-run.lib runtime library into the work directory and initializes
	the storage managers and the access token.
	"""
	WMS.__init__(self, config, name)
	for executor in [check_executor, cancel_executor]:
		executor.setup(self._log)
	(self._check_executor, self._cancel_executor) = (check_executor, cancel_executor)
	if self._name != self.__class__.__name__.upper():
		self._log.info('Using batch system: %s (%s)', self.__class__.__name__, self._name)
	else:
		self._log.info('Using batch system: %s', self._name)
	# Copy gc-run.lib into the work dir, substituting the version placeholder
	self._runlib = config.get_work_path('gc-run.lib')
	fp = SafeFile(self._runlib, 'w')
	content = SafeFile(get_path_share('gc-run.lib')).read()
	fp.write(content.replace('__GC_VERSION__', __import__('grid_control').__version__))
	fp.close()
	self._path_output = config.get_work_path('output')
	self._path_file_cache = config.get_work_path('files')
	ensure_dir_exists(self._path_output, 'output directory')
	self._path_fail = config.get_work_path('fail')
	# Initialise access token and storage managers
	# UI -> SE -> WN
	self._sm_se_in = config.get_plugin('se input manager', 'SEStorageManager',
		cls=StorageManager, bind_kwargs={'tags': [self]}, pargs=('se', 'se input', 'SE_INPUT'))
	self._sm_sb_in = config.get_plugin('sb input manager', 'LocalSBStorageManager',
		cls=StorageManager, bind_kwargs={'tags': [self]}, pargs=('sandbox', 'sandbox', 'SB_INPUT'))
	# UI <- SE <- WN
	self._sm_se_out = config.get_plugin('se output manager', 'SEStorageManager',
		cls=StorageManager, bind_kwargs={'tags': [self]}, pargs=('se', 'se output', 'SE_OUTPUT'))
	self._sm_sb_out = None
	self._token = config.get_composited_plugin(['proxy', 'access token'],
		'TrivialAccessToken', 'MultiAccessToken', cls=AccessToken,
		bind_kwargs={'inherit': True, 'tags': [self]})
	self._output_fn_list = None
def __init__(self, config, name):
	"""Task for running executables from a ROOT installation.

	The ROOT directory is taken from the 'root path' option, defaulting to
	the ROOTSYS environment variable; executables found in <root>/bin are
	treated as built-ins that are not sent with the sandbox.
	"""
	# Determine ROOT path from previous settings / environment / config file
	def _check_root_dn(loc, obj):
		# validator: accept only existing directories
		if os.path.isdir(obj):
			return obj
		raise ConfigError('Either set environment variable "ROOTSYS" or set option "root path"!')
	self._root_dn = config.get_dn('root path', os.environ.get('ROOTSYS', ''),
		persistent=True, on_change=TriggerInit('sandbox'), on_valid=_check_root_dn)
	logging.getLogger('task').info('Using the following ROOT path: %s', self._root_dn)
	# Special handling for executables bundled with ROOT
	self._executable = config.get('executable', on_change=TriggerInit('sandbox'))
	exe_full = os.path.join(self._root_dn, 'bin', self._executable.lstrip('/'))
	self._is_builtin = os.path.exists(exe_full)
	if self._is_builtin:
		# built-in executables are expected to exist on the worker node
		config.set('send executable', 'False')
		# TODO(review): store resolved built-in executable path?
	# Apply default handling from UserTask
	UserTask.__init__(self, config, name)
	self._update_map_error_code2msg(get_path_share('gc-run.root.sh'))
	# TODO: Collect lib files needed by executable
	self._lib_fn_list = []
def __init__(self, config, name):
	"""Task for running CMSSW jobs (cmsRun) including project area handling.

	Registers CMS specific defaults for dataset handling, resolves the
	cmsRun run script, prepares the optional project area tarball and
	processes the cmssw config files.
	"""
	# CMS specific defaults - applied before SCRAMTask reads the config
	config.set('se input timeout', '0:30')
	config.set('application', 'cmsRun', section='dashboard')
	config.set('dataset provider', 'DBS3Provider')
	config.set('dataset splitter', 'EventBoundarySplitter')
	config.set('dataset processor', 'LumiDataProcessor', '+=')
	config.set('partition processor', 'BasicPartitionProcessor', '-=')
	config.set('partition processor',
		'LFNPartitionProcessor LumiPartitionProcessor CMSSWPartitionProcessor', '+=')
	self._needed_vn_set = set()
	SCRAMTask.__init__(self, config, name)
	self._uii = UserInputInterface()

	# Setup file path informations
	self._cmsrun_output_files = ['cmssw.dbs.tar.gz']
	if self._do_gzip_std_output:
		self._cmsrun_output_files.append('cmssw.log.gz')
	self._script_fpi = Result(path_rel='gc-run.cmssw.sh',
		path_abs=get_path_share('gc-run.cmssw.sh', pkg='grid_control_cms'))

	if self._scram_project != 'CMSSW':
		raise ConfigError('Project area contains no CMSSW project')

	# Remember the release top of an existing project area (if any)
	self._old_release_top = None
	if self._project_area:
		scram_arch_env_path = os.path.join(self._project_area, '.SCRAM',
			self._scram_arch, 'Environment')
		self._old_release_top = self._parse_scram_file(scram_arch_env_path).get('RELEASETOP')

	self._update_map_error_code2msg(get_path_share('gc-run.cmssw.sh', pkg='grid_control_cms'))

	self._project_area_tarball_on_se = config.get_bool(['se runtime', 'se project area'], True)
	self._project_area_tarball = config.get_work_path('cmssw-project-area.tar.gz')

	# Prolog / Epilog script support - warn about old syntax
	self.prolog = TaskExecutableWrapper(config, 'prolog', '')
	self.epilog = TaskExecutableWrapper(config, 'epilog', '')
	if config.get_fn_list('executable', []) != []:
		raise ConfigError('Prefix executable and argument options with either prolog or epilog!')
	self.arguments = config.get('arguments', '')

	# Get cmssw config files and check their existance
	# Check that for dataset jobs the necessary placeholders are in the config file
	if not self._has_dataset:
		self._events_per_job = config.get('events per job', '0')  # this can be a variable like @USER_EVENTS@!
		self._needed_vn_set.add('MAX_EVENTS')
	fragment = config.get_fn('instrumentation fragment',
		get_path_share('fragmentForCMSSW.py', pkg='grid_control_cms'))
	self._config_fn_list = self._process_config_file_list(config,
		config.get_fn_list('config file', self._get_config_file_default()), fragment,
		auto_prepare=config.get_bool('instrumentation', True),
		must_prepare=self._has_dataset)

	# Create project area tarball
	if self._project_area and not os.path.exists(self._project_area_tarball):
		config.set_state(True, 'init', detail='sandbox')
	# Information about search order for software environment
	self._cmssw_search_dict = self._get_cmssw_path_list(config)
	if config.get_state('init', detail='sandbox'):
		msg = 'CMSSW tarball already exists! Do you want to regenerate it?'
		if os.path.exists(self._project_area_tarball) and not self._uii.prompt_bool(msg, True):
			return
		# Generate CMSSW tarball
		if self._project_area:
			create_tarball(_match_files(self._project_area, self._project_area_matcher,
				self._always_matcher.create_matcher(''), self._project_area_base_fn),
				name=self._project_area_tarball)
		if self._project_area_tarball_on_se:
			config.set_state(True, 'init', detail='storage')
def _get_in_transfer_info_list(self, task):
	"""Return (description, source path, target name) tuples for input transfers."""
	transfer_info_list = []
	transfer_info_list.append(('GC Runtime', get_path_share('gc-run.sh'), 'gc-run.sh'))
	transfer_info_list.append(('GC Runtime library', self._runlib, 'gc-run.lib'))
	transfer_info_list.append(('GC Sandbox', self._get_sandbox_name(task), 'gc-sandbox.tar.gz'))
	return transfer_info_list
def get_script(self):
	"""Yield the dashboard monitoring script executed on the worker node."""
	yield get_path_share('mon.dashboard.sh', pkg='grid_control_cms')
def __init__(self, config, name):
	"""Task for running CMSSW jobs (cmsRun) including project area handling.

	Registers CMS specific defaults for dataset handling, resolves the
	cmsRun run script, prepares the optional project area tarball and
	processes the cmssw config files.
	"""
	# CMS specific defaults - applied before SCRAMTask reads the config
	config.set('se input timeout', '0:30')
	config.set('application', 'cmsRun', section='dashboard')
	config.set('dataset provider', 'DBS3Provider')
	config.set('dataset splitter', 'EventBoundarySplitter')
	config.set('dataset processor', 'LumiDataProcessor', '+=')
	config.set('partition processor', 'BasicPartitionProcessor', '-=')
	config.set('partition processor',
		'LFNPartitionProcessor LumiPartitionProcessor CMSSWPartitionProcessor', '+=')
	self._needed_vn_set = set()
	SCRAMTask.__init__(self, config, name)
	self._uii = UserInputInterface()

	# Setup file path informations
	self._cmsrun_output_files = ['cmssw.dbs.tar.gz']
	if self._do_gzip_std_output:
		self._cmsrun_output_files.append('cmssw.log.gz')
	self._script_fpi = Result(path_rel='gc-run.cmssw.sh',
		path_abs=get_path_share('gc-run.cmssw.sh', pkg='grid_control_cms'))

	if self._scram_project != 'CMSSW':
		raise ConfigError('Project area contains no CMSSW project')

	# Remember the release top of an existing project area (if any)
	self._old_release_top = None
	if self._project_area:
		scram_arch_env_path = os.path.join(self._project_area, '.SCRAM',
			self._scram_arch, 'Environment')
		self._old_release_top = self._parse_scram_file(scram_arch_env_path).get('RELEASETOP')

	self._update_map_error_code2msg(get_path_share('gc-run.cmssw.sh', pkg='grid_control_cms'))

	self._project_area_tarball_on_se = config.get_bool(['se runtime', 'se project area'], True)
	self._project_area_tarball = config.get_work_path('cmssw-project-area.tar.gz')

	# Prolog / Epilog script support - warn about old syntax
	self.prolog = TaskExecutableWrapper(config, 'prolog', '')
	self.epilog = TaskExecutableWrapper(config, 'epilog', '')
	if config.get_fn_list('executable', []) != []:
		raise ConfigError('Prefix executable and argument options with either prolog or epilog!')
	self.arguments = config.get('arguments', '')

	# Get cmssw config files and check their existance
	# Check that for dataset jobs the necessary placeholders are in the config file
	if not self._has_dataset:
		self._events_per_job = config.get('events per job', '0')  # this can be a variable like @USER_EVENTS@!
		self._needed_vn_set.add('MAX_EVENTS')
	fragment = config.get_fn('instrumentation fragment',
		get_path_share('fragmentForCMSSW.py', pkg='grid_control_cms'))
	self._config_fn_list = self._process_config_file_list(config,
		config.get_fn_list('config file', self._get_config_file_default()), fragment,
		auto_prepare=config.get_bool('instrumentation', True),
		must_prepare=self._has_dataset)

	# Create project area tarball
	if self._project_area and not os.path.exists(self._project_area_tarball):
		config.set_state(True, 'init', detail='sandbox')
	# Information about search order for software environment
	self._cmssw_search_dict = self._get_cmssw_path_list(config)
	if config.get_state('init', detail='sandbox'):
		msg = 'CMSSW tarball already exists! Do you want to regenerate it?'
		if os.path.exists(self._project_area_tarball) and not self._uii.prompt_bool(msg, True):
			return
		# Generate CMSSW tarball
		if self._project_area:
			create_tarball(_match_files(self._project_area, self._project_area_matcher,
				self._always_matcher.create_matcher(''), self._project_area_base_fn),
				name=self._project_area_tarball)
		if self._project_area_tarball_on_se:
			config.set_state(True, 'init', detail='storage')
def get_file_list(self):
	"""Yield the absolute paths of all dashboard monitoring files."""
	# The wrapper script lives in the package share directory
	yield get_path_share('mon.dashboard.sh', pkg='grid_control_cms')
	# The DashboardAPI python modules are shipped next to the share directory
	api_module_list = ('DashboardAPI.py', 'Logger.py', 'apmon.py', 'report.py')
	for api_module in api_module_list:
		yield get_path_share('..', 'DashboardAPI', api_module, pkg='grid_control_cms')
def get_sb_in_fpi_list(self):
	"""Return sandbox input file infos: user files, ROOT libraries and the run script."""
	fpi_list = UserTask.get_sb_in_fpi_list(self) + self._lib_fn_list
	run_script_fpi = Result(path_abs=get_path_share('gc-run.root.sh'), path_rel='gc-run.root.sh')
	return fpi_list + [run_script_fpi]
def __init__(self, config, name):
	"""Base task setup: requirements, task identity, storage and parameters.

	Reads job requirements from the 'jobs' config sections, computes or
	restores the persistent task id/date/time, configures storage limits
	and sandbox file lists and initializes the parameter source manager.
	"""
	# Read configuration options and init vars
	NamedPlugin.__init__(self, config, name)
	init_sandbox = TriggerInit('sandbox')
	self._var_checker = NoVarCheck(config)
	# Task requirements
	# Move this into parameter manager?
	jobs_config = config.change_view(view_class='TaggedConfigView',
		add_sections=['jobs'], add_tags=[self])
	self.wall_time = jobs_config.get_time('wall time', on_change=None)
	self._cpu_time = jobs_config.get_time('cpu time', self.wall_time, on_change=None)
	self._cores = jobs_config.get_int(['cores', 'cpus'], 1, on_change=None)
	self._memory = jobs_config.get_int('memory', -1, on_change=None)
	self._job_timeout = jobs_config.get_time('node timeout', -1, on_change=init_sandbox)

	# Compute / get task ID (persistent across runs)
	self._task_id = config.get('task id', 'GC' + md5_hex(str(time.time()))[:12], persistent=True)
	self._task_date = config.get('task date', time.strftime('%Y-%m-%d'),
		persistent=True, on_change=init_sandbox)
	self._task_time = config.get('task time', time.strftime('%H%M%S'),
		persistent=True, on_change=init_sandbox)
	task_name_generator = config.get_plugin('task name generator', 'DefaultTaskName', cls=TaskNamePlugin)
	self._task_name = task_name_generator.get_name(self)
	self._task_config_name = config.get_config_name()
	self._job_name_generator = config.get_plugin('job name generator', 'DefaultJobName', cls=JobNamePlugin)

	# Storage setup
	storage_config = config.change_view(view_class='TaggedConfigView',
		set_classes=None, set_names=None, add_sections=['storage'], add_tags=[self])
	scratch_space_used = storage_config.get_int('scratch space used', 5000, on_change=init_sandbox)
	lz_space_used = storage_config.get_int('landing zone space used', 100, on_change=init_sandbox)
	self._task_var_dict = {
		# Space limits
		'SCRATCH_UL': scratch_space_used,
		'SCRATCH_LL': storage_config.get_int('scratch space left', 1, on_change=init_sandbox),
		'LANDINGZONE_UL': lz_space_used,
		'LANDINGZONE_LL': storage_config.get_int('landing zone space left', 1, on_change=init_sandbox),
	}
	storage_config.set('se output pattern', 'job_@GC_JOB_ID@_@X@')
	self._se_min_size = storage_config.get_int('se min size', -1, on_change=init_sandbox)
	self._disk_min = max(scratch_space_used, lz_space_used)
	self._sb_in_fn_list = config.get_path_list('input files', [], on_change=init_sandbox)
	self._sb_out_fn_list = config.get_list('output files', [], on_change=init_sandbox)
	self._do_gzip_std_output = config.get_bool('gzip output', True, on_change=init_sandbox)
	self._subst_files = config.get_list('subst files', [], on_change=init_sandbox)
	self._dependencies = lmap(str.lower, config.get_list('depends', [], on_change=init_sandbox))

	# Get error messages from gc-run.lib comments
	self.map_error_code2msg = {}
	self._update_map_error_code2msg(get_path_share('gc-run.lib'))

	# Init parameter source manager
	psrc_repository = {}
	self._setup_repository(config, psrc_repository)
	pfactory = config.get_plugin('internal parameter factory', 'BasicParameterFactory',
		cls=ParameterFactory, bind_kwargs={'tags': [self], 'inherit': True})
	self._source = config.get_plugin('parameter adapter', 'TrackedParameterAdapter',
		cls=ParameterAdapter, pargs=(pfactory.get_psrc(psrc_repository),))
	self._log.log(logging.DEBUG3, 'Using parameter adapter %s', repr(self._source))
	self._log.info('Current task ID: %s', self._task_id)
	self._log.info('Task started on: %s', self._task_date)
def _get_in_transfer_info_list(self, task):
	"""Return (description, source path, target name) tuples for input transfers."""
	transfer_info_list = []
	transfer_info_list.append(('GC Runtime', get_path_share('gc-run.sh'), 'gc-run.sh'))
	transfer_info_list.append(('GC Runtime library', self._runlib, 'gc-run.lib'))
	transfer_info_list.append(('GC Sandbox', self._get_sandbox_name(task), 'gc-sandbox.tar.gz'))
	return transfer_info_list
def get_sb_in_fpi_list(self):
	"""Return sandbox input file infos: user files, ROOT libraries and the run script."""
	fpi_list = UserTask.get_sb_in_fpi_list(self) + self._lib_fn_list
	run_script_fpi = Result(path_abs=get_path_share('gc-run.root.sh'), path_rel='gc-run.root.sh')
	return fpi_list + [run_script_fpi]
def get_script(self):
	"""Yield the dashboard monitoring script executed on the worker node."""
	yield get_path_share('mon.dashboard.sh', pkg='grid_control_cms')
def __init__(self, config, name):
	"""Base task setup: requirements, task identity, storage and parameters.

	Reads job requirements from the 'jobs' config sections, computes or
	restores the persistent task id/date/time, configures storage limits
	and sandbox file lists and initializes the parameter source manager.
	"""
	# Read configuration options and init vars
	NamedPlugin.__init__(self, config, name)
	init_sandbox = TriggerInit('sandbox')
	self._var_checker = NoVarCheck(config)
	# Task requirements
	# Move this into parameter manager?
	jobs_config = config.change_view(view_class='TaggedConfigView',
		add_sections=['jobs'], add_tags=[self])
	self.wall_time = jobs_config.get_time('wall time', on_change=None)
	self._cpu_time = jobs_config.get_time('cpu time', self.wall_time, on_change=None)
	self._cores = jobs_config.get_int(['cores', 'cpus'], 1, on_change=None)
	self._memory = jobs_config.get_int('memory', -1, on_change=None)
	self._job_timeout = jobs_config.get_time('node timeout', -1, on_change=init_sandbox)

	# Compute / get task ID (persistent across runs)
	self._task_id = config.get('task id', 'GC' + md5_hex(str(time.time()))[:12], persistent=True)
	self._task_date = config.get('task date', time.strftime('%Y-%m-%d'),
		persistent=True, on_change=init_sandbox)
	self._task_time = config.get('task time', time.strftime('%H%M%S'),
		persistent=True, on_change=init_sandbox)
	task_name_generator = config.get_plugin('task name generator', 'DefaultTaskName', cls=TaskNamePlugin)
	self._task_name = task_name_generator.get_name(self)
	self._task_config_name = config.get_config_name()
	self._job_name_generator = config.get_plugin('job name generator', 'DefaultJobName', cls=JobNamePlugin)

	# Storage setup
	storage_config = config.change_view(view_class='TaggedConfigView',
		set_classes=None, set_names=None, add_sections=['storage'], add_tags=[self])
	scratch_space_used = storage_config.get_int('scratch space used', 5000, on_change=init_sandbox)
	lz_space_used = storage_config.get_int('landing zone space used', 100, on_change=init_sandbox)
	self._task_var_dict = {
		# Space limits
		'SCRATCH_UL': scratch_space_used,
		'SCRATCH_LL': storage_config.get_int('scratch space left', 1, on_change=init_sandbox),
		'LANDINGZONE_UL': lz_space_used,
		'LANDINGZONE_LL': storage_config.get_int('landing zone space left', 1, on_change=init_sandbox),
	}
	storage_config.set('se output pattern', 'job_@GC_JOB_ID@_@X@')
	self._se_min_size = storage_config.get_int('se min size', -1, on_change=init_sandbox)
	self._disk_min = max(scratch_space_used, lz_space_used)
	self._sb_in_fn_list = config.get_path_list('input files', [], on_change=init_sandbox)
	self._sb_out_fn_list = config.get_list('output files', [], on_change=init_sandbox)
	self._do_gzip_std_output = config.get_bool('gzip output', True, on_change=init_sandbox)
	self._subst_files = config.get_list('subst files', [], on_change=init_sandbox)
	self._dependencies = lmap(str.lower, config.get_list('depends', [], on_change=init_sandbox))

	# Get error messages from gc-run.lib comments
	self.map_error_code2msg = {}
	self._update_map_error_code2msg(get_path_share('gc-run.lib'))

	# Init parameter source manager
	psrc_repository = {}
	self._setup_repository(config, psrc_repository)
	pfactory = config.get_plugin('internal parameter factory', 'BasicParameterFactory',
		cls=ParameterFactory, bind_kwargs={'tags': [self], 'inherit': True})
	self._source = config.get_plugin('parameter adapter', 'TrackedParameterAdapter',
		cls=ParameterAdapter, pargs=(pfactory.get_psrc(psrc_repository),))
	self._log.log(logging.DEBUG3, 'Using parameter adapter %s', repr(self._source))
	self._log.info('Current task ID: %s', self._task_id)
	self._log.info('Task started on: %s', self._task_date)
def __init__(self, config, name):
	"""Set up the host-based local WMS with its submit/check/cancel helpers."""
	submit_script = get_path_share('gc-host.sh')
	# Wrap the plain job check with a filter that handles jobs missing a state
	job_check = CheckJobsMissingState(config, HostCheckJobs(config))
	job_cancel = HostCancelJobs(config)
	LocalWMS.__init__(self, config, name, submit_exec=submit_script,
		check_executor=job_check, cancel_executor=job_cancel)