def get_interval_info(self):
	"""Return check/submit/fetch polling intervals depending on the remote pool type."""
	# SSH-like pools are polled faster than the SPOOL backend;
	# anything else gets the most aggressive default intervals
	interval_map = {
		PoolType.SSH: (30, 5),
		PoolType.GSISSH: (30, 5),
		PoolType.SPOOL: (60, 10),
	}
	(idle_wait, step_wait) = interval_map.get(self._remote_type, (20, 5))
	return Result(wait_on_idle=idle_wait, wait_between_steps=step_wait)
def get_sb_in_fpi_list(self):
	"""Assemble the file path info list for the input sandbox."""
	fpi_list = list(SCRAMTask.get_sb_in_fpi_list(self))
	fpi_list.extend(self.prolog.get_sb_in_fpi_list())
	fpi_list.extend(self.epilog.get_sb_in_fpi_list())
	# ship every cmssw config file under its basename
	for config_file in self._config_fn_list:
		fpi_list.append(Result(path_abs=config_file,
			path_rel=os.path.basename(config_file)))
	# ship the project area tarball directly unless it goes via the SE
	if self._project_area and not self._project_area_tarball_on_se:
		fpi_list.append(Result(path_abs=self._project_area_tarball,
			path_rel=os.path.basename(self._project_area_tarball)))
	fpi_list.append(self._script_fpi)
	return fpi_list
def __init__(self, config, name):
	"""Initialize the light-weight CMSSW task.

	The shared run script is resolved once and used both as the job
	script shipped in the sandbox and as the source for the
	error code -> message mapping (the original resolved it twice).
	"""
	super(CMSSWLight, self).__init__(config, name)
	script_fn = get_path_share('gc-run.cmssw-light.sh', pkg='grid_control_cms')
	self._script_fpi = Result(path_rel='gc-run.cmssw-light.sh', path_abs=script_fn)
	self._update_map_error_code2msg(script_fn)
def __init__(self, config, name, wmsList):
	"""Initialize multi-WMS dispatcher: merge the timing information of all
	given backends and create the broker that selects a WMS per job."""
	WMS.__init__(self, config, name)
	self._defaultWMS = wmsList[0]
	defaultT = self._defaultWMS.getTimings()
	self._timing = Result(waitOnIdle=defaultT.waitOnIdle,
		waitBetweenSteps=defaultT.waitBetweenSteps)
	self._wmsMap = {self._defaultWMS.getObjectName().lower(): self._defaultWMS}
	for wmsObj in wmsList[1:]:
		self._wmsMap[wmsObj.getObjectName().lower()] = wmsObj
		wmsT = wmsObj.getTimings()
		# keep the most conservative (largest) polling intervals of all backends
		self._timing.waitOnIdle = max(self._timing.waitOnIdle, wmsT.waitOnIdle)
		self._timing.waitBetweenSteps = max(self._timing.waitBetweenSteps,
			wmsT.waitBetweenSteps)
	# NOTE: the unbound .keys method is passed on purpose - presumably for
	# lazy evaluation by the broker; confirm before changing
	self._brokerWMS = config.getPlugin('wms broker', 'RandomBroker',
		cls=Broker, inherit=True, tags=[self],
		pargs=('wms', 'wms', self._wmsMap.keys))
def __init__(self, config, name, backend_list):
	"""Initialize multi-backend WMS: aggregate interval info over all
	backends (most conservative wins) and set up the wms broker."""
	WMS.__init__(self, config, name)
	self._default_backend = backend_list[0]
	default_timing = self._default_backend.get_interval_info()
	self._timing = Result(wait_on_idle=default_timing.wait_on_idle,
		wait_between_steps=default_timing.wait_between_steps)
	self._map_backend_name2backend = {
		self._default_backend.get_object_name().lower(): self._default_backend}
	for backend in backend_list[1:]:
		self._map_backend_name2backend[backend.get_object_name().lower()] = backend
		wms_timing = backend.get_interval_info()
		# keep the largest intervals so no backend is polled more often than it allows
		self._timing.wait_on_idle = max(self._timing.wait_on_idle,
			wms_timing.wait_on_idle)
		self._timing.wait_between_steps = max(self._timing.wait_between_steps,
			wms_timing.wait_between_steps)
	# NOTE: the bound .keys method is passed on purpose - presumably for
	# lazy evaluation by the broker; confirm before changing
	self._broker_wms = config.get_plugin('wms broker', 'RandomBroker', cls=Broker,
		bind_kwargs={'inherit': True, 'tags': [self]},
		pargs=('wms', 'wms', self._map_backend_name2backend.keys))
def get_description(self, jobnum=None):
	"""Return a Result with the task name/id and - if jobnum is given - the job name/id."""
	if jobnum is None:
		job_name = None
	else:
		job_name = self._job_name_generator.get_name(task=self,
			task_id=self._task_id, jobnum=jobnum)
	return Result(task_id=self._task_id, task_name=self._task_name,
		job_id=jobnum, job_name=job_name)
def script_parse(self, arg_keys=None, verbose_short='v'):
	"""Register the common script options, parse the command line and
	apply the output and logging related options.

	Returns a Result with the parsed options, the positional arguments,
	the config dictionary and the parser itself.
	"""
	self.add_bool(None, None, 'parseable', default=False,
		help='Output tabular data in parseable format')
	self.add_bool(None, None, 'pivot', default=False,
		help='Output pivoted tabular data')
	self.add_text(None, None, 'textwidth', default=100,
		help='Output tabular data with selected width')
	self.add_accu(None, verbose_short, 'verbose', default=0,
		help='Increase verbosity')
	# fixed copy-pasted help text (previously duplicated 'Increase verbosity')
	self.add_list(None, None, 'logging',
		help='Set logging level of selected loggers (<logger>:<level>)')
	(opts, args, config_dict) = self.parse(arg_keys=arg_keys)
	# each -v lowers the root logger threshold below the project default level
	logging.getLogger().setLevel(logging.DEFAULT - opts.verbose)
	for (logger_name, logger_level) in parse_logging_args(opts.logging):
		logging.getLogger(logger_name).setLevel(LogLevelEnum.str2enum(logger_level))
	if opts.parseable:
		ConsoleTable.table_mode = 'ParseableTable'
		GCStreamHandler.pop_std_stream()
	elif opts.pivot:
		ConsoleTable.table_mode = 'Pivot'
	ConsoleTable.wraplen = int(opts.textwidth)
	return Result(opts=opts, args=args, config_dict=config_dict, parser=self)
def _create_fpi(fn):
	"""Wrap filename *fn* into a file path info (absolute path + basename)."""
	return Result(path_rel=os.path.basename(fn), path_abs=fn)
def get_interval_info(self):
	"""Return the configured (idle, between-steps) polling intervals."""
	# values were stored on the instance during initialization
	return Result(wait_on_idle=self._wait_idle,
		wait_between_steps=self._wait_work)
def __init__(self, config, name):
	"""Initialize a CMSSW task: set CMS specific config defaults, locate the
	run script and project area, set up prolog/epilog wrappers, collect the
	cmsRun config files and (re)generate the project area tarball if needed."""
	# CMS specific defaults for the dataset / partition machinery
	config.set('se input timeout', '0:30')
	config.set('application', 'cmsRun', section='dashboard')
	config.set('dataset provider', 'DBS3Provider')
	config.set('dataset splitter', 'EventBoundarySplitter')
	config.set('dataset processor', 'LumiDataProcessor', '+=')
	config.set('partition processor', 'BasicPartitionProcessor', '-=')
	config.set('partition processor',
		'LFNPartitionProcessor LumiPartitionProcessor CMSSWPartitionProcessor', '+=')
	# variable names that must appear in the cmsRun config files
	self._needed_vn_set = set()
	SCRAMTask.__init__(self, config, name)
	self._uii = UserInputInterface()
	# Setup file path informations
	self._cmsrun_output_files = ['cmssw.dbs.tar.gz']
	if self._do_gzip_std_output:
		self._cmsrun_output_files.append('cmssw.log.gz')
	self._script_fpi = Result(path_rel='gc-run.cmssw.sh',
		path_abs=get_path_share('gc-run.cmssw.sh', pkg='grid_control_cms'))
	if self._scram_project != 'CMSSW':
		raise ConfigError('Project area contains no CMSSW project')
	# remember the RELEASETOP of the project area (if one is configured)
	self._old_release_top = None
	if self._project_area:
		scram_arch_env_path = os.path.join(self._project_area, '.SCRAM',
			self._scram_arch, 'Environment')
		self._old_release_top = self._parse_scram_file(
			scram_arch_env_path).get('RELEASETOP')
	self._update_map_error_code2msg(
		get_path_share('gc-run.cmssw.sh', pkg='grid_control_cms'))
	# tarball can either be shipped in the sandbox or transferred via SE
	self._project_area_tarball_on_se = config.get_bool(
		['se runtime', 'se project area'], True)
	self._project_area_tarball = config.get_work_path('cmssw-project-area.tar.gz')
	# Prolog / Epilog script support - warn about old syntax
	self.prolog = TaskExecutableWrapper(config, 'prolog', '')
	self.epilog = TaskExecutableWrapper(config, 'epilog', '')
	if config.get_fn_list('executable', []) != []:
		raise ConfigError(
			'Prefix executable and argument options with either prolog or epilog!')
	self.arguments = config.get('arguments', '')
	# Get cmssw config files and check their existance
	# Check that for dataset jobs the necessary placeholders are in the config file
	if not self._has_dataset:
		self._events_per_job = config.get('events per job', '0')
		# this can be a variable like @USER_EVENTS@!
		self._needed_vn_set.add('MAX_EVENTS')
	fragment = config.get_fn('instrumentation fragment',
		get_path_share('fragmentForCMSSW.py', pkg='grid_control_cms'))
	self._config_fn_list = self._process_config_file_list(config,
		config.get_fn_list('config file', self._get_config_file_default()),
		fragment, auto_prepare=config.get_bool('instrumentation', True),
		must_prepare=self._has_dataset)
	# Create project area tarball
	if self._project_area and not os.path.exists(self._project_area_tarball):
		config.set_state(True, 'init', detail='sandbox')
	# Information about search order for software environment
	self._cmssw_search_dict = self._get_cmssw_path_list(config)
	if config.get_state('init', detail='sandbox'):
		msg = 'CMSSW tarball already exists! Do you want to regenerate it?'
		# an existing tarball is only regenerated after user confirmation
		if os.path.exists(self._project_area_tarball) and not self._uii.prompt_bool(msg, True):
			return
		# Generate CMSSW tarball
		if self._project_area:
			create_tarball(_match_files(self._project_area,
				self._project_area_matcher,
				self._always_matcher.create_matcher(''),
				self._project_area_base_fn),
				name=self._project_area_tarball)
		if self._project_area_tarball_on_se:
			config.set_state(True, 'init', detail='storage')
def _get_resync_result(resync_info_iter):
	"""Build the resync result: redo/disable pnum lists plus a partition iterator."""
	redo_list = []
	disable_list = []
	resync_result = Result(pnum_list_redo=redo_list, pnum_list_disable=disable_list)
	# the iterator fills both lists as a side effect while it is being consumed
	resync_result.partition_iter = _convert_resync_info_iter(
		resync_info_iter, redo_list, disable_list)
	return resync_result
def get_sb_in_fpi_list(self):
	"""Return the input sandbox file list - just the executable, if it is to be sent."""
	if not (self._executable_send and self._executable):
		return []
	return [Result(path_abs=self._executable,
		path_rel=os.path.basename(self._executable))]
def _get_resync_result(resync_info_iter):
	"""Create the resync result with redo/disable pnum lists and partition iterator."""
	pnum_redo = []
	pnum_disable = []
	result = Result(pnum_list_redo=pnum_redo, pnum_list_disable=pnum_disable)
	# both lists are filled while the partition iterator is consumed
	result.partition_iter = _convert_resync_info_iter(resync_info_iter,
		pnum_redo, pnum_disable)
	return result
def get_sb_in_fpi_list(self):
	"""Extend the user task input sandbox with the ROOT libraries and run script."""
	run_script_fpi = Result(path_abs=get_path_share('gc-run.root.sh'),
		path_rel='gc-run.root.sh')
	fpi_list = UserTask.get_sb_in_fpi_list(self)
	return fpi_list + self._lib_fn_list + [run_script_fpi]
def get_interval_info(self):
	"""Return the default polling intervals (60s while idle, 10s between steps)."""
	return Result(wait_between_steps=10, wait_on_idle=60)
def get_interval_info(self):
	"""Return suggested Idle/Active polling interval (60s idle, 10s between steps)."""
	interval_info = Result(wait_on_idle=60, wait_between_steps=10)
	return interval_info