def get_file_list(gc_base_path='../..', **kwargs):
	"""Yield (absolute path, relative path) pairs for all source files to process.

	If command line arguments are present, they are taken as an explicit file
	list and no filtering is applied.  Otherwise the standard grid-control
	source tree below *gc_base_path* is walked ('scripts', 'packages',
	'testsuite' plus 'go.py' and 'GC') and each file is filtered through the
	module-level match_file() with **kwargs.

	Side effect: appends the cleaned '<gc_base_path>/packages' directory
	to sys.path so grid-control modules become importable.
	"""
	gc_base_path = clean_path(gc_base_path)
	sys.path.append(clean_path(os.path.join(gc_base_path, 'packages')))
	if len(sys.argv) > 1:
		# Explicit file list from the command line - accept every file as-is
		def _iter_all_files():
			for fn in sys.argv[1:]:
				yield clean_path(fn)

		def _match_file(*args, **kwargs):
			return True
	else:
		# Walk the known source directories of the grid-control tree
		def _iter_all_files():
			for entry in ['scripts', 'packages', 'testsuite']:
				entry_path = os.path.join(gc_base_path, entry)
				if os.path.exists(entry_path):
					for (root, _, files) in os.walk(entry_path):
						for fn in files:
							yield os.path.join(root, fn)
			yield os.path.join(gc_base_path, 'go.py')
			yield os.path.join(gc_base_path, 'GC')
		_match_file = match_file
	# Generator expression instead of imap + lambda: the original lambda
	# parameter shadowed the loop variable 'fn', which was easy to misread.
	rel_fn_iter = (relpath(clean_path(raw_fn), gc_base_path) for raw_fn in _iter_all_files())
	for fn in rel_fn_iter:
		if _match_file(fn, **kwargs):
			yield (os.path.join(gc_base_path, fn), fn)
def _get_version(self, value):
	"""Return the short git revision hash of the repository at *value*.

	Runs 'git rev-parse --short HEAD' inside the cleaned directory and
	returns the stripped output, or 'undefined' if git produced nothing.
	"""
	old_wd = os.getcwd()
	os.chdir(clean_path(value))
	try:
		# raise_errors=False: a failing git call yields empty output instead
		git_proc = LocalProcess('git', 'rev-parse', '--short', 'HEAD')
		version = git_proc.get_output(timeout=10, raise_errors=False)
	finally:
		# BUGFIX: always restore the working directory, even if the
		# subprocess call raises (e.g. timeout) - the original left the
		# process stranded in the repository directory on error.
		os.chdir(old_wd)
	return version.strip() or 'undefined'
def _get_version(self, value):
	"""Determine the SVN revision of the directory *value* via 'svnversion'."""
	svn_proc = LocalProcess('svnversion', clean_path(value))
	raw_output = svn_proc.get_output(timeout=10, raise_errors=False)
	version = raw_output.strip().lower()
	# different SVN versions yield different output for unversioned directories:
	if ('exported' in version) or ('unversioned' in version):
		return 'undefined'
	return version or 'undefined'
def __init__(self, config, datasource_name):
	"""Read the source-directory scanner settings from *config*.

	Normalizes the configured source path to always carry an explicit
	'<protocol>://' prefix (defaulting to 'file://') with a cleaned path part.
	"""
	InfoScanner.__init__(self, config, datasource_name)
	self._path = config.get('source directory', '.')
	self._timeout = config.get_int('source timeout', 120)
	self._trim = config.get_bool('source trim local', True)
	self._recurse = config.get_bool('source recurse', False)
	# Plain local paths get an explicit file:// protocol prefix
	if '://' not in self._path:
		self._path = 'file://' + self._path
	(protocol, path_part) = self._path.split('://')
	self._path = protocol + '://' + clean_path(path_part)
def _get_git_version(path):
	"""Return the short git revision hash of the repository at *path*, or None.

	Runs 'git rev-parse --short HEAD' inside the cleaned directory; returns
	None when git produces no output (e.g. not a git checkout).
	"""
	from grid_control.utils.process_base import LocalProcess
	from grid_control.utils import clean_path
	path = clean_path(path)
	old_wd = os.getcwd()
	os.chdir(path)
	try:
		# raise_errors=False: a failing git call yields empty output instead
		git_proc = LocalProcess('git', 'rev-parse', '--short', 'HEAD')
		version = git_proc.get_output(timeout=10, raise_errors=False)
	finally:
		# BUGFIX: always restore the working directory, even if the
		# subprocess call raises (e.g. timeout) - the original left the
		# process stranded in the repository directory on error.
		os.chdir(old_wd)
	return version.strip() or None
def get_dataset_config(opts, args):
	"""Build a grid-control config object with a 'dataset' section from CLI input.

	*args* are joined into the dataset expression.  If no provider was chosen
	explicitly, 'DBS3Provider' is used when the first token is not an existing
	file, otherwise 'ListProvider'.
	"""
	dataset = str.join('\n', args)
	dataset_tokens = dataset.split()
	if not opts.provider:
		# BUGFIX: guard against empty *args* - the original crashed with
		# IndexError on dataset.split()[0]; fall back to ListProvider.
		if dataset_tokens and not os.path.exists(clean_path(dataset_tokens[0])):
			opts.provider = 'DBS3Provider'
		else:
			opts.provider = 'ListProvider'
	config_dict = {'dbs blacklist T1 *': 'False', 'remove empty blocks *': 'False',
		'remove empty files *': 'False', 'location format *': opts.location,
		'nickname check collision *': 'False', 'dataset location filter': opts.location_filter,
		'dataset *': dataset, 'dataset provider *': opts.provider}
	if opts.metadata or opts.list_metadata or opts.list_metadata_common:
		# Metadata listings need lumi information to be kept around
		config_dict['lumi filter *'] = '-'
		config_dict['keep lumi metadata *'] = 'True'
	return gc_create_config(config_file=opts.settings, config_dict={'dataset': config_dict})
def _get_cmssw_path_list(self, config): result = [] path_cmssw_user = config.get(['cmssw dir', 'vo software dir'], '') if path_cmssw_user: path_cmssw_local = os.path.abspath(clean_path(path_cmssw_user)) if os.path.exists(path_cmssw_local): path_cmssw_user = path_cmssw_local if path_cmssw_user: result.append(('CMSSW_DIR_USER', path_cmssw_user)) if self._old_release_top: path_scram_project = os.path.normpath('%s/../../../../' % self._old_release_top) result.append(('CMSSW_DIR_PRO', path_scram_project)) self._log.info('Local jobs will try to use the CMSSW software located here:') for idx, loc in enumerate(result): self._log.info(' %i) %s', idx + 1, loc[1]) if result: self._log.info('') return result
def __init__(self, config, name, job_db, task=None):
	"""Set up the JSON state report, deriving the output file name from the task.

	Task id and name default to 'Unknown' when no *task* is supplied.
	"""
	# Initialized up-front: the destructor accesses these even when the
	# base class constructor below fails.
	self._task_info = {}
	self._output_fn = None
	Report.__init__(self, config, name, job_db, task)
	(task_id, task_name) = ('Unknown', 'Unknown')
	if task:
		desc = task.get_description()
		(task_id, task_name) = (desc.task_id, desc.task_name)
	self._task_info = {'task id': task_id, 'name': task_name}
	output_dn = clean_path(config.get('report file directory', on_change=None))
	self._output_fn = os.path.join(output_dn, 'states' + task_id + '.json')
def get_dataset_config(opts, args):
	"""Assemble a grid-control config object with a 'dataset' section from CLI input."""
	dataset = str.join('\n', args)
	# Auto-detect the provider when none was given: an existing file means
	# a dataset list, anything else is treated as a DBS dataset path.
	if not opts.provider:
		if os.path.exists(clean_path(dataset.split()[0])):
			opts.provider = 'ListProvider'
		else:
			opts.provider = 'DBS3Provider'
	config_dict = {}
	config_dict['dbs blacklist T1 *'] = 'False'
	config_dict['remove empty blocks *'] = 'False'
	config_dict['remove empty files *'] = 'False'
	config_dict['location format *'] = opts.location
	config_dict['nickname check collision *'] = 'False'
	config_dict['dataset location filter'] = opts.location_filter
	config_dict['dataset *'] = dataset
	config_dict['dataset provider *'] = opts.provider
	if opts.metadata or opts.list_metadata or opts.list_metadata_common:
		config_dict['lumi filter *'] = '-'
		config_dict['keep lumi metadata *'] = 'True'
	return gc_create_config(config_file=opts.settings, config_dict={'dataset': config_dict})
def _iter_all_files():
	"""Yield the cleaned path of every file name given on the command line."""
	for raw_fn in sys.argv[1:]:
		yield clean_path(raw_fn)