Example #1
    def __init__(self, config, name, check_executor, cancel_executor):
        WMS.__init__(self, config, name)
        for executor in [check_executor, cancel_executor]:
            executor.setup(self._log)
        (self._check_executor, self._cancel_executor) = (check_executor,
                                                         cancel_executor)

        if self._name != self.__class__.__name__.upper():
            self._log.info('Using batch system: %s (%s)',
                           self.__class__.__name__, self._name)
        else:
            self._log.info('Using batch system: %s', self._name)

        self._runlib = config.get_work_path('gc-run.lib')
        fp = SafeFile(self._runlib, 'w')
        content = SafeFile(get_path_share('gc-run.lib')).read()
        fp.write(
            content.replace('__GC_VERSION__',
                            __import__('grid_control').__version__))
        fp.close()
        self._path_output = config.get_work_path('output')
        self._path_file_cache = config.get_work_path('files')
        ensure_dir_exists(self._path_output, 'output directory')
        self._path_fail = config.get_work_path('fail')

        # Initialise access token and storage managers

        # UI -> SE -> WN
        self._sm_se_in = config.get_plugin('se input manager',
                                           'SEStorageManager',
                                           cls=StorageManager,
                                           bind_kwargs={'tags': [self]},
                                           pargs=('se', 'se input',
                                                  'SE_INPUT'))
        self._sm_sb_in = config.get_plugin('sb input manager',
                                           'LocalSBStorageManager',
                                           cls=StorageManager,
                                           bind_kwargs={'tags': [self]},
                                           pargs=('sandbox', 'sandbox',
                                                  'SB_INPUT'))
        # UI <- SE <- WN
        self._sm_se_out = config.get_plugin('se output manager',
                                            'SEStorageManager',
                                            cls=StorageManager,
                                            bind_kwargs={'tags': [self]},
                                            pargs=('se', 'se output',
                                                   'SE_OUTPUT'))
        self._sm_sb_out = None

        self._token = config.get_composited_plugin(['proxy', 'access token'],
                                                   'TrivialAccessToken',
                                                   'MultiAccessToken',
                                                   cls=AccessToken,
                                                   bind_kwargs={
                                                       'inherit': True,
                                                       'tags': [self]
                                                   })
        self._output_fn_list = None
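
Note: every snippet on this page revolves around grid-control's SafeFile helper. Its implementation is not shown here; as a rough stand-in inferred purely from the call sites in these examples (the real class presumably adds protection against partially written files), it behaves roughly like:

class SafeFileSketch(object):
    # Minimal stand-in mirroring the SafeFile interface used on this page;
    # inferred from usage only - NOT the real grid-control class
    def __init__(self, fn, mode='r'):
        self._fp = open(fn, mode)

    def read(self):
        return self._fp.read()

    def read_close(self):
        # read the complete content, then close the file
        try:
            return self._fp.read()
        finally:
            self._fp.close()

    def iter_close(self):
        # iterate over the lines, closing the file once exhausted
        try:
            for line in self._fp:
                yield line
        finally:
            self._fp.close()

    def write(self, value):
        self._fp.write(value)

    def write_close(self, value):
        # write the complete content, then close the file
        self._fp.write(value)
        self._fp.close()

    def close(self):
        self._fp.close()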
Example #2
def _sort_python_compat_lines(fn):
    output_line_list = []
    output_set = set()
    import_section = False
    for line in SafeFile(fn).iter_close():
        if line.startswith('from') or line.startswith('import'):
            import_section = True
        elif line.strip():
            if import_section:
                output_list = lfilter(lambda x: x.strip() != '', output_set)
                output_list.sort(
                    key=lambda l: (not l.startswith('import'),
                                   ('python_compat' in l), 'testfwk' not in l,
                                   lmap(lambda x: x.split('.'), l.split())))
                for output_line in output_list:
                    output_line_list.append(output_line)
                output_line_list.append('\n\n')
                output_set = set()
            import_section = False
        if not import_section:
            output_line_list.append(line)
        else:
            output_set.add(line)
    if import_section:
        output_list = lfilter(lambda x: x.strip() != '', output_set)
        output_list.sort(
            key=lambda l: (not l.startswith('import'), 'python_compat' not in
                           l, lmap(lambda x: x.split('.'), l.split())))
        for output_line in output_list:
            output_line_list.append(output_line)

    fp = open(fn, 'w')
    for output_line in output_line_list:
        fp.write(output_line)
    fp.close()
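
Note: the sort key above orders plain 'import' statements before 'from' imports, moves python_compat lines behind the other imports, keeps testfwk lines ahead of non-testfwk ones, and finally compares the dotted module paths. A standalone check of that ordering (the sample lines are hypothetical):

# Hypothetical demonstration of the import-sorting key used above
lines = ['from python_compat import imap, lmap\n',
         'from grid_control.utils import Activity\n',
         'import os, sys\n']
lines.sort(key=lambda l: (not l.startswith('import'),
                          ('python_compat' in l), 'testfwk' not in l,
                          [x.split('.') for x in l.split()]))
# result: 'import os, sys' first, the grid_control line next, python_compat last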
Example #3
def _sort_from_lines(fn):
    # sort 'from' order
    replacement_str_pair_list = []
    raw = SafeFile(fn).read_close()
    for import_line in ifilter(lambda line: line.startswith('from '),
                               raw.splitlines()):
        try:
            _from, _source, _import, _what = import_line.split(None, 3)
            assert _from == 'from'
            assert _import == 'import'
            _comment = None
            if '#' in _what:
                _what, _comment = lmap(str.strip, _what.split('#', 1))
            import_list = sorted(imap(str.strip, _what.split(',')))
            new_import_line = 'from %s import %s' % (
                _source, str.join(', ', import_list))
            if _comment is not None:
                new_import_line += '  # ' + _comment
            replacement_str_pair_list.append((import_line, new_import_line))
        except Exception:
            logging.warning('%s: %s', fn, import_line)
            raise
    for (old, new) in replacement_str_pair_list:
        raw = raw.replace(old, new)
    fp = open(fn, 'w')
    fp.write(raw)
    fp.close()
Example #4
    def _read_jobs(self, job_limit):
        ensure_dir_exists(self._path_db, 'job database directory', JobError)

        candidates = []
        for job_fn in fnmatch.filter(os.listdir(self._path_db), 'job_*.txt'):
            try:  # 2xsplit is faster than regex
                jobnum = int(job_fn.split(".")[0].split("_")[1])
            except Exception:
                clear_current_exception()
                continue
            candidates.append((jobnum, job_fn))

        (job_map, max_job_len) = ({}, len(candidates))
        activity = Activity('Reading job infos')
        idx = 0
        for (jobnum, job_fn) in sorted(candidates):
            idx += 1
            if jobnum >= job_limit >= 0:
                self._log.info(
                    'Stopped reading job infos at job #%d out of %d available job files, '
                    + 'since the limit of %d jobs is reached', jobnum,
                    len(candidates), job_limit)
                break
            try:
                job_fn_full = os.path.join(self._path_db, job_fn)
                data = self._fmt.parse(SafeFile(job_fn_full).iter_close())
                job_obj = self._create_job_obj(job_fn_full, data)
            except Exception:
                raise JobError('Unable to process job file %r' % job_fn_full)
            job_map[jobnum] = job_obj
            activity.update('Reading job infos %d [%d%%]' %
                            (idx, (100.0 * idx) / max_job_len))
        activity.finish()
        return job_map
Example #5
 def save_to_file_iter(path, block_iter, strip_metadata=False):
     # Save dataset information in 'ini'-style => 10x faster to r/w than cPickle
     if os.path.dirname(path):
         ensure_dir_exists(os.path.dirname(path), 'dataset cache directory')
     return with_file_iter(
         SafeFile(path, 'w'), lambda fp: DataProvider.save_to_stream(
             fp, block_iter, strip_metadata))
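
Note: with_file_iter itself is not shown on this page; judging from this call site, it presumably drives the iterator returned by the callback and closes the file once iteration ends. A minimal sketch under that assumption:

def with_file_iter(fp, fun):
    # Hypothetical sketch: yield the items produced by fun(fp),
    # closing the file when the iterator is exhausted or abandoned
    try:
        for item in fun(fp):
            yield item
    finally:
        fp.close()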
Example #6
 def _update_map_error_code2msg(self, fn):
     # Read comments with error codes at the beginning of file: # <code> - description
     for line in ifilter(lambda x: x.startswith('#'),
                         SafeFile(fn).iter_close()):
         tmp = lmap(str.strip, line.lstrip('#').split(' - ', 1))
         if tmp[0].isdigit() and (len(tmp) == 2):
             self.map_error_code2msg[int(tmp[0])] = tmp[1]

 def process(self, dn):
     fn = os.path.join(dn, 'job.info')
     try:
         if not os.path.exists(fn):
             raise JobResultError('Job result file %r does not exist' % fn)
         try:
             info_content = SafeFile(fn).read_close()
         except Exception:
             raise JobResultError('Unable to read job result file %r' % fn)
         if not info_content:
             raise JobResultError('Job result file %r is empty' % fn)
         data = self._df.parse(info_content,
                               key_parser={None: str})  # impossible to fail
         try:
             jobnum = data.pop('JOBID')
             exit_code = data.pop('EXITCODE')
             message = data.pop('MESSAGE', None)
             return {
                 JobResult.JOBNUM: jobnum,
                 JobResult.EXITCODE: exit_code,
                 JobResult.MESSAGE: message,
                 JobResult.RAW: data
             }
         except Exception:
             raise JobResultError('Job result file %r is incomplete' % fn)
     except Exception:
         raise JobResultError('Unable to process output directory %r' % dn)
Example #8
 def __init__(self, config_fn_list):
     from grid_control_settings import Settings
     for config_fn in config_fn_list:
         exec_wrapper(
             SafeFile(resolve_path(config_fn, ['.'])).read_close(),
             {'Settings': Settings})
     DictConfigFiller.__init__(self, Settings.get_config_dict())
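
Note: exec_wrapper is assumed here to execute the configuration source with the supplied dictionary as its namespace, so the config file can call Settings directly. A minimal sketch under that assumption:

def exec_wrapper(source, namespace=None):
    # Hypothetical sketch: run the python source inside the given namespace
    namespace = namespace or {}
    exec(source, namespace)
    return namespace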
Example #9
def main():
    for (fn, fnrel) in get_file_list.get_file_list(show_type_list=['py'],
                                                   show_external=True,
                                                   show_testsuite=False):
        ident_map = {}
        blacklist = ['python_compat.py', '/htcondor_wms/', 'xmpp']
        if any(imap(lambda name: name in fn, blacklist)):
            continue
        for line in SafeFile(fn).iter_close():
            ident = line.replace(line.lstrip(), '')
            ident_level = ident.count('\t')
            line = line.strip()
            if line.startswith('def ') or line.startswith('class '):
                ident_map[ident_level] = line.split(':')[0]
                for other_ident_level in list(ident_map):
                    if other_ident_level > ident_level:
                        ident_map.pop(other_ident_level)
            parent_ident = ident_level - 1
            while (parent_ident not in ident_map) and (parent_ident > 0):
                parent_ident = parent_ident - 1
            parent = ident_map.get(parent_ident, '')
            if line.startswith('def ') and parent.startswith('def '):
                if not line.startswith('def _'):
                    logging.warning('nested function missing prefix: %r %r',
                                    fnrel, ident_map)
Example #10
def parse_lumi_filter(lumi_str):
    if lumi_str == '':
        return None

    run_lumi_range_list = []
    from grid_control.config import ConfigError
    for token in imap(str.strip, lumi_str.split(',')):
        token = lmap(str.strip, token.split('|'))
        run_lumi_str = token[0].lower().replace('min', '').replace('max', '')
        if True in imap(str.isalpha, run_lumi_str):
            if len(token) == 1:
                token.append('')
            try:
                json_fn = os.path.normpath(
                    os.path.expandvars(os.path.expanduser(token[0].strip())))
                run_lumi_range_list.extend(
                    parse_lumi_from_json(
                        SafeFile(json_fn).read_close(), token[1]))
            except Exception:
                raise ConfigError(
                    'Could not process lumi filter file: %r (filter: %r)' %
                    tuple(token))
        else:
            try:
                run_lumi_range_list.append(parse_lumi_from_str(token[0]))
            except Exception:
                raise ConfigError(
                    'Could not process lumi filter expression:\n\t%s' %
                    token[0])
    return merge_lumi_list(run_lumi_range_list)
Example #11
    def _get_jobs_output(self, gc_id_jobnum_list):
        # Get output of jobs and yield output dirs
        if len(gc_id_jobnum_list) == 0:
            return  # end the generator without yielding anything

        root_dn = os.path.join(self._path_output, 'tmp')
        try:
            if len(gc_id_jobnum_list) == 1:
                # For single jobs create single subdir
                tmp_dn = os.path.join(root_dn,
                                      md5_hex(gc_id_jobnum_list[0][0]))
            else:
                tmp_dn = root_dn
            ensure_dir_exists(tmp_dn)
        except Exception:
            raise BackendError('Temporary path "%s" could not be created.' %
                               tmp_dn)

        map_gc_id2jobnum = dict(gc_id_jobnum_list)
        jobs = self._write_wms_id_list(gc_id_jobnum_list)

        activity = Activity('retrieving %d job outputs' %
                            len(gc_id_jobnum_list))
        proc = LocalProcess(self._output_exec, '--noint', '--logfile',
                            '/dev/stderr', '-i', jobs, '--dir', tmp_dn)

        # yield output dirs
        todo = list(map_gc_id2jobnum.values())
        current_jobnum = None
        for line in imap(str.strip, proc.stdout.iter(timeout=60)):
            if line.startswith(tmp_dn):
                todo.remove(current_jobnum)
                output_dn = line.strip()
                unpack_wildcard_tar(self._log, output_dn)
                yield (current_jobnum, output_dn)
                current_jobnum = None
            else:
                current_jobnum = map_gc_id2jobnum.get(self._create_gc_id(line),
                                                      current_jobnum)
        exit_code = proc.status(timeout=0, terminate=True)
        activity.finish()

        if exit_code != 0:
            if 'Keyboard interrupt raised by user' in proc.stderr.read(
                    timeout=0):
                remove_files([jobs, root_dn])
                return  # PEP 479: do not raise StopIteration inside a generator
            else:
                self._log.log_process(proc,
                                      files={'jobs': SafeFile(jobs).read()})
            self._log.error('Trying to recover from error ...')
            for dn in os.listdir(root_dn):
                yield (None, os.path.join(root_dn, dn))

        # return unretrievable jobs
        for jobnum in todo:
            yield (jobnum, None)

        remove_files([jobs, tmp_dn])
Example #12
def _parse_cmd_line(cmd_line_args):
    # grid-control command line parser
    parser = Options(usage='%s [OPTIONS] <config file>', add_help_option=False)
    parser.add_bool(None, ' ', 'debug', default=False)
    parser.add_bool(None, ' ', 'help-conf', default=False)
    parser.add_bool(None, ' ', 'help-confmin', default=False)
    parser.add_bool(None, 'c', 'continuous', default=False)
    parser.add_bool(None, 'h', 'help', default=False)
    parser.add_bool(None, 'i', 'init', default=False)
    parser.add_bool(None, 'q', 'resync', default=False)
    parser.add_bool(None,
                    's',
                    'no-submission',
                    default=True,
                    dest='submission')
    parser.add_bool(None, 'G', 'gui', default=False, dest='gui_ansi')
    parser.add_accu(None, 'v', 'verbose')
    parser.add_list(None, 'l', 'logging')
    parser.add_list(None, 'o', 'override')
    parser.add_text(None, 'a', 'action')
    parser.add_text(None, 'd', 'delete')
    parser.add_text(None, 'C', 'cancel')
    parser.add_text(None, 'J', 'job-selector')
    parser.add_text(None, 'n', 'jobs')
    parser.add_text(None, 'm', 'max-retry')
    parser.add_text(None, ' ', 'reset')
    parser.add_bool(None, ' ', 'debug-console',
                    False)  # undocumented debug option
    parser.add_list(None, ' ', 'debug-trace')  # undocumented debug option
    # Deprecated options - refer to new report script instead
    for (sopt, lopt) in [('-r', 'report'), ('-R', 'site-report'),
                         ('-T', 'time-report'), ('-M', 'task-report'),
                         ('-D', 'detail-report'), ('', 'help-vars')]:
        parser.add_bool(None, sopt, lopt, default=False, dest='old_report')

    (opts, args, _) = parser.parse(args=cmd_line_args)
    opts.gui = None
    if opts.gui_ansi:
        opts.gui = 'ANSIGUI'
    opts.continuous = opts.continuous or None  # either True or None
    # Display help
    if opts.help:
        help_msg = SafeFile(get_path_share('help.txt')).read_close()
        parser.exit_with_usage(msg=help_msg, show_help=False)
    # Require single config file argument
    if len(args) == 0:
        parser.exit_with_usage(msg='Config file not specified!')
    elif len(args) > 1:
        parser.exit_with_usage(msg='Invalid command line arguments: %r' %
                               cmd_line_args)
    # Warn about deprecated report options
    if opts.old_report:
        deprecated(
            'Please use the more versatile report tool in the scripts directory!'
        )
    # Configure preliminary logging
    logging.getLogger().setLevel(max(1, logging.DEFAULT - opts.verbose))
    return (opts, args)
Example #13
		def _log_tar():
			tar = tarfile.TarFile.open(self._fn, 'a')
			for key, value in record.files.items():
				if os.path.exists(value):
					value = SafeFile(value).read_close()
				file_obj = VirtualFile(os.path.join(entry, key), [value])
				info, handle = file_obj.get_tar_info()
				tar.addfile(info, handle)
				handle.close()
			tar.close()
Example #14
def main():
	os.chdir(os.path.join(os.path.dirname(__file__), '../..'))
	os.system('git rev-parse --short HEAD > .git_version')
	os.system('git log | grep git-svn > .svn_version')
	for line in SafeFile('.svn_version').iter_close():
		svn_version = int(line.split('@')[1].split()[0])
		break
	git_version = SafeFile('.git_version').read_close().strip()
	svn_version += 1
	logging.critical('%s %s', svn_version, git_version)
	os.unlink('.svn_version')
	os.unlink('.git_version')
	fn = 'packages/grid_control/__init__.py'
	line_list = SafeFile(fn).read().splitlines()
	fp = SafeFile(fn, 'w')
	for line in line_list:
		if line.startswith('__version__'):
			version_tuple = (svn_version // 1000, (svn_version // 100) % 10, svn_version % 100, git_version)
			line = "__version__ = '%d.%d.%d (%s)'" % version_tuple
		fp.write(line + '\n')
	fp.close()
Example #15
def logfile_decode(fn):
    def _decode_stream(fp):
        for line in fp.readlines():
            if line.startswith('(B64) '):
                buffer_obj = BytesBuffer(
                    base64.b64decode(line.replace('(B64) ', '')))
                line = gzip.GzipFile(fileobj=buffer_obj).read().decode('ascii')
            logging.getLogger('script').info(line.rstrip())

    if fn.endswith('.gz'):
        with_file(GZipTextFile(fn, 'r'), _decode_stream)
    else:
        with_file(SafeFile(fn), _decode_stream)
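
Note: the '(B64) ' lines handled above are gzip-compressed, base64-encoded chunks of log text. For reference, a standalone sketch of the inverse transform that would produce such a line (the function name is hypothetical):

import base64
import gzip
from io import BytesIO

def logfile_encode_line(text):
    # Hypothetical inverse of the decoder above: gzip-compress, then base64-encode
    buffer_obj = BytesIO()
    gz_fp = gzip.GzipFile(fileobj=buffer_obj, mode='wb')
    gz_fp.write(text.encode('ascii'))
    gz_fp.close()  # flush the gzip trailer before reading the buffer
    return '(B64) ' + base64.b64encode(buffer_obj.getvalue()).decode('ascii')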
Example #16
def _sort_import_lines(fn):
    # sort 'import' order
    replacement_str_pair_list = []
    raw = SafeFile(fn).read_close()
    for import_line in ifilter(lambda line: line.startswith('import '),
                               raw.splitlines()):
        import_list = sorted(imap(
            str.strip,
            import_line.replace('import ', '').split(',')),
                             key=lambda x: (len(x), x))
        replacement_str_pair_list.append(
            (import_line, 'import %s' % str.join(', ', import_list)))
    for (old, new) in replacement_str_pair_list:
        raw = raw.replace(old, new)
    fp = open(fn, 'w')
    fp.write(raw)
    fp.close()
Example #17
 def _log_disabled_jobs(self):
     disabled = self.job_db.get_job_list(ClassSelector(JobClass.DISABLED))
     try:
         with_file(SafeFile(self._disabled_jobs_logfile, 'w'),
                   lambda fp: fp.write(str.join('\n', imap(str, disabled))))
     except Exception:
         raise JobError('Could not write disabled jobs to file %s!' %
                        self._disabled_jobs_logfile)
     if disabled:
         self._log.log_time(logging.WARNING,
                            'There are %d disabled jobs in this task!',
                            len(disabled))
         self._log.log_time(
             logging.DEBUG,
             'Please refer to %s for a complete list of disabled jobs.',
             self._disabled_jobs_logfile)
Example #18
    def _iter_blocks_raw(self):
        def _filter_block(block):
            if self._filter:
                return self._filter in '/%s#' % DataProvider.get_block_id(
                    block)
            return True

        try:
            fp = SafeFile(self._filename)
        except Exception:
            raise DatasetError('Unable to open dataset file %s' %
                               repr(self._filename))
        for block in self._create_blocks(fp.iter_close()):
            if _filter_block(block):
                self._raise_on_abort()
                yield block
Example #19
 def _display_logfile(dn, fn):
     full_fn = os.path.join(dn, fn)
     if os.path.exists(full_fn):
         try:
             if fn.endswith('.gz'):
                 fp = gzip.open(full_fn)
                 content = bytes2str(fp.read())
                 fp.close()
             else:
                 content = SafeFile(full_fn).read_close()
             self._log.error(fn + '\n' + content + '-' * 50)
         except Exception:
             self._log.exception('Unable to display %s', fn)
             clear_current_exception()
     else:
         self._log.error('Log file does not exist: %s', fn)
Example #20
def lumi_calc(opts, work_dn, jobnum_list, reader):
    # Lumi filter calculations
    (map_sample2run_info_dict, map_sample2input_events,
     map_sample2output_events) = process_jobs(opts, work_dn, jobnum_list,
                                              reader)

    activity = Activity('Simplifying lumi sections')
    map_sample2run_lumi_range = {}
    for sample in map_sample2run_info_dict:
        for run in map_sample2run_info_dict[sample]:
            for lumi in map_sample2run_info_dict[sample][run]:
                map_sample2run_lumi_range.setdefault(sample, []).append(
                    ([run, lumi], [run, lumi]))
    for sample in map_sample2run_info_dict:
        map_sample2run_lumi_range[sample] = merge_lumi_list(
            map_sample2run_lumi_range[sample])
    activity.finish()

    for sample, lumi_list in map_sample2run_lumi_range.items():
        if opts.job_events:
            if map_sample2output_events.get(sample):
                LOG.info('')
            display_dict_list = lmap(
                lambda pfn: {
                    0: pfn,
                    1: map_sample2output_events[sample][pfn]
                }, map_sample2output_events.get(sample, {}))
            if display_dict_list:
                display_dict_list.append('=')
            display_dict_list += [{
                0: 'Processed in total',
                1: map_sample2input_events.get(sample)
            }]
            ConsoleTable.create([(0, ''), (1, '#Events')],
                                display_dict_list,
                                title='Sample: %s' % sample)
        if opts.job_json:
            json_fn = os.path.join(opts.output_dir or work_dn,
                                   'processed_%s.json' % sample)
            with_file(SafeFile(json_fn, 'w'),
                      partial(write_lumi_json, lumi_list))
            LOG.info('Saved processed lumi sections in %s', json_fn)
        if opts.job_gc:
            LOG.info('\nList of processed lumisections\n' + '-' * 30)
            write_lumi_gc(lumi_list)
        LOG.info('')
Example #21
    def _fill_content_deep(self, config_fn, search_path_list,
                           content_configfile):
        log = logging.getLogger(
            ('config.%s' % get_file_name(config_fn)).rstrip('.').lower())
        log.log(logging.INFO1, 'Reading config file %s', config_fn)
        config_fn = resolve_path(config_fn,
                                 search_path_list,
                                 exception_type=ConfigError)
        config_str_list = list(SafeFile(config_fn).iter_close())

        # Single pass, non-recursive list retrieval
        tmp_content_configfile = {}
        self._fill_content_shallow(config_fn, config_str_list,
                                   search_path_list, tmp_content_configfile)

        def _get_list_shallow(section, option):
            for (opt, value, _) in tmp_content_configfile.get(section, []):
                if opt == option:
                    for entry in parse_list(value, None):
                        yield entry

        search_path_list_new = [os.path.dirname(config_fn)]
        # Add entries from include statement recursively
        for include_fn in _get_list_shallow('global', 'include'):
            self._fill_content_deep(include_fn,
                                    search_path_list + search_path_list_new,
                                    content_configfile)
        # Process all other entries in current file
        self._fill_content_shallow(config_fn, config_str_list,
                                   search_path_list, content_configfile)
        # Override entries in current config file
        for override_fn in _get_list_shallow('global', 'include override'):
            self._fill_content_deep(override_fn,
                                    search_path_list + search_path_list_new,
                                    content_configfile)
        # Filter special global options
        if content_configfile.get('global', []):

            def _ignore_includes(opt_v_s_tuple):
                return opt_v_s_tuple[0] not in ['include', 'include override']

            content_configfile['global'] = lfilter(
                _ignore_includes, content_configfile['global'])
        return search_path_list + search_path_list_new
Example #22
 def __init__(self, config, name):
     LocalEventHandler.__init__(self, config, name)
     self._source_jid = config.get('source jid', on_change=None)
     self._target_jid = config.get('target jid', on_change=None)
     password_fn = config.get_fn('source password file')
     os.chmod(password_fn, stat.S_IRUSR)
     # password in variable name removes it from debug log!
     self._source_password = SafeFile(password_fn).read_close().strip()
     try:  # xmpp contains many deprecated constructs
         import warnings
         warnings.simplefilter('ignore', DeprecationWarning)
     except Exception:
         clear_current_exception()
     self._xmpp = ignore_exception(Exception, None, __import__, 'xmpp')
     if self._xmpp is None:
         try:
             import grid_control_gui.xmpp
             self._xmpp = grid_control_gui.xmpp
         except Exception:
             raise Exception('Unable to load jabber library!')
Example #23
    def _submit_job(self, jobnum, task):
        # Submit job and yield (jobnum, WMS ID, other data)
        jdl_fd, jdl_fn = tempfile.mkstemp('.jdl')
        try:
            jdl_line_list = self._make_jdl(jobnum, task)
            safe_write(os.fdopen(jdl_fd, 'w'), jdl_line_list)
        except Exception:
            remove_files([jdl_fn])
            raise BackendError('Could not write jdl data to %s.' % jdl_fn)

        try:
            submit_arg_list = []
            for key_value in filter_dict(self._submit_args_dict,
                                         value_filter=identity).items():
                submit_arg_list.extend(key_value)
            submit_arg_list.append(jdl_fn)

            activity = Activity('submitting job %d' % jobnum)
            proc = LocalProcess(self._submit_exec, '--nomsg', '--noint',
                                '--logfile', '/dev/stderr', *submit_arg_list)

            wms_id = None
            stripped_stdout_iter = imap(str.strip,
                                        proc.stdout.iter(timeout=60))
            for line in ifilter(lambda x: x.startswith('http'),
                                stripped_stdout_iter):
                wms_id = line
            exit_code = proc.status(timeout=0, terminate=True)

            activity.finish()

            if (exit_code != 0) or (wms_id is None):
                if not self._explain_error(proc, exit_code):
                    self._log.log_process(
                        proc, files={'jdl': SafeFile(jdl_fn).read()})
        finally:
            remove_files([jdl_fn])
        job_data = {'jdl': str.join('', jdl_line_list)}
        return (jobnum, self._create_gc_id(wms_id), job_data)
Example #24
def gc_run(args=None, intro=True):
    # display the 'grid-control' logo and version
    if intro and not os.environ.get('GC_DISABLE_INTRO'):
        sys.stdout.write(
            SafeFile(get_path_share('logo.txt'), 'r').read_close())
        sys.stdout.write('Revision: %s\n' % get_version())
    pyver = (sys.version_info[0], sys.version_info[1])
    if pyver < (2, 3):
        deprecated('This python version (%d.%d) is not supported anymore!' %
                   pyver)
    atexit.register(lambda: sys.stdout.write('\n'))

    # main try... except block to catch exceptions and show error message
    try:
        return _gc_run(args)
    except SystemExit:  # avoid getting caught for Python < 2.5
        abort(True)
        raise
    except Exception:  # coverage overrides sys.excepthook
        abort(True)
        gc_excepthook(*sys.exc_info())
        sys.exit(os.EX_SOFTWARE)
Example #25
 def _log_tar(only_print=False):
     # self._log.info('tar: %s' % self._fn)
     sleep_when_cannot_accept_jobs = False
     message = ""
     if not only_print:
         tar = tarfile.TarFile.open(self._fn, 'a')
     for key, value in record.files.items():
         if "The CREAM service cannot accept jobs at the moment" in value:
             sleep_when_cannot_accept_jobs = True
             message = "The CREAM service cannot accept jobs at the moment"
         elif "Unable to connect to" in value:
             sleep_when_cannot_accept_jobs = True
             message = value
         value = os.linesep.join([s for s in value.splitlines() if s])
         if only_print:
             self._log.info('\n\tkey: "%s"\n\tvalue: "%s"' %
                            (key, value))
         else:
             if value.startswith('\n'):
                 value = value[1:]
             if os.path.exists(value):
                 value = SafeFile(value).read_close()
             # self._log.info('\tvirtual file: "%s"' % os.path.join(entry, key))
             file_obj = VirtualFile(os.path.join(entry, key), [value])
             info, handle = file_obj.get_tar_info()
             # self._log.info('\tinfo: "%s"' % info)
             # self._log.info('\thandle: "%s"' % handle)
             tar.addfile(info, handle)
             handle.close()
     if not only_print:
         tar.close()
     if sleep_when_cannot_accept_jobs:
         from grid_control.utils.activity import Activity
         activity = Activity(
             message +
             '. Waiting before trying to delegate proxy again...')
         time.sleep(900)
         activity.finish()
Example #26
 def _config_store_backup(self, source, target, fragment_path=None):
     content = SafeFile(source).read_close()
     if fragment_path:
         self._log.info('Instrumenting... %s', os.path.basename(source))
         content += SafeFile(fragment_path).read_close()
     SafeFile(target, 'w').write_close(content)
Example #27
 def _config_is_instrumented(self, fn):
     cfg = SafeFile(fn).read_close()
     for tag in self._needed_vn_set:
         if ('__%s__' % tag not in cfg) and ('@%s@' % tag not in cfg):
             return False
     return True
Example #28
 def save_to_disk(self, filename):
     with_file(SafeFile(filename, 'w'), partial(pickle.dump, self))
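
Note: the matching load path is not part of this page; assuming with_file returns the result of its callback, a hypothetical counterpart would be:

def load_from_disk(filename):
    # Hypothetical counterpart to save_to_disk above
    return with_file(SafeFile(filename), pickle.load)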
Example #29
def deprecated(text):
    log = logging.getLogger('console')
    log.critical('\n%s\n[DEPRECATED] %s',
                 SafeFile(get_path_share('fail.txt')).read_close(), text)
    if not UserInputInterface().prompt_bool('Do you want to continue?', False):
        sys.exit(os.EX_TEMPFAIL)
Example #30
 def _check_write_stack_log():
     if os.path.exists('gc_debug_stack.log'):
         with_file(
             SafeFile('gc_debug_stack.log', 'w'), lambda fp: DebugInterface(
                 stream=fp).show_stack(thread_id='all'))