Example #1
	def _parse_status(self, value, default):
		if any(imap(lambda x: x in value, ['E', 'e'])):
			return Job.UNKNOWN
		if any(imap(lambda x: x in value, ['h', 's', 'S', 'T', 'w'])):
			return Job.QUEUED
		if any(imap(lambda x: x in value, ['r', 't'])):
			return Job.RUNNING
		return Job.READY
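Every example on this page is a variation of the same idiom: any(imap(predicate, candidates)) lazily maps a predicate over a list of candidates and short-circuits as soon as one matches. Below is a minimal, self-contained sketch of the pattern; the fallback import is only for illustration (the snippets above presumably get imap from grid-control's python_compat module).

# Minimal sketch of the any(imap(...)) idiom; the import fallback is illustrative.
try:
	from itertools import imap  # Python 2: lazy map
except ImportError:
	imap = map  # Python 3: map is already lazy

def has_any(value, candidates):
	# True as soon as one candidate occurs in value (short-circuits on the first hit)
	return any(imap(value.__contains__, candidates))

print(has_any('Eqw', ['E', 'e']))  # True
print(has_any('qw', ['E', 'e']))   # False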
Example #2
 def _parse_status(self, value, default):
     if any(imap(lambda x: x in value, ['E', 'e'])):
         return Job.UNKNOWN
     if any(imap(lambda x: x in value, ['h', 's', 'S', 'T', 'w'])):
         return Job.QUEUED
     if any(imap(lambda x: x in value, ['r', 't'])):
         return Job.RUNNING
     return Job.READY
Example #3
 def _parse_status(self, value, default):
     if any(imap(value.__contains__, ['E', 'e'])):
         return Job.UNKNOWN
     if any(imap(value.__contains__, ['h', 's', 'S', 'T', 'w'])):
         return Job.QUEUED
     if any(imap(value.__contains__, ['r', 't'])):
         return Job.RUNNING
     return Job.READY
Example #4
	def _parse_status(self, value, default):
		if any(imap(value.__contains__, ['E', 'e'])):
			return Job.UNKNOWN
		if any(imap(value.__contains__, ['h', 's', 'S', 'T', 'w'])):
			return Job.QUEUED
		if any(imap(value.__contains__, ['r', 't'])):
			return Job.RUNNING
		return Job.READY
Example #5
def match_file_type(fn, show_type_list, no_links):
    link_blacklist = ['downloadFromSE', 'lumiInfo', 'gcTool', 'gcSettings']
    if no_links and any(imap(lambda pat: pat in fn, link_blacklist)):
        return False
    show_type_list = show_type_list or [
        'py', 'sh', 'lib', 'conf', 'txt', 'json', 'cfg', 'rst'
    ]
    # Filetype check
    if fn.split('/')[-1].startswith('gc-') and ('.' not in fn):
        return 'sh'
    if any(imap(fn.endswith, show_type_list)):
        for ftcand in show_type_list:
            if fn.endswith(ftcand):
                return ftcand
Example #6
def main():
    for (fn, fnrel) in get_file_list.get_file_list(show_type_list=['py'],
                                                   show_external=True,
                                                   show_testsuite=False):
        ident_map = {}
        blacklist = ['python_compat.py', '/htcondor_wms/', 'xmpp']
        if any(imap(lambda name: name in fn, blacklist)):
            continue
        for line in SafeFile(fn).iter_close():
            ident = line.replace(line.lstrip(), '')
            ident_level = ident.count('\t')
            line = line.strip()
            if line.startswith('def ') or line.startswith('class '):
                ident_map[ident_level] = line.split(':')[0]
                for other_ident_level in list(ident_map):
                    if other_ident_level > ident_level:
                        ident_map.pop(other_ident_level)
            parent_ident = ident_level - 1
            while (parent_ident not in ident_map) and (parent_ident > 0):
                parent_ident = parent_ident - 1
            parent = ident_map.get(parent_ident, '')
            if line.startswith('def ') and parent.startswith('def '):
                if not line.startswith('def _'):
                    logging.warning('nested function missing prefix: %r %r',
                                    fnrel, ident_map)
Example #7
def match_external(fn):
    external_pat_list = [
        '/xmpp/',
        '/requests/',
        '/DashboardAPI/',
        'python/logging/',
        '/DLSAPI/',
        '/DLSAPI_v1/',
        '/DLSAPI_v2/',
        '/DBSAPI/',
        '/DBSAPI_v1/',
        '/DBSAPI_v2/',
    ]
    external_fn_list = [
        'docs/ExampleC1_production.py',
        'docs/examples/ExampleC1_production.py',
        'python/textwrap.py',
        'python/optparse.py',
        'packages/grid_control_cms/Lexicon.py',
        'packages/Lexicon.py',
        'packages/python_compat_json.py',
        'packages/json.py',
        'packages/python_compat_popen2.py',
        'packages/popen2.py',
        'python/popen2.py',
        'packages/python_compat_tarfile.py',
        'packages/tarfile.py',
        'python/tarfile.py',
        'packages/pc_tarfile.py',
        'packages/python_compat_urllib2.py',
        'packages/python_compat.py',
    ]
    if any(imap(lambda pat: pat in fn, external_pat_list)):
        return True
    return fn in external_fn_list
Example #8
def _check_imported_use(fn, list_from, code_str):
    # remove import lines for usage check
    def _is_import_or_comment(line):
        line = line.lstrip()
        return line.startswith('#') or line.startswith(
            'from ') or line.startswith('import ')

    code_str = str.join(
        '\n',
        ifilter(lambda line: not _is_import_or_comment(line),
                code_str.splitlines()))

    for imported in list_from:
        if ' as ' in imported:
            imported = imported.split(' as ')[1]

        def _chk(fmt):
            code_piece = fmt % imported
            return code_piece in code_str

        if any(
                imap(_chk, [
                    '%s(', '%s.', 'raise %s', '(%s)', '=%s', ' = %s', ' != %s',
                    'return %s', ', %s)', '(%s, ', 'except %s', ' %s,',
                    '\t%s,', '%s, [', '%s]', 'or %s', '%s not in'
                ])):
            continue
        if imported in ['backends', 'datasets']:
            continue
        logging.warning('%s superfluous %r', fn, imported)
Example #9
 def _accept_lumi(self, block, fi, idx_runs, idx_lumi, lumi_filter):
     if (idx_runs is None) or (idx_lumi is None):
         return True
     return any(
         imap(
             lambda run_lumi: select_lumi(run_lumi, lumi_filter),
             izip(fi[DataProvider.Metadata][idx_runs],
                  fi[DataProvider.Metadata][idx_lumi])))
Example #10
def getNodeParent(cls):
	cls_old = None
	while True:
		if (cls == cls_old) or any(imap(lambda x: x in cls.__bases__, [Plugin, ConfigurablePlugin, NamedPlugin])):
			break
		try:
			cls = cls.__bases__[0]
		except Exception:
			break
	return cls
Example #11
	def process(self, pNum, splitInfo, result):
		locations = self._filter.filterList(splitInfo.get(DataSplitter.Locations))
		if self._preference:
			if not locations: # [] or None
				locations = self._preference
			elif any(imap(lambda x: x in self._preference, locations)): # preferred location available
				locations = lfilter(lambda x: x in self._preference, locations)
		if self._reqs and (locations is not None):
			result[ParameterInfo.REQS].append((WMS.STORAGE, locations))
		if self._disable:
			result[ParameterInfo.ACTIVE] = result[ParameterInfo.ACTIVE] and (locations != [])
Example #12
def _get_node_parent(cls):
	cls_old = None
	base_cls_list = [Plugin, ConfigurablePlugin, NamedPlugin]
	while True:
		if (cls == cls_old) or any(imap(cls.__bases__.__contains__, base_cls_list)):
			break
		try:
			cls = cls.__bases__[0]
		except Exception:
			clear_current_exception()
			break
	return cls
Example #13
def getNodeParent(cls):
    cls_old = None
    while True:
        if (cls == cls_old) or any(
                imap(lambda x: x in cls.__bases__,
                     [Plugin, ConfigurablePlugin, NamedPlugin])):
            break
        try:
            cls = cls.__bases__[0]
        except Exception:
            break
    return cls
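The getNodeParent / _get_node_parent variants above walk up the single-inheritance chain through cls.__bases__[0] until the current class derives directly from one of the known plugin base classes. Below is a toy sketch of that walk with stand-in classes (not grid-control's Plugin hierarchy); unlike the originals, it also updates cls_old on every pass.

# Toy sketch of the base-class walk; Plugin/NamedPlugin/UserTask are stand-ins.
try:
	from itertools import imap  # Python 2
except ImportError:
	imap = map  # Python 3

class Plugin(object):
	pass

class NamedPlugin(Plugin):
	pass

class UserTask(NamedPlugin):
	pass

def get_node_parent(cls, stop_cls_list=(Plugin, NamedPlugin)):
	cls_old = None
	while True:
		# stop once cls no longer changes or directly derives from a stop class
		if (cls == cls_old) or any(imap(cls.__bases__.__contains__, stop_cls_list)):
			break
		cls_old = cls
		try:
			cls = cls.__bases__[0]
		except Exception:
			break
	return cls

print(get_node_parent(UserTask).__name__)  # 'UserTask' (directly derives from NamedPlugin)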
Example #14
	def isInteractive(self, option, default):
		if isinstance(option, list):
			user_option_exists = any(imap(lambda opt: opt in self.getOptions(), option))
		else:
			user_option_exists = option in self.getOptions()
		# global switch to enable / disable interactive option queries
		config_interactive = self.changeView(interfaceClass = TypedConfigInterface,
			viewClass = SimpleConfigView, setSections = ['interactive'])
		if self._interactive_enabled is None:
			self._interactive_enabled = config_interactive.getBool('default', True, onChange = None)
		icfg = config_interactive.getBool(appendOption(option, 'interactive'), self._interactive_enabled and default, onChange = None)
		return icfg and not user_option_exists
Example #15
def logging_setup(config):
    # Apply configuration to logging setup
    if config.get_bool('debug mode', False, on_change=None):
        config.set('level', 'NOTSET', '?=')
        config.set('detail lower limit', 'NOTSET', '?=')
        config.set('detail upper limit', 'NOTSET', '?=')
        config.set('abort handler', 'stdout debug_file', '?=')
        config.set_int('abort code context', 2, '?=')
        config.set_int('abort variables', 1000, '?=')
        config.set_int('abort file stack', 2, '?=')
        config.set_int('abort tree', 2, '?=')

    do_display_logger = config.get_bool('display logger',
                                        False,
                                        on_change=None)

    # Find logger names in options
    logger_names_set = set()
    for option in config.get_option_list():
        if any(
                imap(option.startswith,
                     ['debug mode', 'display logger', 'activity stream'])):
            pass
        elif option.count(' ') == 0:
            logger_names_set.add('')
        else:
            logger_names_set.add(option.split(' ')[0].strip())
    logger_names = sorted(logger_names_set)
    logger_names.reverse()
    for logger_name in logger_names:
        _logging_create_handlers(config, logger_name)

    logging.getLogger().addHandler(
        ProcessArchiveHandler(config.get_work_path('error.tar')))

    if do_display_logger:
        dump_log_setup(logging.WARNING)

    # Setup activity logs
    GCStreamHandler.push_std_stream(
        config.get_plugin(['activity stream', 'activity stream stdout'],
                          'DefaultActivityMonitor',
                          cls=ActivityMonitor,
                          require_plugin=False,
                          pargs=(sys.stdout, ),
                          on_change=None,
                          pkwargs={'register_callback': True}),
        config.get_plugin(['activity stream', 'activity stream stderr'],
                          'DefaultActivityMonitor',
                          cls=ActivityMonitor,
                          require_plugin=False,
                          pargs=(sys.stderr, ),
                          on_change=None))
Example #16
def _get_node_parent(cls):
    cls_old = None
    base_cls_list = [Plugin, ConfigurablePlugin, NamedPlugin]
    while True:
        if (cls == cls_old) or any(
                imap(cls.__bases__.__contains__, base_cls_list)):
            break
        try:
            cls = cls.__bases__[0]
        except Exception:
            clear_current_exception()
            break
    return cls
Example #17
	def process(self, pnum, partition_info, result):
		locations = self._filter.filter_list(partition_info.get(DataSplitter.Locations))
		if self._preference:
			if not locations:  # [] or None
				locations = self._preference
			elif any(imap(self._preference.__contains__, locations)):  # preferred location available
				locations = lfilter(self._preference.__contains__, locations)
		if (partition_info.get(DataSplitter.Locations) is None) and not locations:
			return
		if self._reqs and (locations is not None):
			result[ParameterInfo.REQS].append((WMS.STORAGE, locations))
		if self._disable:
			result[ParameterInfo.ACTIVE] = result[ParameterInfo.ACTIVE] and (locations != [])
Example #18
def getVersion():
    try:
        proc_ver = LocalProcess('svnversion', '-c', pathPKG())
        version = proc_ver.get_output(timeout=10).strip()
        if version != '':
            assert (any(imap(str.isdigit, version)))
            proc_branch = LocalProcess('svn info', pathPKG())
            if 'stable' in proc_branch.get_output(timeout=10):
                return '%s - stable' % version
            return '%s - testing' % version
    except Exception:
        pass
    return __import__('grid_control').__version__ + ' or later'
Example #19
def getVersion():
	try:
		proc_ver = LocalProcess('svnversion', '-c', pathPKG())
		version = proc_ver.get_output(timeout = 10).strip()
		if version != '':
			assert(any(imap(str.isdigit, version)))
			proc_branch = LocalProcess('svn info', pathPKG())
			if 'stable' in proc_branch.get_output(timeout = 10):
				return '%s - stable' % version
			return '%s - testing' % version
	except Exception:
		pass
	return __import__('grid_control').__version__ + ' or later'
Example #20
def download_job(opts, work_dn, status_mon, job_db, job_obj, jobnum):
	if job_obj.get('download') == 'True' and not opts.mark_ignore_dl:
		return status_mon.register_job_result(jobnum, 'All files already downloaded',
			JobDownloadStatus.JOB_ALREADY)

	# Read the file hash entries from job info file
	fi_list = FileInfoProcessor().process(os.path.join(work_dn, 'output', 'job_%d' % jobnum)) or []
	is_download_failed = False
	if not fi_list:
		if opts.mark_empty_fail:
			is_download_failed = True
		else:
			return status_mon.register_job_result(jobnum, 'Job has no output files',
				JobDownloadStatus.JOB_NO_OUTPUT)

	download_result_list = []
	progress = ProgressActivity('Processing output files', len(fi_list))
	for (fi_idx, fi) in enumerate(fi_list):
		progress.update_progress(fi_idx, msg='Processing output file %r' % fi[FileInfo.NameDest])
		download_result_list.append(download_single_file(opts, jobnum, fi_idx, fi, status_mon))
	progress.finish()

	is_download_failed = is_download_failed or any(imap(download_result_list.__contains__, [
		FileDownloadStatus.FILE_TIMEOUT, FileDownloadStatus.FILE_HASH_FAILED,
		FileDownloadStatus.FILE_TRANSFER_FAILED, FileDownloadStatus.FILE_MKDIR_FAILED]))
	is_download_success = all(imap([FileDownloadStatus.FILE_OK,
		FileDownloadStatus.FILE_EXISTS].__contains__, download_result_list))

	# Ignore the first opts.retry number of failed jobs
	retry_count = int(job_obj.get('download attempt', 0))
	if fi_list and is_download_failed and opts.retry and (retry_count < int(opts.retry)):
		set_job_prop(job_db, jobnum, job_obj, 'download attempt', str(retry_count + 1))
		return status_mon.register_job_result(jobnum, 'Download attempt #%d failed' % (retry_count + 1),
			JobDownloadStatus.RETRY)

	delete_files(opts, jobnum, fi_list, is_download_failed)

	if is_download_failed:
		if opts.mark_fail:
			# Mark job as failed to trigger resubmission
			job_obj.state = Job.FAILED
			job_db.commit(jobnum, job_obj)
		status_mon.register_job_result(jobnum, 'Download failed', JobDownloadStatus.JOB_FAILED)
	elif is_download_success:
		if opts.mark_dl:
			# Mark as downloaded
			set_job_prop(job_db, jobnum, job_obj, 'download', 'True')
		status_mon.register_job_result(jobnum, 'Download successful', JobDownloadStatus.JOB_OK)
	else:
		# eg. because of SE blacklist
		status_mon.register_job_result(jobnum, 'Download incomplete', JobDownloadStatus.JOB_INCOMPLETE)
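The download_job snippets pair the idiom with its all(...) counterpart: the job counts as failed if any per-file result is one of the failure states, and as fully downloaded only if every result is FILE_OK or FILE_EXISTS. Below is a hedged sketch of that check with stand-in status values, not grid-control's actual FileDownloadStatus enum.

# Sketch of the any/all check over per-file download results; status values are stand-ins.
try:
	from itertools import imap  # Python 2
except ImportError:
	imap = map  # Python 3

FILE_OK, FILE_EXISTS, FILE_TIMEOUT, FILE_HASH_FAILED = range(4)

def classify(download_result_list):
	# any failure status present -> failed; every result OK/EXISTS -> success
	is_failed = any(imap(download_result_list.__contains__,
		[FILE_TIMEOUT, FILE_HASH_FAILED]))
	is_success = all(imap([FILE_OK, FILE_EXISTS].__contains__, download_result_list))
	if is_failed:
		return 'failed'
	if is_success:
		return 'success'
	return 'incomplete'

print(classify([FILE_OK, FILE_EXISTS]))   # success
print(classify([FILE_OK, FILE_TIMEOUT]))  # failed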
Example #21
	def is_interactive(self, option, default):
		option_list_all = self.get_option_list()
		if isinstance(option, list):
			user_option_exists = any(imap(option_list_all.__contains__, option))
		else:
			user_option_exists = option in option_list_all
		# global switch to enable / disable interactive option queries
		config_interactive = self.change_view(interface_cls=TypedConfigInterface,
			view_class=SimpleConfigView, set_sections=['interactive'])
		if self._interactive_enabled is None:
			self._interactive_enabled = config_interactive.get_bool('default', True, on_change=None)
		icfg = config_interactive.get_bool(join_config_locations(option, 'interactive'),
			self._interactive_enabled and default, on_change=None)
		return icfg and not user_option_exists
Example #22
 def process(self, pNum, splitInfo, result):
     locations = self._filter.filterList(
         splitInfo.get(DataSplitter.Locations))
     if self._preference:
         if not locations:  # [] or None
             locations = self._preference
         elif any(imap(lambda x: x in self._preference,
                       locations)):  # preferred location available
             locations = lfilter(lambda x: x in self._preference, locations)
     if self._reqs and (locations is not None):
         result[ParameterInfo.REQS].append((WMS.STORAGE, locations))
     if self._disable:
         result[ParameterInfo.ACTIVE] = result[ParameterInfo.ACTIVE] and (
             locations != [])
Example #23
def sort_file(fn):
	fp = open(fn)
	fn_lines_all = fp.readlines()
	fp.close()
	if not lfilter(lambda x: x, fn_lines_all):
		return
	fn_lines = iter(fn_lines_all)

	fn_depth_map = {-1: []}
	for line in fn_lines:
		old_depth = max(fn_depth_map)

		if not line.strip():  # empty lines - belong to current depth
			fn_depth_map[old_depth].append(line)
			continue
		keyword_list = ['classmethod', 'staticmethod', 'make_enum', '# <global-state>', '# <alias>']

		def _match_keywords(keyword):
			return (keyword in line) and (not 'return %s' % keyword in line)
		if any(lmap(_match_keywords, keyword_list)):
			fn_depth_map[old_depth].append(line)
			continue
		cur_depth = line.replace(line.lstrip(), '').count('\t')  # indent depth

		if cur_depth >= old_depth:  # handle same depth
			fn_depth_map.setdefault(cur_depth, []).append(line)
			continue

		while True:
			depth = max(fn_depth_map)
			if depth <= cur_depth:
				break
			fn_depth_map.setdefault(depth - 1, []).append(fn_depth_map.pop(depth))
		fn_depth_map.setdefault(cur_depth, []).append(line)

	fn_depth_list = sorted(fn_depth_map)
	fn_depth_list.reverse()
	for depth in fn_depth_list:
		if depth > 0:
			fn_depth_map[depth - 1].append(fn_depth_map.pop(depth))

	fp = open(fn + '.unsorted', 'w')
	fp.write(open(fn).read())
	fp.close()
	unfold(open(fn, 'w'), collect_and_sort_onelevel(merge(fn_depth_map[0]), True))
	os.unlink(fn + '.unsorted')
Example #24
 def isInteractive(self, option, default):
     if isinstance(option, list):
         user_option_exists = any(
             imap(lambda opt: opt in self.getOptions(), option))
     else:
         user_option_exists = option in self.getOptions()
     # global switch to enable / disable interactive option queries
     config_interactive = self.changeView(
         interfaceClass=TypedConfigInterface,
         viewClass=SimpleConfigView,
         setSections=['interactive'])
     if self._interactive_enabled is None:
         self._interactive_enabled = config_interactive.getBool(
             'default', True, onChange=None)
     icfg = config_interactive.getBool(appendOption(option, 'interactive'),
                                       self._interactive_enabled
                                       and default,
                                       onChange=None)
     return icfg and not user_option_exists
Example #25
 def is_interactive(self, option, default):
     option_list_all = self.get_option_list()
     if isinstance(option, list):
         user_option_exists = any(imap(option_list_all.__contains__,
                                       option))
     else:
         user_option_exists = option in option_list_all
     # global switch to enable / disable interactive option queries
     config_interactive = self.change_view(
         interface_cls=TypedConfigInterface,
         view_class=SimpleConfigView,
         set_sections=['interactive'])
     if self._interactive_enabled is None:
         self._interactive_enabled = config_interactive.get_bool(
             'default', True, on_change=None)
     icfg = config_interactive.get_bool(join_config_locations(
         option, 'interactive'),
                                        self._interactive_enabled
                                        and default,
                                        on_change=None)
     return icfg and not user_option_exists
Example #26
def logging_setup(config):
	# Apply configuration to logging setup
	if config.get_bool('debug mode', False, on_change=None):
		config.set('level', 'NOTSET', '?=')
		config.set('detail lower limit', 'NOTSET', '?=')
		config.set('detail upper limit', 'NOTSET', '?=')
		config.set('abort handler', 'stdout debug_file', '?=')
		config.set_int('abort code context', 2, '?=')
		config.set_int('abort variables', 1000, '?=')
		config.set_int('abort file stack', 2, '?=')
		config.set_int('abort tree', 2, '?=')

	do_display_logger = config.get_bool('display logger', False, on_change=None)

	# Find logger names in options
	logger_names_set = set()
	for option in config.get_option_list():
		if any(imap(option.startswith, ['debug mode', 'display logger', 'activity stream'])):
			pass
		elif option.count(' ') == 0:
			logger_names_set.add('')
		else:
			logger_names_set.add(option.split(' ')[0].strip())
	logger_names = sorted(logger_names_set)
	logger_names.reverse()
	for logger_name in logger_names:
		logging_create_handlers(config, logger_name)

	logging.getLogger().addHandler(ProcessArchiveHandler(config.get_work_path('error.tar')))

	if do_display_logger:
		dump_log_setup(logging.WARNING)

	# Setup activity logs
	GCStreamHandler.push_std_stream(
		config.get_plugin(['activity stream', 'activity stream stdout'], 'DefaultActivityMonitor',
			cls=ActivityMonitor, require_plugin=False, pargs=(sys.stdout,), on_change=None,
			pkwargs={'register_callback': True}),
		config.get_plugin(['activity stream', 'activity stream stderr'], 'DefaultActivityMonitor',
			cls=ActivityMonitor, require_plugin=False, pargs=(sys.stderr,), on_change=None))
Example #27
	def enabled(self):
		return any(imap(lambda x: x > 0, [self._wt_factor, self._ct_factor, self._mem_factor,
			self._wt_offset, self._ct_offset, self._mem_offset]))
Example #28
 def enabled(self):
     return any(
         imap(lambda x: x > 0, [
             self._wtfactor, self._ctfactor, self._memfactor,
             self._wtoffset, self._ctoffset, self._memoffset
         ]))
Example #29
	def _is_not_ignored_vn(self, vn):
		vn_list = ['GC_', 'SEED_', 'DATASET', 'FILE_NAMES', 'JOB_RANDOM', 'SKIP_EVENTS', 'MAX_EVENTS']
		return ('NICK' in vn) or not any(imap(vn.startswith, vn_list))
Example #30
		def is_on_list(line, lst):
			return any(imap(lambda entry: entry in line, lst))
Example #31
	def enabled(self):
		return any(imap(lambda x: x > 0, [self._wtfactor, self._ctfactor, self._memfactor,
			self._wtoffset, self._ctoffset, self._memoffset]))
Example #32
		def _is_on_list(line, lst):
			return any(imap(line.__contains__, lst))
Example #33
def accepted_se(opts, fi):
	return any(imap(fi[FileInfo.Path].__contains__, opts.select_se)) or not opts.select_se
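accepted_se combines the idiom with an "empty selection accepts everything" rule: if no storage elements were selected, the trailing `or not opts.select_se` makes the filter pass. Below is a simplified sketch with plain arguments instead of grid-control's opts / FileInfo objects.

# Sketch of the accepted_se logic; the arguments and paths are illustrative stand-ins.
try:
	from itertools import imap  # Python 2
except ImportError:
	imap = map  # Python 3

def accepted_se(select_se, path):
	# accept if any selected SE substring occurs in the path, or if nothing was selected
	return any(imap(path.__contains__, select_se or [])) or not select_se

print(accepted_se([], '/pnfs/dcache/job_1/out.root'))          # True (no selection)
print(accepted_se(['dcache'], '/pnfs/dcache/job_1/out.root'))  # True (match)
print(accepted_se(['eos'], '/pnfs/dcache/job_1/out.root'))     # False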
Example #34
def download_job(opts, work_dn, status_mon, job_db, job_obj, jobnum):
    if job_obj.get('download') == 'True' and not opts.mark_ignore_dl:
        return status_mon.register_job_result(jobnum,
                                              'All files already downloaded',
                                              JobDownloadStatus.JOB_ALREADY)

    # Read the file hash entries from job info file
    fi_list = FileInfoProcessor().process(
        os.path.join(work_dn, 'output', 'job_%d' % jobnum)) or []
    is_download_failed = False
    if not fi_list:
        if opts.mark_empty_fail:
            is_download_failed = True
        else:
            return status_mon.register_job_result(
                jobnum, 'Job has no output files',
                JobDownloadStatus.JOB_NO_OUTPUT)

    download_result_list = []
    progress = ProgressActivity('Processing output files', len(fi_list))
    for (fi_idx, fi) in enumerate(fi_list):
        progress.update_progress(fi_idx,
                                 msg='Processing output file %r' %
                                 fi[FileInfo.NameDest])
        download_result_list.append(
            download_single_file(opts, jobnum, fi_idx, fi, status_mon))
    progress.finish()

    is_download_failed = is_download_failed or any(
        imap(download_result_list.__contains__, [
            FileDownloadStatus.FILE_TIMEOUT,
            FileDownloadStatus.FILE_HASH_FAILED,
            FileDownloadStatus.FILE_TRANSFER_FAILED,
            FileDownloadStatus.FILE_MKDIR_FAILED
        ]))
    is_download_success = all(
        imap([FileDownloadStatus.FILE_OK,
              FileDownloadStatus.FILE_EXISTS].__contains__,
             download_result_list))

    # Ignore the first opts.retry number of failed jobs
    retry_count = int(job_obj.get('download attempt', 0))
    if fi_list and is_download_failed and opts.retry and (retry_count < int(
            opts.retry)):
        set_job_prop(job_db, jobnum, job_obj, 'download attempt',
                     str(retry_count + 1))
        return status_mon.register_job_result(
            jobnum, 'Download attempt #%d failed' % (retry_count + 1),
            JobDownloadStatus.RETRY)

    delete_files(opts, jobnum, fi_list, is_download_failed)

    if is_download_failed:
        if opts.mark_fail:
            # Mark job as failed to trigger resubmission
            job_obj.state = Job.FAILED
            job_db.commit(jobnum, job_obj)
        status_mon.register_job_result(jobnum, 'Download failed',
                                       JobDownloadStatus.JOB_FAILED)
    elif is_download_success:
        if opts.mark_dl:
            # Mark as downloaded
            set_job_prop(job_db, jobnum, job_obj, 'download', 'True')
        status_mon.register_job_result(jobnum, 'Download successful',
                                       JobDownloadStatus.JOB_OK)
    else:
        # eg. because of SE blacklist
        status_mon.register_job_result(jobnum, 'Download incomplete',
                                       JobDownloadStatus.JOB_INCOMPLETE)
Example #35
def accepted_se(opts, fi):
    return any(imap(fi[FileInfo.Path].__contains__,
                    opts.select_se)) or not opts.select_se
Example #36
 def _accept_run(self, block, fi, idx_runs, lumi_filter):
     if idx_runs is None:
         return True
     return any(
         imap(lambda run: select_run(run, lumi_filter),
              fi[DataProvider.Metadata][idx_runs]))
Example #37
	def _accept_run(self, block, fi, idx_runs, lumi_filter):
		if idx_runs is None:
			return True
		return any(imap(lambda run: select_run(run, lumi_filter), fi[DataProvider.Metadata][idx_runs]))
Example #38
	def _accept_lumi(self, block, fi, idx_runs, idx_lumi, lumi_filter):
		if (idx_runs is None) or (idx_lumi is None):
			return True
		return any(imap(lambda run_lumi: select_lumi(run_lumi, lumi_filter),
			izip(fi[DataProvider.Metadata][idx_runs], fi[DataProvider.Metadata][idx_lumi])))