Exemplo n.º 1
0
def lumi_calc(opts, work_dn, jobnum_list, reader):
	"""Aggregate and report the luminosity sections processed by a set of jobs.

	opts: parsed command line options (job_events / job_json / job_gc flags,
		output_dir) -- presumably an optparse/argparse-style namespace; TODO confirm
	work_dn: working directory, used as fallback output location for JSON files
	jobnum_list: job numbers whose output should be inspected
	reader: job-output reader, passed through to process_jobs
	"""
	# Lumi filter calculations
	(map_sample2run_info_dict, map_sample2input_events, map_sample2output_events) = process_jobs(opts,
		work_dn, jobnum_list, reader)

	activity = Activity('Simplifying lumi sections')
	map_sample2run_lumi_range = {}
	# Turn every (run, lumi) pair into a degenerate range so merge_lumi_list
	# can afterwards collapse adjacent sections into compact ranges per sample.
	for sample in map_sample2run_info_dict:
		for run in map_sample2run_info_dict[sample]:
			for lumi in map_sample2run_info_dict[sample][run]:
				map_sample2run_lumi_range.setdefault(sample, []).append(([run, lumi], [run, lumi]))
	for sample in map_sample2run_info_dict:
		map_sample2run_lumi_range[sample] = merge_lumi_list(map_sample2run_lumi_range[sample])
	activity.finish()

	for sample, lumi_list in map_sample2run_lumi_range.items():
		if opts.job_events:
			# Per-file event count table; the '=' entry presumably renders as a
			# separator row before the total line -- TODO confirm in ConsoleTable
			if map_sample2output_events.get(sample):
				LOG.info('')
			display_dict_list = lmap(lambda pfn: {0: pfn, 1: map_sample2output_events[sample][pfn]},
				map_sample2output_events.get(sample, {}))
			if display_dict_list:
				display_dict_list.append('=')
			display_dict_list += [{0: 'Processed in total', 1: map_sample2input_events.get(sample)}]
			ConsoleTable.create([(0, ''), (1, '#Events')], display_dict_list,
				title='Sample: %s' % sample)
		if opts.job_json:
			# Write the merged lumi ranges for this sample as a JSON file
			json_fn = os.path.join(opts.output_dir or work_dn, 'processed_%s.json' % sample)
			with_file(SafeFile(json_fn, 'w'), partial(write_lumi_json, lumi_list))
			LOG.info('Saved processed lumi sections in %s', json_fn)
		if opts.job_gc:
			# Print the ranges in grid-control's own lumi filter format
			LOG.info('\nList of processed lumisections\n' + '-' * 30)
			write_lumi_gc(lumi_list)
		LOG.info('')
Exemplo n.º 2
0
def logfile_decode(fn):
	"""Replay the log file *fn* through the 'script' logger, transparently
	inflating lines that were stored as base64-encoded gzip payloads."""
	def _emit(fp):
		log = logging.getLogger('script')
		for raw in fp.readlines():
			# Lines tagged '(B64) ' carry a gzip-compressed payload
			if raw.startswith('(B64) '):
				packed = BytesBuffer(base64.b64decode(raw.replace('(B64) ', '')))
				raw = gzip.GzipFile(fileobj=packed).read().decode('ascii')
			log.info(raw.rstrip())
	if not fn.endswith('.gz'):
		with_file(SafeFile(fn), _emit)
	else:
		with_file(GZipTextFile(fn, 'r'), _emit)
Exemplo n.º 3
0
	def _log_disabled_jobs(self):
		"""Persist the list of disabled jobs to the log file and warn about them."""
		disabled_jobs = self.job_db.get_job_list(ClassSelector(JobClass.DISABLED))
		def _dump(fp):
			fp.write('\n'.join(imap(str, disabled_jobs)))
		try:
			with_file(SafeFile(self._disabled_jobs_logfile, 'w'), _dump)
		except Exception:
			raise JobError('Could not write disabled jobs to file %s!' % self._disabled_jobs_logfile)
		if not disabled_jobs:
			return
		self._log.log_time(logging.WARNING, 'There are %d disabled jobs in this task!', len(disabled_jobs))
		self._log.log_time(logging.DEBUG,
			'Please refer to %s for a complete list of disabled jobs.', self._disabled_jobs_logfile)
Exemplo n.º 4
0
def logfile_decode(fn):
    """Write the content of log file *fn* to the 'script' logger, expanding
    any '(B64) '-prefixed lines (base64 + gzip) back into plain text."""
    def _replay(fp):
        for entry in fp.readlines():
            if entry.startswith('(B64) '):
                # Strip the tag, undo base64, then decompress the gzip blob
                blob = base64.b64decode(entry.replace('(B64) ', ''))
                entry = gzip.GzipFile(fileobj=BytesBuffer(blob)).read().decode('ascii')
            logging.getLogger('script').info(entry.rstrip())

    # Gzipped logs need the decompressing file wrapper
    with_file(GZipTextFile(fn, 'r') if fn.endswith('.gz') else SafeFile(fn), _replay)
Exemplo n.º 5
0
 def _log_disabled_jobs(self):
     """Write all disabled jobs to the configured log file and warn if any exist."""
     selector = ClassSelector(JobClass.DISABLED)
     jobs_off = self.job_db.get_job_list(selector)
     try:
         with_file(
             SafeFile(self._disabled_jobs_logfile, 'w'),
             lambda fp: fp.write(str.join('\n', imap(str, jobs_off))))
     except Exception:
         raise JobError('Could not write disabled jobs to file %s!' %
                        self._disabled_jobs_logfile)
     if jobs_off:
         # Point the user at the full list via the log file
         self._log.log_time(logging.WARNING,
                            'There are %d disabled jobs in this task!',
                            len(jobs_off))
         self._log.log_time(logging.DEBUG,
                            'Please refer to %s for a complete list of disabled jobs.',
                            self._disabled_jobs_logfile)
Exemplo n.º 6
0
def lumi_calc(opts, work_dn, jobnum_list, reader):
    """Aggregate and report the luminosity sections processed by a set of jobs.

    opts: parsed command line options (job_events / job_json / job_gc flags,
        output_dir) -- presumably an optparse/argparse-style namespace; TODO confirm
    work_dn: working directory, used as fallback output location for JSON files
    jobnum_list: job numbers whose output should be inspected
    reader: job-output reader, passed through to process_jobs
    """
    # Lumi filter calculations
    (map_sample2run_info_dict, map_sample2input_events,
     map_sample2output_events) = process_jobs(opts, work_dn, jobnum_list,
                                              reader)

    activity = Activity('Simplifying lumi sections')
    map_sample2run_lumi_range = {}
    # Turn every (run, lumi) pair into a degenerate range so merge_lumi_list
    # can afterwards collapse adjacent sections into compact ranges per sample.
    for sample in map_sample2run_info_dict:
        for run in map_sample2run_info_dict[sample]:
            for lumi in map_sample2run_info_dict[sample][run]:
                map_sample2run_lumi_range.setdefault(sample, []).append(
                    ([run, lumi], [run, lumi]))
    for sample in map_sample2run_info_dict:
        map_sample2run_lumi_range[sample] = merge_lumi_list(
            map_sample2run_lumi_range[sample])
    activity.finish()

    for sample, lumi_list in map_sample2run_lumi_range.items():
        if opts.job_events:
            # Per-file event count table; the '=' entry presumably renders as
            # a separator row before the total -- TODO confirm in ConsoleTable
            if map_sample2output_events.get(sample):
                LOG.info('')
            display_dict_list = lmap(
                lambda pfn: {
                    0: pfn,
                    1: map_sample2output_events[sample][pfn]
                }, map_sample2output_events.get(sample, {}))
            if display_dict_list:
                display_dict_list.append('=')
            display_dict_list += [{
                0: 'Processed in total',
                1: map_sample2input_events.get(sample)
            }]
            ConsoleTable.create([(0, ''), (1, '#Events')],
                                display_dict_list,
                                title='Sample: %s' % sample)
        if opts.job_json:
            # Write the merged lumi ranges for this sample as a JSON file
            json_fn = os.path.join(opts.output_dir or work_dn,
                                   'processed_%s.json' % sample)
            with_file(SafeFile(json_fn, 'w'),
                      partial(write_lumi_json, lumi_list))
            LOG.info('Saved processed lumi sections in %s', json_fn)
        if opts.job_gc:
            # Print the ranges in grid-control's own lumi filter format
            LOG.info('\nList of processed lumisections\n' + '-' * 30)
            write_lumi_gc(lumi_list)
        LOG.info('')
Exemplo n.º 7
0
	def read_from_disk(filename):
		"""Load a pickled object from *filename* and return it."""
		stored = SafeFile(filename)
		return with_file(stored, pickle.load)
Exemplo n.º 8
0
	def _write_file(self, fn, msg=None, **kwargs):
		"""Write an optional preamble *msg* followed by the rendered view to file *fn*."""
		def _render(fp):
			# Preamble first (if any), then delegate the rest to the view
			if msg is not None:
				fp.write(msg)
			self._view.write(fp, **kwargs)
		with_file(SafeFile(fn, 'w'), _render)
Exemplo n.º 9
0
	def commit(self, jobnum, job_obj):
		"""Serialize *job_obj* into its per-job text file and cache it in memory."""
		job_fn = os.path.join(self._path_db, 'job_%d.txt' % jobnum)
		def _store(fp):
			fp.writelines(self._fmt.format(self._serialize_job_obj(job_obj)))
		with_file(SafeFile(job_fn, 'w'), _store)
		self._job_map[jobnum] = job_obj
Exemplo n.º 10
0
 def _check_write_stack_log():
     """Overwrite 'gc_debug_stack.log' with a dump of all thread stacks,
     but only if that file already exists."""
     if not os.path.exists('gc_debug_stack.log'):
         return
     def _dump_stacks(fp):
         DebugInterface(stream=fp).show_stack(thread_id='all')
     with_file(SafeFile('gc_debug_stack.log', 'w'), _dump_stacks)
Exemplo n.º 11
0
 def read_from_disk(filename):
     """Restore a previously pickled object from *filename*."""
     stored = SafeFile(filename)
     return with_file(stored, pickle.load)
Exemplo n.º 12
0
 def save_to_disk(self, filename):
     """Pickle this object into *filename*."""
     dump_self = partial(pickle.dump, self)
     with_file(SafeFile(filename, 'w'), dump_self)
Exemplo n.º 13
0
	def _check_write_stack_log():
		"""Overwrite 'gc_debug_stack.log' with a stack dump of every thread,
		but only if that file already exists."""
		if os.path.exists('gc_debug_stack.log'):
			_dump = lambda fp: DebugInterface(stream=fp).show_stack(thread_id='all')
			with_file(SafeFile('gc_debug_stack.log', 'w'), _dump)
Exemplo n.º 14
0
def dump_dbs3_json(dn, block_dump_iter):
    """Store every block dump from *block_dump_iter* as a JSON file in *dn*.

    The file name is derived from the block name with slashes flattened
    to underscores."""
    for dump in block_dump_iter:
        base_name = dump['block']['block_name'].strip('/').replace('/', '_')
        target_fn = os.path.join(dn, base_name + '.json')
        with_file(SafeFile(target_fn, 'w'), partial(json.dump, dump))
Exemplo n.º 15
0
	def _show_image(self, name, buffer):
		"""Write the accumulated image bytes from *buffer* to the binary file *name*."""
		def _save(fp):
			fp.write(buffer.getvalue())
		with_file(SafeFile(name, 'wb'), _save)
Exemplo n.º 16
0
def dump_dbs3_json(dn, block_dump_iter):
	"""Write each block dump as a JSON file (named after its block name,
	with slashes flattened to underscores) into directory *dn*."""
	for block_dump in block_dump_iter:
		base_name = block_dump['block']['block_name'].strip('/').replace('/', '_')
		target_fn = os.path.join(dn, base_name + '.json')
		with_file(SafeFile(target_fn, 'w'), partial(json.dump, block_dump))
Exemplo n.º 17
0
 def _show_image(self, name, buffer):
     """Dump the contents of the image *buffer* to the binary file *name*."""
     def _write_image(fp):
         fp.write(buffer.getvalue())
     with_file(SafeFile(name, 'wb'), _write_image)
Exemplo n.º 18
0
	def save_to_disk(self, filename):
		"""Serialize this instance into *filename* using pickle."""
		with_file(SafeFile(filename, 'w'), lambda fp: pickle.dump(self, fp))
Exemplo n.º 19
0
 def commit(self, jobnum, job_obj):
     """Write *job_obj* to its file in the job database directory and cache it."""
     target_fn = os.path.join(self._path_db, 'job_%d.txt' % jobnum)
     def _write_job(fp):
         # Serialize inside the writer so the file is only touched on success
         fp.writelines(self._fmt.format(self._serialize_job_obj(job_obj)))
     with_file(SafeFile(target_fn, 'w'), _write_job)
     self._job_map[jobnum] = job_obj