def _iter_datasource_items(self, item, metadata_dict, entries, location_list, obj_dict):
	# Forward the datasource item after enriching its metadata with the job environment.
	# Only possible when both the task object and the job number are available.
	if ('GC_TASK' in obj_dict) and ('GC_JOBNUM' in metadata_dict):
		task = obj_dict['GC_TASK']
		env = task.get_job_dict(metadata_dict['GC_JOBNUM'])
		# Mirror aliased variables under their new names (missing sources map to None)
		for (alias, source) in task.get_var_alias_map().items():
			env[alias] = env.get(source)
		ignored = self._ignore_vars
		metadata_dict.update(filter_dict(env, key_filter=lambda key: key not in ignored))
	yield (item, metadata_dict, entries, location_list, obj_dict)
def _check_map_name2key(self, map_key2name, map_key2metadata_dict):
	# Find name <-> key collisions: warn when several dataset/block keys map to the same name.
	# map_key2name: hash-key tuple -> display name; a 1-element key denotes a dataset,
	# longer keys denote blocks. map_key2metadata_dict: key -> metadata used for the hash.
	map_type2name2key_list = {}
	for (key, name) in map_key2name.items():
		if len(key) == 1:
			key_type = 'dataset'
		else:
			key_type = 'block'
		map_type2name2key_list.setdefault(key_type, {}).setdefault(name, []).append(key)
	collision = False
	# Variables that went into each hash - displayed so the user can resolve the collision
	map_key_type2vn_list = {
		'dataset': self._hash_input_set_dataset,
		'block': self._hash_input_set_dataset + self._hash_input_set_block,
	}
	for (key_type, vn_list) in map_key_type2vn_list.items():
		for (name, key_list) in map_type2name2key_list.get(key_type, {}).items():
			if len(key_list) > 1:
				self._log.warn('Multiple %s keys are mapped to the name %s!', key_type, repr(name))
				for idx, key in enumerate(sorted(key_list)):
					self._log.warn('\tCandidate #%d with key %r:', idx + 1, str.join('#', key))
					metadata_dict = map_key2metadata_dict[key]
					for (vn, value) in filter_dict(metadata_dict, key_filter=vn_list.__contains__).items():
						self._log.warn('\t\t%s = %s', vn, value)
				collision = True
	if self._interactive_assignment and collision:
		if not UserInputInterface().prompt_bool('Do you want to continue?', False):
			# bugfix: os.EX_OK is Unix-only (AttributeError on Windows); its value is 0
			sys.exit(0)
def execute(self, wms_id_list):  # yields list of (wms_id, job_status, job_info)
	# Query the backend status for every WMS id; errors are collected and raised at the end.
	collector = ExceptionCollector()
	for wms_id in wms_id_list:
		try:
			raw_info = dict(self._status_fun(wms_id))
			job_info = filter_dict(raw_info, value_filter=lambda entry: entry not in ['', '0'])
			job_info[CheckInfo.RAW_STATUS] = job_info.pop('status', '').lower()
			if 'destination' in job_info:
				try:
					# '<site>/<queue>' - SITE may still get set even if QUEUE parsing fails
					parts = job_info['destination'].split('/', 1)
					job_info[CheckInfo.SITE] = parts[0].strip()
					job_info[CheckInfo.QUEUE] = parts[1].strip()
				except Exception:
					clear_current_exception()  # best effort - keep whatever was parsed
			job_status = self._status_map.get(job_info[CheckInfo.RAW_STATUS], Job.UNKNOWN)
			yield (wms_id, job_status, job_info)
		except Exception:
			collector.collect()
			if abort():
				break
	collector.raise_any(BackendError('Encountered errors while checking job status'))
def _publish(self, job_obj, jobnum, task_id, usermsg):
	# Derive the dashboard job identifier from the gc_id ('<prefix>.<backend>.<wms_id>')
	(_, backend, wms_id) = job_obj.gc_id.split('.', 2)
	if 'http' in job_obj.gc_id:
		dash_id = '%s_%s' % (jobnum, wms_id)
	else:
		# non-URL wms ids get a synthesized https:// style identifier
		dash_id = '%s_https://%s:/%s' % (jobnum, backend, wms_id)
	base_msg = {'taskId': task_id, 'jobId': dash_id, 'sid': wms_id}
	msg = dict_union(base_msg, *usermsg)
	# identity as value_filter drops all falsy entries before publishing
	DashboardAPI(task_id, dash_id).publish(**filter_dict(msg, value_filter=identity))
def _set_msg(self, **kwargs):
	# Merge all keywords with a set value into the activity message state (None = unset)
	updates = filter_dict(kwargs, value_filter=lambda value: value is not None)
	self._msg_dict.update(updates)
	for callback in Activity.callbacks:
		callback()
	# NOTE(review): the _log flag gates emission through _logger - confirm pairing is intended
	if self._log:
		self._logger.log(self._level, self.get_msg())
def get_job_content(self, jobnum, pnum=None):
	# Collect the parameter content for a job; the parameter number defaults to the job number
	if jobnum is None:
		raise APIError('Unable to process job number None!')
	if pnum is None:
		pnum = jobnum
	result = {
		ParameterInfo.ACTIVE: True,
		ParameterInfo.REQS: [],
		'GC_JOB_ID': jobnum,
		'GC_PARAM': pnum,
	}
	self._psrc.fill_parameter_content(pnum, result)
	# Strip empty-string values from the final content
	return filter_dict(result, value_filter=lambda entry: entry != '')
def _iter_datasource_items(self, item, metadata_dict, entries, location_list, obj_dict):
	# Merge job environment variables into the item metadata before passing it on
	if ('GC_TASK' in obj_dict) and ('GC_JOBNUM' in metadata_dict):
		job_env = obj_dict['GC_TASK'].get_job_dict(metadata_dict['GC_JOBNUM'])
		alias_map = obj_dict['GC_TASK'].get_var_alias_map()
		# Expose each aliased variable under its new name as well
		for (new_name, old_name) in alias_map.items():
			job_env[new_name] = job_env.get(old_name)
		metadata_dict.update(filter_dict(job_env,
			key_filter=lambda vn: vn not in self._ignore_vars))
	yield (item, metadata_dict, entries, location_list, obj_dict)
def _publish(self, job_obj, jobnum, task_id, usermsg):
	# gc_id layout: '<prefix>.<backend>.<wms_id>'
	(_, backend, wms_id) = job_obj.gc_id.split('.', 2)
	if 'http' not in job_obj.gc_id:
		# synthesize an URL-style identifier for non-URL backends
		dash_id = '%s_https://%s:/%s' % (jobnum, backend, wms_id)
	else:
		dash_id = '%s_%s' % (jobnum, wms_id)
	merged_msg = dict_union({'taskId': task_id, 'jobId': dash_id, 'sid': wms_id}, *usermsg)
	payload = filter_dict(merged_msg, value_filter=identity)  # drop falsy values
	DashboardAPI(task_id, dash_id).publish(**payload)
def _submit_job(self, jobnum, task):  # Submit job and yield (jobnum, WMS ID, other data)
	# Write the job description to a temporary jdl file first
	(jdl_fd, jdl_fn) = tempfile.mkstemp('.jdl')
	try:
		jdl_line_list = self._make_jdl(jobnum, task)
		safe_write(os.fdopen(jdl_fd, 'w'), jdl_line_list)
	except Exception:
		remove_files([jdl_fn])
		raise BackendError('Could not write jdl data to %s.' % jdl_fn)
	try:
		# Assemble the submission command line from all configured (non-empty) options
		submit_arg_list = []
		for option_value in filter_dict(self._submit_args_dict, value_filter=identity).items():
			submit_arg_list.extend(option_value)
		submit_arg_list.append(jdl_fn)
		activity = Activity('submitting job %d' % jobnum)
		proc = LocalProcess(self._submit_exec, '--nomsg', '--noint', '--logfile',
			'/dev/stderr', *submit_arg_list)
		# The submission tool prints the WMS id as an http(s) URL - remember the last match
		wms_id = None
		stripped_lines = imap(str.strip, proc.stdout.iter(timeout=60))
		for line in ifilter(lambda entry: entry.startswith('http'), stripped_lines):
			wms_id = line
		ret_code = proc.status(timeout=0, terminate=True)
		activity.finish()
		if (ret_code != 0) or (wms_id is None):
			# Report the failed submission unless the error could be explained
			if not self._explain_error(proc, ret_code):
				self._log.log_process(proc, files={'jdl': SafeFile(jdl_fn).read()})
	finally:
		remove_files([jdl_fn])
	return (jobnum, self._create_gc_id(wms_id), {'jdl': str.join('', jdl_line_list)})
def _submit_job(self, jobnum, task):  # Submit job and yield (jobnum, WMS ID, other data)
	(jdl_fd, jdl_fn) = tempfile.mkstemp('.jdl')
	# Step 1: materialize the job description; clean up the temp file on failure
	try:
		jdl_line_list = self._make_jdl(jobnum, task)
		safe_write(os.fdopen(jdl_fd, 'w'), jdl_line_list)
	except Exception:
		remove_files([jdl_fn])
		raise BackendError('Could not write jdl data to %s.' % jdl_fn)
	# Step 2: run the submission tool; the temp file is always removed afterwards
	try:
		submit_arg_list = []
		# Only forward options whose configured value is truthy
		for key_value in filter_dict(self._submit_args_dict, value_filter=identity).items():
			submit_arg_list.extend(key_value)
		submit_arg_list.append(jdl_fn)
		activity = Activity('submitting job %d' % jobnum)
		proc = LocalProcess(self._submit_exec, '--nomsg', '--noint', '--logfile',
			'/dev/stderr', *submit_arg_list)
		wms_id = None
		# Keep the last http(s) URL printed on stdout - that is the WMS id
		for line in imap(str.strip, proc.stdout.iter(timeout=60)):
			if line.startswith('http'):
				wms_id = line
		exit_code = proc.status(timeout=0, terminate=True)
		activity.finish()
		if (exit_code != 0) or (wms_id is None):
			if not self._explain_error(proc, exit_code):
				self._log.log_process(proc, files={'jdl': SafeFile(jdl_fn).read()})
	finally:
		remove_files([jdl_fn])
	job_data = {'jdl': str.join('', jdl_line_list)}
	return (jobnum, self._create_gc_id(wms_id), job_data)
def _set_msg(self, **kwargs):
	# Record every keyword whose value is set; None values are ignored
	self._msg_dict.update(filter_dict(kwargs,
		value_filter=lambda entry: entry is not None))
	for callback in Activity.callbacks:
		callback()
	if self._log:  # _log toggles emission via the attached logger - presumably intentional
		self._logger.log(self._level, self.get_msg())