def fill(self, container):
    # Populate the config container with entries from all configured files,
    # performing basic %(option)s interpolation inside each section.
    searchPaths = []
    for configFile in self._configFiles:
        configContent = {}
        searchPaths.extend(
            self._fillContentWithIncludes(configFile, [os.getcwd()], configContent))
        # Store config settings
        for section in configContent:
            # Allow very basic substitutions with %(option)s syntax
            def getOptValue(option, value, source):
                return (option, value)
            # Entries from the 'default' section are overridden by the current section
            substDict = dict(
                ichain([
                    ismap(getOptValue, configContent.get('default', [])),
                    ismap(getOptValue, configContent.get(section, []))
                ]))
            for (option, value, source) in configContent[section]:
                # Protection for non-interpolation "%" in value:
                # temporarily encode lone '%' as \x01, restore '%(' for interpolation
                try:
                    value = (
                        value.replace('%', '\x01').replace('\x01(', '%(')
                        % substDict).replace('\x01', '%')
                except Exception:
                    raise ConfigError(
                        'Unable to interpolate value %r with %r' % (value, substDict))
                self._addEntry(container, section, option, value, source)
    # Collected include search paths are published as additional plugin paths
    searchString = str.join(' ', UniqueList(searchPaths))
    if self._addSearchPath:
        self._addEntry(container, 'global', 'plugin paths+', searchString,
            str.join(',', self._configFiles))
def _wrap_head(self, head, lendict):
    # Partition the column heads into display rows and align column widths.
    def _get_aligned_dict(keys, lendict, maxlen):
        # Grow column widths so that column edges of consecutive head rows line up.
        edges = []
        while len(keys):
            offset = 2
            # keys is a flat list of column keys with None entries as row separators
            (tmp, keys) = (keys[:keys.index(None)], keys[keys.index(None) + 1:])
            for key in tmp:
                # Remaining slack in this row before exceeding maxlen
                left = max(0, maxlen - sum(imap(lambda k: lendict[k] + 3, tmp)))
                for edge in edges:
                    # Widen this column up to an existing edge if the slack allows it
                    if (edge > offset + lendict[key]) and (edge - (offset + lendict[key]) < left):
                        lendict[key] += edge - (offset + lendict[key])
                        left -= edge - (offset + lendict[key])
                        break
                edges.append(offset + lendict[key])
                offset += lendict[key] + 3
        return lendict

    def _get_head_key(key, name):
        # Wrap and align columns
        return key

    def _get_padded_key_len(key, length):
        # Account for one space of padding on each side of the column
        return (key, length + 2)

    headwrap = list(self._get_good_partition(ismap(_get_head_key, head),
        dict(ismap(_get_padded_key_len, lendict.items())), self._wrap_len))
    lendict = _get_aligned_dict(headwrap, lendict, self._wrap_len)
    return (headwrap, lendict)
def _wrap_head(self, head, lendict):
    # Partition the column heads into display rows and align column widths.
    def _get_aligned_dict(keys, lendict, maxlen):
        # Grow column widths so that column edges of consecutive head rows line up.
        edges = []
        while len(keys):
            offset = 2
            # keys is a flat list of column keys with None entries as row separators
            (tmp, keys) = (keys[:keys.index(None)], keys[keys.index(None) + 1:])
            for key in tmp:
                # Remaining slack in this row before exceeding maxlen
                left = max(
                    0, maxlen - sum(imap(lambda k: lendict[k] + 3, tmp)))
                for edge in edges:
                    # Widen this column up to an existing edge if the slack allows it
                    if (edge > offset + lendict[key]) and (
                            edge - (offset + lendict[key]) < left):
                        lendict[key] += edge - (offset + lendict[key])
                        left -= edge - (offset + lendict[key])
                        break
                edges.append(offset + lendict[key])
                offset += lendict[key] + 3
        return lendict

    def _get_head_key(key, name):
        # Wrap and align columns
        return key

    def _get_padded_key_len(key, length):
        # Account for one space of padding on each side of the column
        return (key, length + 2)

    headwrap = list(
        self._get_good_partition(
            ismap(_get_head_key, head),
            dict(ismap(_get_padded_key_len, lendict.items())), self._wrap_len))
    lendict = _get_aligned_dict(headwrap, lendict, self._wrap_len)
    return (headwrap, lendict)
def _getSubmitFileMap(self, task, jobNumList):
    """ Get listed files for submission
        Returns:
        taskFiles           iterable as (descr, gcPath, scheddPath)
                            files shared by all jobs
        jobsFileMap         map of jobNum to iterable as (descr, gcPath, scheddPath)
                            files per individual job
    """
    taskFiles = []
    # Fixed: parameter was misspelled 'desrc', so 'descr' in the body raised NameError
    def mapSBFiles(descr, path, base):
        return (descr, path, os.path.join(self.getStagingDir(taskID = task.taskID), base))
    taskFiles.extend(ismap(mapSBFiles, self.parentPool._getSandboxFilesIn(task)))
    proxyFile = ()
    try:
        # Fixed: 'parentPool' was referenced without 'self.' (NameError)
        for authFile in self.parentPool.proxy.getauthFiles():
            proxyFile = ('User Proxy', authFile, os.path.join(self.getStagingDir(taskID = task.taskID),
                os.path.basename(authFile)))
    except Exception:
        # best-effort: missing proxy information is not fatal for submission
        pass
    # Fixed: dict was created as 'jobFileMap' but filled as 'jobsFileMap' (NameError)
    jobFileMap = {}
    for jobNum in jobNumList:
        jcFull, jcBase = self.getJobCfgPath(jobNum)
        jobFileMap[jobNum] = ('Job Config %d' % jobNum, jcFull,
            os.path.join(self.getStagingDir(taskID = task.taskID), jcBase))
    return taskFiles, proxyFile, jobFileMap
def __init__(self, head, data, fmt=None, wrapLen=100):
    """Render *data* as a vertical key/value listing, one block per record."""
    ConsoleTable.__init__(self)
    self._log.info('')
    head = list(head)
    # The widest header name fixes the width of the label column
    maxhead = max(len(name) for (_, name) in head)
    fmt_map = fmt or {}
    separator_pending = False
    for record in data:
        if isinstance(record, dict):
            if separator_pending:
                dashes = '-' * min(30, wrapLen - maxhead - 10)
                self._write_line('-' * (maxhead + 2) + '-+-' + dashes)
            for (key, name) in head:
                formatter = fmt_map.get(key, str)
                value_str = str(formatter(record.get(key, '')))
                self._write_line(name.rjust(maxhead + 2) + ' | ' + value_str)
            separator_pending = True
        elif separator_pending:
            equals = '=' * min(30, wrapLen - maxhead - 10)
            self._write_line('=' * (maxhead + 2) + '=+=' + equals)
            separator_pending = False
    self._log.info('')
def _getSubmitFileMap(self, task, jobNumList):
    """ Get listed files for submission
        Returns:
        taskFiles           iterable as (descr, gcPath, scheddPath)
                            files shared by all jobs
        jobsFileMap         map of jobNum to iterable as (descr, gcPath, scheddPath)
                            files per individual job
    """
    taskFiles = []
    # Fixed: parameter was misspelled 'desrc', so 'descr' in the body raised NameError
    def mapSBFiles(descr, path, base):
        return (descr, path, os.path.join(self.getStagingDir(taskID=task.taskID), base))
    taskFiles.extend(
        ismap(mapSBFiles, self.parentPool._getSandboxFilesIn(task)))
    proxyFile = ()
    try:
        # Fixed: 'parentPool' was referenced without 'self.' (NameError)
        for authFile in self.parentPool.proxy.getauthFiles():
            proxyFile = ('User Proxy', authFile, os.path.join(
                self.getStagingDir(taskID=task.taskID), os.path.basename(authFile)))
    except Exception:
        # best-effort: missing proxy information is not fatal for submission
        clear_current_exception()
    # Fixed: dict was created as 'jobFileMap' but filled as 'jobsFileMap' (NameError)
    jobFileMap = {}
    for jobNum in jobNumList:
        jcFull, jcBase = self.getJobCfgPath(jobNum)
        jobFileMap[jobNum] = ('Job Config %d' % jobNum, jcFull, os.path.join(
            self.getStagingDir(taskID=task.taskID), jcBase))
    return taskFiles, proxyFile, jobFileMap
def _format_data(self, head, data, just_fun, fmt_dict):
    """Format raw entries and track the maximum display width per column.

    Returns (formatted entries, column width dict, justify function)."""
    ansi_escape = re.compile('\033\\[\\d*(;\\d*)*m')

    def _stripped_len(value):
        # Display length without ANSI color escape sequences
        return len(ansi_escape.sub('', value))

    def _just(key, value):
        # Pad to the column width, compensating for invisible escape sequences
        pad = lendict[key] + len(value) - _stripped_len(value)
        return just_fun.get(key, str.rjust)(value, pad)

    # Start with the header names as minimum column widths
    lendict = {}
    for (key, name) in head:
        lendict[key] = len(name)
    result = []
    for entry in data:
        if not isinstance(entry, dict):
            result.append(entry)  # pass through separator markers unchanged
            continue
        formatted = {}
        for (key, _) in head:
            formatted[key] = str(fmt_dict.get(key, str)(entry.get(key, '')))
            lendict[key] = max(lendict[key], _stripped_len(formatted[key]))
        result.append(formatted)
    return (result, lendict, _just)
def _resyncInternal(self):  # This function is _VERY_ time critical!
    # Resynchronize the parameter source with its stored on-disk state and
    # compute which job numbers need to be redone / disabled.
    tmp = self._rawSource.resync()  # First ask about psource changes
    (redoNewPNum, disableNewPNum, sizeChange) = (set(tmp[0]), set(tmp[1]), tmp[2])
    hashNew = self._rawSource.getHash()
    hashChange = self._storedHash != hashNew
    self._storedHash = hashNew
    # Nothing changed - clear resync state and stop early
    if not (redoNewPNum or disableNewPNum or sizeChange or hashChange):
        self._resyncState = None
        return
    # Old state is read back from the dump file written during the previous run
    psource_old = ParameterAdapter(None, ParameterSource.createInstance('GCDumpParameterSource', self._pathParams))
    psource_new = ParameterAdapter(None, self._rawSource)
    mapJob2PID = {}
    (pAdded, pMissing, _) = self._diffParams(psource_old, psource_new, mapJob2PID, redoNewPNum, disableNewPNum)
    self._source = self._getResyncSource(psource_old, psource_new, mapJob2PID, pAdded, pMissing, disableNewPNum)
    self._mapJob2PID = mapJob2PID  # Update Job2PID map
    # Translate parameter numbers to job numbers for the resync result
    redoNewPNum = redoNewPNum.difference(disableNewPNum)
    if redoNewPNum or disableNewPNum:
        mapPID2Job = dict(ismap(utils.swap, self._mapJob2PID.items()))
        translate = lambda pNum: mapPID2Job.get(pNum, pNum)
        self._resyncState = (set(imap(translate, redoNewPNum)),
            set(imap(translate, disableNewPNum)), sizeChange)
    elif sizeChange:
        self._resyncState = (set(), set(), sizeChange)
    # Write resynced state (write to .tmp first, then rename for atomic update)
    self._writeJob2PID(self._pathJob2PID + '.tmp')
    ParameterSource.getClass('GCDumpParameterSource').write(self._pathParams + '.tmp', self)
    os.rename(self._pathJob2PID + '.tmp', self._pathJob2PID)
    os.rename(self._pathParams + '.tmp', self._pathParams)
def _get_just_fun_dict(self, head, align_str):
    """Map each column key to its justification function.

    *align_str* holds one of 'l'/'r'/'c' per column (ljust/rjust/center)."""
    alignment_table = {'l': str.ljust, 'r': str.rjust, 'c': str.center}
    result = {}
    for (head_entry, align_char) in izip(head, align_str):
        result[head_entry[0]] = alignment_table[align_char]
    return result
def _submitJob(self, jobNum, module):
    # Submit a single job into a freshly created local sandbox directory.
    # Returns (jobNum, wmsId or None, {'sandbox': sandbox})
    activity = utils.ActivityLog('submitting jobs')
    # Fixed: pre-assign sandbox so the error message below is defined even when
    # mkdtemp raises - previously the except clause raised NameError instead of
    # the intended BackendError
    sandbox = self.sandPath
    try:
        sandbox = tempfile.mkdtemp('', '%s.%04d.' % (module.taskID, jobNum), self.sandPath)
    except Exception:
        raise BackendError('Unable to create sandbox directory "%s"!' % sandbox)
    sbPrefix = sandbox.replace(self.sandPath, '').lstrip('/')
    def translateTarget(d, s, t):
        # Rebase sandbox input file targets below the new sandbox prefix
        return (d, s, os.path.join(sbPrefix, t))
    self.smSBIn.doTransfer(ismap(translateTarget, self._getSandboxFilesIn(module)))
    self._writeJobConfig(os.path.join(sandbox, '_jobconfig.sh'), jobNum, module, {
        'GC_SANDBOX': sandbox, 'GC_SCRATCH_SEARCH': str.join(' ', self.scratchPath)})
    # Broker site and queue requirements
    reqs = self.brokerSite.brokerAdd(module.getRequirements(jobNum), WMS.SITES)
    reqs = dict(self.brokerQueue.brokerAdd(reqs, WMS.QUEUES))
    if (self.memory > 0) and (reqs.get(WMS.MEMORY, 0) < self.memory):
        reqs[WMS.MEMORY] = self.memory  # local jobs need higher (more realistic) memory requirements
    (stdout, stderr) = (os.path.join(sandbox, 'gc.stdout'), os.path.join(sandbox, 'gc.stderr'))
    jobName = module.getDescription(jobNum).jobName
    proc = utils.LoggedProcess(self.submitExec, '%s %s "%s" %s' % (self.submitOpts,
        self.getSubmitArguments(jobNum, jobName, reqs, sandbox, stdout, stderr),
        utils.pathShare('gc-local.sh'), self.getJobArguments(jobNum, sandbox)))
    retCode = proc.wait()
    wmsIdText = proc.getOutput().strip().strip('\n')
    try:
        wmsId = self.parseSubmitOutput(wmsIdText)
    except Exception:
        wmsId = None
    del activity
    if retCode != 0:
        self._log.warning('%s failed:', self.submitExec)
    elif wmsId is None:
        self._log.warning('%s did not yield job id:\n%s', self.submitExec, wmsIdText)
    if wmsId:
        wmsId = self._createId(wmsId)
        open(os.path.join(sandbox, wmsId), 'w')  # marker file named after the WMS id
    else:
        proc.logError(self.errorLog)
    return (jobNum, utils.QM(wmsId, wmsId, None), {'sandbox': sandbox})
def _get_just_fun_dict(self, head, fmtString):
    """Map each column key to its justification function.

    *fmtString* holds one of 'l'/'r'/'c' per column (ljust/rjust/center)."""
    justFunDict = {'l': str.ljust, 'r': str.rjust, 'c': str.center}
    result = {}
    for (headEntry, fmtChar) in izip(head, fmtString):
        result[headEntry[0]] = justFunDict[fmtChar]
    return result
def _get_just_fun_dict(self, head, align_str):
    """Return {column key: justification function} for the given align string.

    Each character of *align_str* selects ljust ('l'), rjust ('r') or center ('c')."""
    char_to_fun = {'l': str.ljust, 'r': str.rjust, 'c': str.center}
    mapping = {}
    for (head_entry, marker) in izip(head, align_str):
        mapping[head_entry[0]] = char_to_fun[marker]
    return mapping
def _submit_job(self, jobnum, task):
    # Submit job and yield (jobnum, WMS ID, other data)
    activity = Activity('submitting job %d' % jobnum)
    # Fixed: pre-assign sandbox so the error message below is defined even when
    # mkdtemp raises - previously the except clause raised NameError instead of
    # the intended BackendError
    sandbox = self._sandbox_helper.get_path()
    try:
        sandbox = tempfile.mkdtemp(
            '', '%s.%04d.' % (task.get_description().task_id, jobnum),
            self._sandbox_helper.get_path())
    except Exception:
        raise BackendError('Unable to create sandbox directory "%s"!' % sandbox)
    sb_prefix = sandbox.replace(self._sandbox_helper.get_path(), '').lstrip('/')
    def _translate_target(desc, src, target):
        # Rebase sandbox input file targets below the new sandbox prefix
        return (desc, src, os.path.join(sb_prefix, target))
    self._sm_sb_in.do_transfer(
        ismap(_translate_target, self._get_in_transfer_info_list(task)))
    self._write_job_config(
        os.path.join(sandbox, '_jobconfig.sh'), jobnum, task, {
            'GC_SANDBOX': sandbox,
            'GC_SCRATCH_SEARCH': str.join(' ', self._scratch_path)
        })
    reqs = self._broker_site.broker(task.get_requirement_list(jobnum), WMS.SITES)
    reqs = dict(self._broker_queue.broker(reqs, WMS.QUEUES))
    if (self._memory > 0) and (reqs.get(WMS.MEMORY, 0) < self._memory):
        reqs[WMS.MEMORY] = self._memory  # local jobs need higher (more realistic) memory requirements
    job_name = task.get_description(jobnum).job_name
    proc = self._get_submit_proc(jobnum, sandbox, job_name, reqs)
    exit_code = proc.status(timeout=20, terminate=True)
    wms_id_str = proc.stdout.read(timeout=0).strip().strip('\n')
    wms_id = ignore_exception(Exception, None, self.parse_submit_output, wms_id_str)
    activity.finish()
    if exit_code != 0:
        self._log.warning('%s failed:', self._submit_exec)
    elif wms_id is None:
        self._log.warning('%s did not yield job id:\n%s', self._submit_exec, wms_id_str)
    gc_id = self._create_gc_id(wms_id)
    if gc_id is not None:
        open(os.path.join(sandbox, gc_id), 'w')  # marker file named after the gc id
    else:
        self._log.log_process(proc)
    return (jobnum, gc_id, {'sandbox': sandbox})
def __init__(self, head, data, align_str='', fmt_dict=None, wrap_len=100, title=None):
    """Render an aligned, possibly wrapped tabular view of *data*."""
    UserConsoleTable.__init__(self, title)
    self._wrap_len = wrap_len
    head = list(head)
    just_fun = self._get_just_fun_dict(head, align_str)
    # formatted, but not yet aligned entries; column width dict; justify function
    (entries, lendict, just) = self._format_data(head, data, just_fun, fmt_dict or {})
    (headwrap, lendict) = self._wrap_head(head, lendict)
    # Center each header name within its final column width
    headentry = {}
    for (key, name) in head:
        headentry[key] = name.center(lendict[key])
    self._print_table(headwrap, headentry, entries, just, lendict)
    self._write_line('')
def fill(self, container):
    # Populate the config container with entries from all configured files,
    # performing basic %(option)s interpolation inside each section.
    searchPaths = []
    for configFile in self._configFiles:
        configContent = {}
        searchPaths.extend(self._fillContentWithIncludes(configFile, [os.getcwd()], configContent))
        # Store config settings
        for section in configContent:
            # Allow very basic substitutions with %(option)s syntax
            def getOptValue(option, value, source):
                return (option, value)
            # Entries from the 'default' section are overridden by the current section
            substDict = dict(ichain([
                ismap(getOptValue, configContent.get('default', [])),
                ismap(getOptValue, configContent.get(section, []))]))
            for (option, value, source) in configContent[section]:
                # Protection for non-interpolation "%" in value:
                # temporarily encode lone '%' as \x01, restore '%(' for interpolation
                try:
                    value = (value.replace('%', '\x01').replace('\x01(', '%(')
                        % substDict).replace('\x01', '%')
                except Exception:
                    raise ConfigError('Unable to interpolate value %r with %r' % (value, substDict))
                self._addEntry(container, section, option, value, source)
    # Collected include search paths are published as additional plugin paths
    searchString = str.join(' ', UniqueList(searchPaths))
    if self._addSearchPath:
        self._addEntry(container, 'global', 'plugin paths+', searchString,
            str.join(',', self._configFiles))
def __init__(self, head, data, fmtString = '', fmt = None, wrapLen = 100):
    """Render an aligned, possibly wrapped table of *data* rows."""
    ConsoleTable.__init__(self)
    self._wrapLen = wrapLen
    head = list(head)
    justFun = self._get_just_fun_dict(head, fmtString)
    # formatted, but not yet aligned entries; column width dict; justify function
    (entries, lendict, just) = self._format_data(head, data, justFun, fmt or {})
    (headwrap, lendict) = self._wrap_head(head, lendict)
    # Center each header name within its final column width
    headentry = {}
    for (key, name) in head:
        headentry[key] = name.center(lendict[key])
    self._log.info('')
    self._print_table(headwrap, headentry, entries, just, lendict)
    self._log.info('')
def __init__(self, head, data, fmtString='', fmt=None, wrapLen=100):
    """Render an aligned, possibly wrapped table of *data* rows."""
    ConsoleTable.__init__(self)
    self._wrapLen = wrapLen
    head = list(head)
    justFun = self._get_just_fun_dict(head, fmtString)
    # formatted, but not yet aligned entries; column width dict; justify function
    (entries, lendict, just) = self._format_data(head, data, justFun, fmt or {})
    (headwrap, lendict) = self._wrap_head(head, lendict)
    # Center each header name within its final column width
    headentry = {}
    for (key, name) in head:
        headentry[key] = name.center(lendict[key])
    self._print_table(headwrap, headentry, entries, just, lendict)
def _submitJob(self, jobNum, module):
    # Submit a single job into a freshly created local sandbox directory.
    # Returns (jobNum, wmsId or None, {'sandbox': sandbox})
    activity = utils.ActivityLog('submitting jobs')
    try:
        sandbox = self.sandPath # defined here for exception message in case os.mkdir fails
        if not os.path.exists(self.sandPath):
            os.mkdir(self.sandPath)
        sandbox = tempfile.mkdtemp('', '%s.%04d.' % (module.taskID, jobNum), self.sandPath)
    except Exception:
        raise BackendError('Unable to create sandbox directory "%s"!' % sandbox)
    sbPrefix = sandbox.replace(self.sandPath, '').lstrip('/')
    def translateTarget(d, s, t):
        # Rebase sandbox input file targets below the new sandbox prefix
        return (d, s, os.path.join(sbPrefix, t))
    self.smSBIn.doTransfer(ismap(translateTarget, self._getSandboxFilesIn(module)))
    cfgPath = os.path.join(sandbox, '_jobconfig.sh')
    self._writeJobConfig(cfgPath, jobNum, module, {'GC_SANDBOX': sandbox,
        'GC_SCRATCH_SEARCH': str.join(' ', self.scratchPath)})
    # Broker site and queue requirements
    reqs = self.brokerSite.brokerAdd(module.getRequirements(jobNum), WMS.SITES)
    reqs = dict(self.brokerQueue.brokerAdd(reqs, WMS.QUEUES))
    if (self.memory > 0) and (reqs.get(WMS.MEMORY, 0) < self.memory):
        reqs[WMS.MEMORY] = self.memory # local jobs need higher (more realistic) memory requirements
    (stdout, stderr) = (os.path.join(sandbox, 'gc.stdout'), os.path.join(sandbox, 'gc.stderr'))
    jobName = module.getDescription(jobNum).jobName
    proc = utils.LoggedProcess(self.submitExec, '%s %s "%s" %s' % (self.submitOpts,
        self.getSubmitArguments(jobNum, jobName, reqs, sandbox, stdout, stderr),
        utils.pathShare('gc-local.sh'), self.getJobArguments(jobNum, sandbox)))
    retCode = proc.wait()
    wmsIdText = proc.getOutput().strip().strip('\n')
    try:
        wmsId = self.parseSubmitOutput(wmsIdText)
    except Exception:
        wmsId = None
    del activity
    if retCode != 0:
        utils.eprint('WARNING: %s failed:' % self.submitExec)
    elif wmsId is None:
        utils.eprint('WARNING: %s did not yield job id:\n%s' % (self.submitExec, wmsIdText))
    if wmsId:
        wmsId = self._createId(wmsId)
        open(os.path.join(sandbox, wmsId), 'w')  # marker file named after the WMS id
    else:
        proc.logError(self.errorLog)
    return (jobNum, utils.QM(wmsId, wmsId, None), {'sandbox': sandbox})
def _submitJob(self, jobNum, module):
    # Submit a single job into a freshly created local sandbox directory.
    # Returns (jobNum, gcID or None, {'sandbox': sandbox})
    activity = Activity('submitting job %d' % jobNum)
    # Fixed: pre-assign sandbox so the error message below is defined even when
    # mkdtemp raises - previously the except clause raised NameError instead of
    # the intended BackendError
    sandbox = self._sandbox_helper.get_path()
    try:
        sandbox = tempfile.mkdtemp('', '%s.%04d.' % (module.taskID, jobNum),
            self._sandbox_helper.get_path())
    except Exception:
        raise BackendError('Unable to create sandbox directory "%s"!' % sandbox)
    sbPrefix = sandbox.replace(self._sandbox_helper.get_path(), '').lstrip('/')
    def translateTarget(d, s, t):
        # Rebase sandbox input file targets below the new sandbox prefix
        return (d, s, os.path.join(sbPrefix, t))
    self.smSBIn.doTransfer(ismap(translateTarget, self._getSandboxFilesIn(module)))
    self._writeJobConfig(os.path.join(sandbox, '_jobconfig.sh'), jobNum, module, {
        'GC_SANDBOX': sandbox, 'GC_SCRATCH_SEARCH': str.join(' ', self.scratchPath)})
    reqs = self.brokerSite.brokerAdd(module.getRequirements(jobNum), WMS.SITES)
    reqs = dict(self.brokerQueue.brokerAdd(reqs, WMS.QUEUES))
    if (self.memory > 0) and (reqs.get(WMS.MEMORY, 0) < self.memory):
        reqs[WMS.MEMORY] = self.memory  # local jobs need higher (more realistic) memory requirements
    (stdout, stderr) = (os.path.join(sandbox, 'gc.stdout'), os.path.join(sandbox, 'gc.stderr'))
    jobName = module.getDescription(jobNum).jobName
    proc = LoggedProcess(self.submitExec, '%s %s "%s" %s' % (self.submitOpts,
        self.getSubmitArguments(jobNum, jobName, reqs, sandbox, stdout, stderr),
        utils.pathShare('gc-local.sh'), self.getJobArguments(jobNum, sandbox)))
    retCode = proc.wait()
    gcIDText = proc.getOutput().strip().strip('\n')
    try:
        gcID = self.parseSubmitOutput(gcIDText)
    except Exception:
        gcID = None
    activity.finish()
    if retCode != 0:
        self._log.warning('%s failed:', self.submitExec)
    elif gcID is None:
        self._log.warning('%s did not yield job id:\n%s', self.submitExec, gcIDText)
    if gcID:
        gcID = self._createId(gcID)
        open(os.path.join(sandbox, gcID), 'w')  # marker file named after the gc id
    else:
        proc.logError(self.errorLog)
    return (jobNum, utils.QM(gcID, gcID, None), {'sandbox': sandbox})
def __init__(self, head, data, fmt = None, wrapLen = 100):
    """Render *data* as a vertical key/value listing, one block per record."""
    ConsoleTable.__init__(self)
    self._log.info('')
    head = list(head)
    # The widest header name fixes the width of the label column
    maxhead = max(len(name) for (_, name) in head)
    fmt_map = fmt or {}
    separator_pending = False
    for record in data:
        if isinstance(record, dict):
            if separator_pending:
                dashes = '-' * min(30, wrapLen - maxhead - 10)
                self._write_line('-' * (maxhead + 2) + '-+-' + dashes)
            for (key, name) in head:
                formatter = fmt_map.get(key, str)
                value_str = str(formatter(record.get(key, '')))
                self._write_line(name.rjust(maxhead + 2) + ' | ' + value_str)
            separator_pending = True
        elif separator_pending:
            equals = '=' * min(30, wrapLen - maxhead - 10)
            self._write_line('=' * (maxhead + 2) + '=+=' + equals)
            separator_pending = False
    self._log.info('')
def _format_data(self, head, data, justFun, fmt):
    """Format raw entries and track the maximum display width per column.

    Returns (formatted entries, column width dict, justify function)."""
    ansiEscape = re.compile('\033\\[\\d*(;\\d*)*m')

    def strippedlen(value):
        # Display length without ANSI color escape sequences
        return len(ansiEscape.sub('', value))

    def just(key, value):
        # Pad to the column width, compensating for invisible escape sequences
        pad = lendict[key] + len(value) - strippedlen(value)
        return justFun.get(key, str.rjust)(value, pad)

    # Start with the header names as minimum column widths
    lendict = {}
    for (key, name) in head:
        lendict[key] = len(name)
    result = []
    for entry in data:
        if not isinstance(entry, dict):
            result.append(entry)  # pass through separator markers unchanged
            continue
        tmp = {}
        for (key, _) in head:
            tmp[key] = str(fmt.get(key, str)(entry.get(key, '')))
            lendict[key] = max(lendict[key], strippedlen(tmp[key]))
        result.append(tmp)
    return (result, lendict, just)
def _format_data(self, head, data, justFun, fmt):
    """Format raw entries and record the widest display value per column.

    Returns (formatted entries, column width dict, justify function)."""
    escapePattern = re.compile('\033\\[\\d*(;\\d*)*m')

    def strippedlen(text):
        # Visible length, ignoring ANSI color escape sequences
        return len(escapePattern.sub('', text))

    def just(key, text):
        # Justify to the column width, compensating for invisible escapes
        width = lendict[key] + len(text) - strippedlen(text)
        return justFun.get(key, str.rjust)(text, width)

    # Header names provide the minimum column widths
    lendict = dict((key, len(name)) for (key, name) in head)
    result = []
    for entry in data:
        if isinstance(entry, dict):
            row = {}
            for (key, _) in head:
                row[key] = str(fmt.get(key, str)(entry.get(key, '')))
                lendict[key] = max(lendict[key], strippedlen(row[key]))
            result.append(row)
        else:
            # Non-dict entries (separator markers) pass through unchanged
            result.append(entry)
    return (result, lendict, just)
def _submit_job(self, jobnum, task):
    # Submit job and yield (jobnum, WMS ID, other data)
    activity = Activity('submitting job %d' % jobnum)
    # Fixed: pre-assign sandbox so the error message below is defined even when
    # mkdtemp raises - previously the except clause raised NameError instead of
    # the intended BackendError
    sandbox = self._sandbox_helper.get_path()
    try:
        sandbox = tempfile.mkdtemp('', '%s.%04d.' % (task.get_description().task_id, jobnum),
            self._sandbox_helper.get_path())
    except Exception:
        raise BackendError('Unable to create sandbox directory "%s"!' % sandbox)
    sb_prefix = sandbox.replace(self._sandbox_helper.get_path(), '').lstrip('/')
    def _translate_target(desc, src, target):
        # Rebase sandbox input file targets below the new sandbox prefix
        return (desc, src, os.path.join(sb_prefix, target))
    self._sm_sb_in.do_transfer(ismap(_translate_target, self._get_in_transfer_info_list(task)))
    self._write_job_config(os.path.join(sandbox, '_jobconfig.sh'), jobnum, task, {
        'GC_SANDBOX': sandbox, 'GC_SCRATCH_SEARCH': str.join(' ', self._scratch_path)})
    reqs = self._broker_site.broker(task.get_requirement_list(jobnum), WMS.SITES)
    reqs = dict(self._broker_queue.broker(reqs, WMS.QUEUES))
    if (self._memory > 0) and (reqs.get(WMS.MEMORY, 0) < self._memory):
        reqs[WMS.MEMORY] = self._memory  # local jobs need higher (more realistic) memory requirements
    job_name = task.get_description(jobnum).job_name
    proc = self._get_submit_proc(jobnum, sandbox, job_name, reqs)
    exit_code = proc.status(timeout=20, terminate=True)
    wms_id_str = proc.stdout.read(timeout=0).strip().strip('\n')
    wms_id = ignore_exception(Exception, None, self.parse_submit_output, wms_id_str)
    activity.finish()
    if exit_code != 0:
        self._log.warning('%s failed:', self._submit_exec)
    elif wms_id is None:
        self._log.warning('%s did not yield job id:\n%s', self._submit_exec, wms_id_str)
    gc_id = self._create_gc_id(wms_id)
    if gc_id is not None:
        open(os.path.join(sandbox, gc_id), 'w')  # marker file named after the gc id
    else:
        self._log.log_process(proc)
    return (jobnum, gc_id, {'sandbox': sandbox})
def __init__(self, head, data, fmt_dict=None, wrap_len=100, title=None):
    """Render *data* as a vertical key/value listing, one block per record."""
    UserConsoleTable.__init__(self, title)
    head = list(head)
    # The widest header name fixes the width of the label column
    maxhead = max(len(name) for (_, name) in head)
    fmt_dict = fmt_dict or {}
    show_line = False
    for record in data:
        if isinstance(record, dict):
            if show_line:
                dashes = '-' * min(30, wrap_len - maxhead - 10)
                self._write_line('-' * (maxhead + 2) + '-+-' + dashes)
            for (key, name) in head:
                value = str(fmt_dict.get(key, str)(record.get(key, '')))
                self._write_line(name.rjust(maxhead + 2) + ' | ' + value)
            show_line = True
        elif show_line:
            equals = '=' * min(30, wrap_len - maxhead - 10)
            self._write_line('=' * (maxhead + 2) + '=+=' + equals)
            show_line = False
    self._write_line('')
def __init__(self, head, data, align_str='', fmt_dict=None, wrap_len=100, title=None):
    """Render an aligned, possibly wrapped tabular view of *data*."""
    UserConsoleTable.__init__(self, title)
    self._wrap_len = wrap_len
    head = list(head)
    just_fun = self._get_just_fun_dict(head, align_str)
    # formatted, but not yet aligned entries; column width dict; justify function
    (entries, lendict, just) = self._format_data(head, data, just_fun, fmt_dict or {})
    (headwrap, lendict) = self._wrap_head(head, lendict)
    # Center each header name within its final column width
    headentry = dict((key, name.center(lendict[key])) for (key, name) in head)
    self._print_table(headwrap, headentry, entries, just, lendict)
    self._write_line('')
def __init__(self, head, data, fmt_dict=None, wrap_len=100, title=None):
    """Render *data* as a vertical key/value listing, one block per record."""
    UserConsoleTable.__init__(self, title)
    head = list(head)
    # The widest header name fixes the width of the label column
    maxhead = max(len(name) for (_, name) in head)
    formatters = fmt_dict or {}
    pending_separator = False
    for record in data:
        if isinstance(record, dict):
            if pending_separator:
                dashes = '-' * min(30, wrap_len - maxhead - 10)
                self._write_line('-' * (maxhead + 2) + '-+-' + dashes)
            for (key, name) in head:
                value = str(formatters.get(key, str)(record.get(key, '')))
                self._write_line(name.rjust(maxhead + 2) + ' | ' + value)
            pending_separator = True
        elif pending_separator:
            equals = '=' * min(30, wrap_len - maxhead - 10)
            self._write_line('=' * (maxhead + 2) + '=+=' + equals)
            pending_separator = False
    self._write_line('')
def _format_data(self, head, data, just_fun, fmt_dict):
    """Format raw entries and record the widest display value per column.

    Returns (formatted entries, column width dict, justify function)."""
    escape_pattern = re.compile('\033\\[\\d*(;\\d*)*m')

    def _stripped_len(text):
        # Visible length, ignoring ANSI color escape sequences
        return len(escape_pattern.sub('', text))

    def _just(key, text):
        # Justify to the column width, compensating for invisible escapes
        width = lendict[key] + len(text) - _stripped_len(text)
        return just_fun.get(key, str.rjust)(text, width)

    # Header names provide the minimum column widths
    lendict = dict((key, len(name)) for (key, name) in head)
    result = []
    for entry in data:
        if isinstance(entry, dict):
            row = {}
            for (key, _) in head:
                row[key] = str(fmt_dict.get(key, str)(entry.get(key, '')))
                lendict[key] = max(lendict[key], _stripped_len(row[key]))
            result.append(row)
        else:
            # Non-dict entries (separator markers) pass through unchanged
            result.append(entry)
    return (result, lendict, _just)
def __call__(self, jobnum, job_obj):
    # True if every configured regex matches its job config variable.
    # Fixed: reduce(operator.and_, ...) raised TypeError on an empty matcher
    # list; all() is vacuously True for an empty list and short-circuits.
    def _match(var, regex_obj):
        return regex_obj.search(self._job_config(jobnum, var)) is not None
    return all(ismap(_match, self._regex_obj_list))
def __call__(self, jobNum, jobObj):
    # True if every configured regex matches its job config variable.
    # Fixed: reduce(operator.and_, ...) raised TypeError on an empty pattern
    # collection; all() is vacuously True when empty and short-circuits.
    def match(var, rx):
        return rx.search(self.jobCfg(jobNum, var)) is not None
    return all(ismap(match, self.rxDict))
def reverse_dict(mapping):
    """Return a new dict with the keys and values of *mapping* exchanged."""
    return dict((value, key) for (key, value) in mapping.items())
def __call__(self, jobNum, jobObj):
    # True if every configured regex matches its job config variable.
    # Fixed: reduce(operator.and_, ...) raised TypeError on an empty pattern
    # collection; all() is vacuously True when empty and short-circuits.
    def match(var, rx):
        return rx.search(self._jobCfg(jobNum, var)) is not None
    return all(ismap(match, self._rxDict))
def reverse_dict(mapping):
    """Invert *mapping*: each (key, value) pair becomes (value, key)."""
    inverted = {}
    for (key, value) in mapping.items():
        inverted[value] = key
    return inverted
def _get_just_fun_dict(self, head, fmtString):
    """Return {column key: justification function} for the given format string.

    Each character of *fmtString* selects ljust ('l'), rjust ('r') or center ('c')."""
    char_to_fun = {
        'l': str.ljust,
        'r': str.rjust,
        'c': str.center
    }
    mapping = {}
    for (headEntry, marker) in izip(head, fmtString):
        mapping[headEntry[0]] = char_to_fun[marker]
    return mapping