def format(self, print_section=False, print_default=False, default=unspecified, source='', wraplen=33):
    """Render this config option as config-file style text.

    Returns '' when the value is unspecified or equals *default*
    (unless print_default is set). Single-line values are joined with
    the option prefix; multi-line values put the prefix on its own line
    and indent continuation lines with a tab. When *source* is given it
    is appended as a ';' comment - aligned at column *wraplen* if the
    first line fits, otherwise emitted as a leading comment line.
    """
    if unspecified(self.value) or (not print_default and (self.value == default)):
        return ''
    if print_section:
        head = '[%s] %s' % (self.section, self.option)
    else:
        head = self.option
    head += ' %s' % self.opttype
    value_lines = lidfilter(imap(str.strip, self.value.strip().splitlines()))
    if len(value_lines) == 1:
        # everything fits on one line together with the prefix
        value_lines = [head + ' ' + value_lines[0]]
    else:
        # prefix on the first line - values on the following lines
        value_lines = [head] + value_lines
    output = ''
    for cur_line in value_lines:
        if not output:  # first line
            if source and (len(cur_line) >= wraplen):
                # too long to align the source comment - put it on its own line
                output += '; source: ' + source + '\n'
            elif source:
                output = cur_line.ljust(wraplen) + ' ; ' + source + '\n'
                continue
        else:
            output += '\t'
        output += cur_line + '\n'
    return output.rstrip()
def __init__(self, fn, format='sniffed'):
    """Read parameter space points from the csv file *fn*.

    The csv dialect is sniffed from the first line by default and
    registered under the name 'sniffed'; *format* selects the dialect
    actually used for parsing. Raises ParameterError for malformed rows
    (rows with fewer fields than the header leave None values behind).
    """
    (self._fn, self._format) = (fn, format)
    # 'with' guarantees the file handle is closed even if sniffing or parsing fails
    with open(fn) as fp:
        first_line = fp.readline()
        sniffed = csv.Sniffer().sniff(first_line)
        csv.register_dialect('sniffed', sniffed)
        # the trailing None key collects surplus columns of over-long rows
        csv_header = first_line.strip().split(sniffed.delimiter) + [None]
        psp_list = list(csv.DictReader(fp, csv_header, dialect=format))
    for psp in psp_list:
        psp.pop(None, None)  # discard surplus columns collected under the None key
        # DictReader fills missing fields with None - treat those rows as malformed
        if None in psp.values():
            raise ParameterError('Malformed entry in csv file %r: {%s}' % (
                fn, str_dict_linear(psp)))

    def _cleanup_dict(mapping):
        # strip all key value entries and filter empty parameters
        tmp = tuple(imap(lambda item: lmap(str.strip, item), mapping.items()))
        return dict(lfilter(lambda k_v: k_v[0] != '', tmp))
    output_vn_list = sorted(imap(ParameterMetadata, lidfilter(csv_header)),
        key=lambda k: k.value)
    InternalParameterSource.__init__(self, lmap(_cleanup_dict, psp_list), output_vn_list)
def __new__(cls, pconfig, output_vn, lookup_vn_list):
    """Factory: build the cheapest lookup parameter source for *output_vn*.

    If *lookup_vn_list* is empty, the lookup variable names are derived
    from the 'lookup' config option (split on non-alphanumeric characters).
    Returns a SimpleLookupParameterSource when every lookup entry maps to
    exactly one value, otherwise an InternalSwitchPlaceholder.
    """
    def _replace_nonalnum(value):
        # keep alphanumeric characters, map everything else to a separator
        if str.isalnum(value):
            return value
        return ' '
    if not lookup_vn_list:
        # derive lookup variable names from the 'lookup' config option
        lookup_str = pconfig.get(output_vn, 'lookup', '')
        lookup_vn_list = lidfilter(str.join('', imap(_replace_nonalnum, lookup_str)).split())
    lookup_vn = None
    if lookup_vn_list:  # default lookup key
        lookup_vn = KeyParameterSource(*lookup_vn_list)
    lookup_args = _get_lookup_args(pconfig, KeyParameterSource(output_vn), lookup_vn)
    # Determine kind of lookup, [3] == lookup_dict
    # NOTE(review): min()/max() below raise ValueError when the lookup dict
    # is empty - presumably _get_lookup_args guarantees non-empty; confirm.
    lookup_len = lmap(len, lookup_args[3].values())
    if (min(lookup_len) == 1) and (max(lookup_len) == 1):
        # simple lookup sufficient for this setup
        return SimpleLookupParameterSource(*lookup_args)
    # switch needs elevation beyond local scope
    return InternalSwitchPlaceholder(*lookup_args)
def __new__(cls, *args):
    """Create a combined selector, short-circuiting trivial cases.

    Drops empty selector arguments; returns None when nothing remains,
    the selector itself when only one remains, and otherwise defers to
    the regular JobSelector construction.
    """
    remaining = lidfilter(args)
    if len(remaining) == 1:
        return remaining[0]
    if not remaining:
        return None
    return JobSelector.__new__(cls)
def format(self, print_section=False, print_default=False, default=unspecified, source='', wraplen=33):
    """Render this config option as config-file style text.

    Returns '' when the value is unspecified or equals *default* (unless
    print_default is set). The *source* string, if given, is appended as
    a ';' comment aligned at column *wraplen* when the first line fits.
    """
    if unspecified(self.value) or (not print_default and (self.value == default)):
        return ''
    if print_section:
        prefix = '[%s] %s' % (self.section, self.option)
    else:
        prefix = self.option
    prefix += ' %s' % self.opttype
    # strip each value line and drop empty ones
    line_list = lidfilter(imap(str.strip, self.value.strip().splitlines()))
    if len(line_list) == 1:
        line_list = [prefix + ' ' + line_list[0]]  # everything on one line
    else:
        line_list.insert(0, prefix)  # prefix on first line - rest on other lines
    result = ''
    for line in line_list:
        if not result:  # first line
            if source and (len(line) >= wraplen):
                # first line too long to align the source comment - own line
                result += '; source: ' + source + '\n'
            elif source:
                result = line.ljust(wraplen) + ' ; ' + source + '\n'
                continue
        else:
            result += '\t'  # continuation lines are tab-indented
        result += line + '\n'
    return result.rstrip()
def _register_psrc(self, pconfig, output_vn):
    """Create an auto parameter source for *output_vn* and append it.

    The lookup variable names are taken from the 'lookup' config option,
    split on any non-alphanumeric character.
    """
    def _to_separator(char):
        # non-alphanumeric characters act as separators between variable names
        if str.isalnum(char):
            return char
        return ' '
    lookup_expr = pconfig.get(output_vn, 'lookup', '')
    normalized = str.join('', imap(_to_separator, lookup_expr))
    lookup_vn_list = lidfilter(normalized.split())
    psrc = ParameterSource.create_psrc_safe('InternalAutoParameterSource',
        pconfig, {}, output_vn, lookup_vn_list)
    self._psrc_list.append(psrc)
def _get_available_plugins(plugin_infos): available_plugins = {} for plugin in plugin_infos: for base in plugin_infos[plugin]['bases']: alias_list = plugin_infos[plugin].get('alias', []) if alias_list: alias_list = lidfilter(alias_list) plugin_name = '%s_' % plugin if alias_list: plugin_name = '%s_ (alias: %s)' % (plugin, str.join(', ', alias_list)) available_plugins.setdefault(base, []).append(plugin_name) return available_plugins
def _get_node_label(instance):
    """Pick the shortest display name for *instance*.

    Candidates are the class name, repr(instance) and - for plugin
    classes - the config-tag-qualified aliases (or the bare aliases when
    repr is longer than the class name). For NamedPlugin instances whose
    object name differs from the class name, the object name is appended
    in parentheses.
    """
    cls = instance.__class__
    candidates = [cls.__name__, repr(instance)]
    if hasattr(cls, 'alias_list'):
        alias_list = lidfilter(cls.get_class_name_list())
        if hasattr(cls, 'config_tag_name'):
            for alias in alias_list:
                candidates.append('%s:%s' % (instance.config_tag_name, alias))
        elif len(repr(instance)) > len(cls.__name__):
            candidates.extend(alias_list[1:])
    # shortest candidate wins; ties go to the earliest entry
    result = min(candidates, key=len)
    if isinstance(instance, NamedPlugin):
        if instance.get_object_name().lower() != cls.__name__.lower():
            result += ' (%s)' % instance.get_object_name()
    return result
def _iter_datasource_items(self, item, metadata_dict, entries, location_list, obj_dict):
    """Fill metadata_dict['PARENT_PATH'] with parent dataset names and yield the item.

    Parent lfns referenced by the metadata keys in self._parent_keys are
    resolved to parent dataset names via the cached mapping for the
    configured parent source, extended by a 'datacache.dat' file in the
    work directory if present.
    """
    # if parent source is not defined, try to get datacache from GC_WORKDIR
    map_plfnp2pdn = dict(self._plfnp2pdn_cache.get(self._parent_source, {}))
    datacache_fn = os.path.join(obj_dict.get('GC_WORKDIR', ''), 'datacache.dat')
    if os.path.exists(datacache_fn):
        # extend configured parent source with datacache if it exists
        map_plfnp2pdn.update(self._read_plfnp_map(self._empty_config, datacache_fn))
    pdn_list = []  # list with parent dataset names
    for key in ifilter(metadata_dict.__contains__, self._parent_keys):
        parent_lfn_list = metadata_dict[key]
        # metadata values may be a single lfn or a list of lfns
        if not isinstance(parent_lfn_list, list):
            parent_lfn_list = [metadata_dict[key]]
        for parent_lfn in parent_lfn_list:
            # unresolved lfns yield None entries - filtered out below
            pdn_list.append(map_plfnp2pdn.get(self._get_lfnp(parent_lfn)))
    # deduplicate and drop None entries
    metadata_dict['PARENT_PATH'] = lidfilter(set(pdn_list))
    yield (item, metadata_dict, entries, location_list, obj_dict)
def _get_node_label(instance):
    """Return the shortest display name for *instance*.

    Candidates: class name, repr(instance) and - for plugin classes -
    config-tag-qualified aliases (or bare aliases when repr is longer
    than the class name). NamedPlugin instances with a distinct object
    name get it appended in parentheses.
    """
    names = [instance.__class__.__name__, repr(instance)]
    if hasattr(instance.__class__, 'alias_list'):
        alias_list = lidfilter(instance.__class__.get_class_name_list())
        if hasattr(instance.__class__, 'config_tag_name'):
            # qualify each alias with the config tag name
            names.extend(imap(lambda alias: '%s:%s' % (instance.config_tag_name, alias), alias_list))
        elif len(repr(instance)) > len(instance.__class__.__name__):
            names.extend(alias_list[1:])
    # shortest candidate wins; sorted() is stable, so ties keep list order
    result = sorted(names, key=len)[0]
    if isinstance(instance, NamedPlugin):
        if instance.get_object_name().lower() != instance.__class__.__name__.lower():
            result += ' (%s)' % instance.get_object_name()
    return result
def __init__(self, fn, format='sniffed'):
    """Read parameter space points from the csv file *fn*.

    The csv dialect is sniffed from the first line and registered as
    'sniffed'; *format* selects the dialect used for parsing. Raises
    ParameterError for rows with missing fields.
    """
    (self._fn, self._format) = (fn, format)
    fp = open(fn)
    try:
        first_line = fp.readline()
        sniffed = csv.Sniffer().sniff(first_line)
        csv.register_dialect('sniffed', sniffed)
        # trailing None key collects surplus columns of over-long rows
        csv_header = first_line.strip().split(sniffed.delimiter) + [None]
        psp_list = list(csv.DictReader(fp, csv_header, dialect=format))
    finally:
        fp.close()
    for psp in psp_list:
        psp.pop(None, None)  # discard surplus columns collected under the None key
        # DictReader fills missing fields with None - treat such rows as malformed
        if None in psp.values():
            raise ParameterError('Malformed entry in csv file %r: {%s}' % (fn, str_dict_linear(psp)))

    def _cleanup_dict(mapping):
        # strip all key value entries and filter empty parameters
        tmp = tuple(imap(lambda item: lmap(str.strip, item), mapping.items()))
        return dict(lfilter(lambda k_v: k_v[0] != '', tmp))
    output_vn_list = sorted(imap(ParameterMetadata, lidfilter(csv_header)), key=lambda k: k.value)
    InternalParameterSource.__init__(self, lmap(_cleanup_dict, psp_list), output_vn_list)
def _iter_datasource_items(self, item, metadata_dict, entries, location_list, obj_dict):
    """Yield the datasource item after filling metadata_dict['PARENT_PATH'].

    Parent lfns referenced by the metadata keys in self._parent_keys are
    mapped to parent dataset names using the cached mapping for the
    configured parent source, extended by a workdir 'datacache.dat' file
    when one exists.
    """
    # start from the cached mapping for the configured parent source
    lfnp_to_pdn = dict(self._plfnp2pdn_cache.get(self._parent_source, {}))
    datacache_fn = os.path.join(obj_dict.get('GC_WORKDIR', ''), 'datacache.dat')
    if os.path.exists(datacache_fn):
        # a datacache in the work directory extends the configured parent source
        lfnp_to_pdn.update(self._read_plfnp_map(self._empty_config, datacache_fn))
    parent_dataset_names = []
    for parent_key in ifilter(metadata_dict.__contains__, self._parent_keys):
        value = metadata_dict[parent_key]
        # metadata values may hold a single lfn or a list of lfns
        if isinstance(value, list):
            lfn_list = value
        else:
            lfn_list = [value]
        for lfn in lfn_list:
            # unresolved lfns produce None entries - filtered out below
            parent_dataset_names.append(lfnp_to_pdn.get(self._get_lfnp(lfn)))
    # deduplicate and drop None entries
    metadata_dict['PARENT_PATH'] = lidfilter(set(parent_dataset_names))
    yield (item, metadata_dict, entries, location_list, obj_dict)
def __init__(self, option_list, default_section=None):
    """Store the default section and the stripped, non-empty option names."""
    self._default_section = default_section
    stripped_options = imap(str.strip, option_list)
    self._option_list = lidfilter(stripped_options)
def _get_cls_info(cls):
    """Describe *cls* for display: primary name plus comma-separated aliases.

    The first entry of get_class_name_list() is skipped (the primary
    name); empty alias entries are dropped.
    """
    alias_list = lidfilter(cls.get_class_name_list()[1:])
    return {
        'Name': cls.__name__,
        'Alias': ', '.join(alias_list),
    }
def _get_cls_info(cls):
    """Return a display mapping with the class name and its aliases.

    The first entry of get_class_name_list() is skipped (the primary
    name); empty alias entries are dropped.
    """
    return {
        'Name': cls.__name__,
        'Alias': str.join(', ', lidfilter(cls.get_class_name_list()[1:]))
    }