def hardcoded_categories(self):
    """Return the categories listed in $LOC/profiles/categories, if present.

    Returns a tuple of interned category names, or None when the file
    is missing (readlines is invoked with its swallow/none flags set).
    """
    lines = readlines(pjoin(self.base, 'profiles', 'categories'), True, True, True)
    if lines is None:
        return None
    # intern the names: category strings recur heavily across the codebase,
    # so sharing one object per name speeds comparisons and saves memory.
    return tuple(intern(name) for name in lines)
def read_updates(path):
    """Parse every update file under ``path`` into per-package command chains.

    :param path: directory containing profile update files.
    :return: dict mapping each package key to its flattened, non-empty
        list of update commands.
    """

    def _fresh_node():
        # Both slots alias the same deque: [0] is the head of the chain and
        # [1] the tail where new commands are appended.  Package moves into a
        # specific pkg can therefore pick up changes past their splice point
        # while ignoring earlier ones; afterwards the chain is flattened into
        # a per-cp command sequence — no lookups needed, though cycles must
        # be watched for.
        shared = deque()
        return [shared, shared]

    mods = defaultdict(_fresh_node)
    moved = {}
    for entry in _scan_directory(path):
        full_path = pjoin(path, entry)
        _process_update(readlines(full_path), full_path, mods, moved)

    # Force a walk of each tree, flattening it, and drop packages whose
    # chain ended up empty.
    commands = {}
    for key, node in mods.iteritems():
        chain = list(iflatten_instance(node[0], tuple))
        if chain:
            commands[key] = chain
    return commands
def read_updates(path):
    """Parse every update file under ``path`` into per-package command chains.

    :param path: directory containing profile update files.
    :return: dict mapping each package key to its flattened, non-empty
        list of update commands.
    """

    def _node():
        # Head ([0]) and tail ([1]) alias one deque: pkg moves into a
        # specific pkg can pick up changes past their splice point while
        # ignoring changes prior to it.  Afterwards the structure is
        # flattened into a per-cp chain of commands — no lookups required,
        # though cycles must be watched for.
        q = deque()
        return [q, q]

    mods = defaultdict(_node)
    moved = {}
    for name in _scan_directory(path):
        target = pjoin(path, name)
        _process_update(readlines(target), target, mods, moved)

    # Walk each tree, flattening it, then filter out empty nodes.
    flattened = (
        (cp, list(iflatten_instance(node[0], tuple)))
        for cp, node in mods.iteritems()
    )
    return {cp: cmds for cp, cmds in flattened if cmds}
def hardcoded_categories(self):
    """Return the categories listed in $LOC/profiles/categories, if present.

    Returns a tuple of interned category names, or None when the file
    is missing (readlines is invoked with its swallow/none flags set).
    """
    lines = readlines(pjoin(self.base, 'profiles', 'categories'), True, True, True)
    if lines is None:
        return None
    # intern the names so repeated category strings share one object.
    return tuple(intern(name) for name in lines)
def categories(self):
    """Return the categories from the profile's ``categories`` file.

    Returns a tuple of interned category names; an empty tuple when the
    file is missing.
    """
    lines = readlines(pjoin(self.profiles_base, 'categories'), True, True, True)
    if lines is None:
        return ()
    return tuple(map(intern, lines))
def _parse_config(self):
    """Load data from the repo's metadata/layout.conf file.

    Parses the layout.conf key/value pairs and pins the results onto
    ``self`` via ``object.__setattr__`` (the instance is otherwise
    immutable): repo_name, manifests, masters, aliases, EAPI policy,
    allowed PROPERTIES/RESTRICT values, cache_format, profile_formats.
    """
    path = pjoin(self.location, self.layout_offset)
    # swallow_missing: a repo without layout.conf just yields an empty dict.
    data = read_dict(iter_read_bash(
        readlines(path, strip_whitespace=True, swallow_missing=True)),
        source_isiter=True, strip=True, filename=path, ignore_errors=True)

    sf = object.__setattr__
    sf(self, 'repo_name', data.get('repo-name', None))

    # manifest-hashes: 'size' is always forced in, then stable-deduped.
    hashes = data.get('manifest-hashes', '').lower().split()
    if hashes:
        hashes = ['size'] + hashes
        hashes = tuple(iter_stable_unique(hashes))
    else:
        hashes = self.default_hashes

    required_hashes = data.get('manifest-required-hashes', '').lower().split()
    if required_hashes:
        required_hashes = ['size'] + required_hashes
        required_hashes = tuple(iter_stable_unique(required_hashes))
    else:
        required_hashes = self.default_required_hashes

    manifest_policy = data.get('use-manifests', 'strict').lower()
    d = {
        'disabled': (manifest_policy == 'false'),
        'strict': (manifest_policy == 'strict'),
        'thin': (data.get('thin-manifests', '').lower() == 'true'),
        'signed': (data.get('sign-manifests', 'true').lower() == 'true'),
        'hashes': hashes,
        'required_hashes': required_hashes,
    }
    sf(self, 'manifests', _immutable_attr_dict(d))

    masters = data.get('masters')
    _missing_masters = False
    if masters is None:
        # Only warn when the repo actually has content; an empty repo
        # legitimately has nothing to configure yet.
        if not self.is_empty:
            logger.warning(
                f"{self.repo_id} repo at {self.location!r}, doesn't "
                "specify masters in metadata/layout.conf. Please explicitly "
                "set masters (use \"masters =\" if the repo is standalone).")
        _missing_masters = True
        masters = ()
    else:
        masters = tuple(iter_stable_unique(masters.split()))
    sf(self, '_missing_masters', _missing_masters)
    sf(self, 'masters', masters)

    # Every name the repo may be addressed by; falsy entries are dropped.
    aliases = data.get('aliases', '').split() + [
        self.config_name, self.repo_name, self.pms_repo_name, self.location]
    sf(self, 'aliases', tuple(filter(None, iter_stable_unique(aliases))))

    sf(self, 'eapis_deprecated',
       tuple(iter_stable_unique(data.get('eapis-deprecated', '').split())))
    sf(self, 'eapis_banned',
       tuple(iter_stable_unique(data.get('eapis-banned', '').split())))
    sf(self, 'properties_allowed',
       tuple(iter_stable_unique(data.get('properties-allowed', '').split())))
    sf(self, 'restrict_allowed',
       tuple(iter_stable_unique(data.get('restrict-allowed', '').split())))

    v = set(data.get('cache-formats', 'md5-dict').lower().split())
    if not v:
        v = [None]
    else:
        # sort into favored order
        v = [f for f in self.supported_cache_formats if f in v]
        if not v:
            # fix: was an f-string with no placeholders (pointless f-prefix)
            logger.warning('unknown cache format: falling back to md5-dict format')
            v = ['md5-dict']
    # fix: v is always a list by this point, so the list(v) copy was redundant
    sf(self, 'cache_format', v[0])

    profile_formats = set(data.get('profile-formats', 'pms').lower().split())
    if not profile_formats:
        logger.info(
            f"{self.repo_id!r} repo at {self.location!r} has explicitly "
            "unset profile-formats, defaulting to pms")
        profile_formats = {'pms'}
    unknown = profile_formats.difference(self.supported_profile_formats)
    if unknown:
        logger.info("%r repo at %r has unsupported profile format%s: %s",
                    self.repo_id, self.location, pluralism(unknown),
                    ', '.join(sorted(unknown)))
        # unsupported formats are dropped, falling back to pms behavior
        profile_formats.difference_update(unknown)
        profile_formats.add('pms')
    sf(self, 'profile_formats', profile_formats)
def _handle(self):
    # Re-read the backing file on every access, returning its lines with
    # whitespace stripped (first True).  The two False flags presumably
    # leave readlines()'s missing-file behaviors disabled, so a missing
    # file raises rather than being swallowed -- TODO confirm against the
    # readlines() helper's signature.
    return readlines(self._location, True, False, False)