def parse(profiles_base, repo_id, known_status=None, known_arch=None):
    """Return the mapping of arches to profiles for a repo.

    Reads ``profiles/profiles.desc`` where each line is expected to be
    ``arch profile status``; malformed lines are logged and skipped, while
    unknown arches/statuses are logged but still included.

    :param profiles_base: path to the repo's ``profiles`` directory
    :param repo_id: repo identifier, used only for log messages
    :param known_status: optional container of valid status values
    :param known_arch: optional container of valid arch names
    :return: frozenset of ``_KnownProfile`` records
    """
    # renamed from 'l': single-letter 'l' is ambiguous (PEP 8 E741)
    profiles = []
    fp = pjoin(profiles_base, 'profiles.desc')
    try:
        for lineno, line in iter_read_bash(fp, enum_line=True):
            try:
                arch, profile, status = line.split()
            except ValueError:
                logger.error(
                    f"{repo_id}::profiles/profiles.desc, "
                    f"line {lineno}: invalid profile line format: "
                    "should be 'arch profile status'")
                continue
            if known_status is not None and status not in known_status:
                logger.warning(
                    f"{repo_id}::profiles/profiles.desc, "
                    f"line {lineno}: unknown profile status: {status!r}")
            if known_arch is not None and arch not in known_arch:
                logger.warning(f"{repo_id}::profiles/profiles.desc, "
                               f"line {lineno}: unknown arch: {arch!r}")

            # Normalize the profile name on the offchance someone slipped an
            # extra / into it.
            path = '/'.join(filter(None, profile.split('/')))
            # a profile dir containing a 'deprecated' file is flagged as such
            deprecated = os.path.exists(
                os.path.join(profiles_base, path, 'deprecated'))
            profiles.append(
                _KnownProfile(profiles_base, arch, path, status, deprecated))
    except FileNotFoundError:
        logger.debug(
            f"No profile descriptions found at {repo_id}::profiles/profiles.desc")
    return frozenset(profiles)
def _process_plugin(package, plug, filter_disabled=False):
    """Resolve a cached plugin record to the actual plugin object.

    :param package: plugin package the record belongs to (used for logging)
    :param plug: cached record; ``plug.target`` is either a dotted import
        path (str) or an index (int) into the source module's registry list
    :param filter_disabled: if True, return None for plugins reporting
        themselves disabled
    :return: the loaded plugin object, or None on failure/disabled
    """
    if isinstance(plug.target, str):
        # direct dotted-path target; import and return that object
        plug = modules.load_any(plug.target)
    elif isinstance(plug.target, int):
        # positional target: index into the source module's registry entries
        module = modules.load_any(plug.source)
        plugs = getattr(module, PLUGIN_ATTR, {})
        plugs = plugs.get(plug.key, [])
        if len(plugs) <= plug.target:
            # BUG FIX: logger.exception() is only valid inside an except
            # block (it logs the active exception); nothing was raised here,
            # so use logger.error instead.
            logger.error(
                "plugin cache for %s, %s, %s is somehow wrong; no item at position %s",
                package.__name__, plug.source, plug.key, plug.target)
            return None
        plug = plugs[plug.target]
    else:
        logger.error(
            "package %s, plug %s; non int, non string. wtf?",
            package.__name__, plug)
        return None

    if filter_disabled:
        # static 'disabled' attribute takes precedence
        if getattr(plug, 'disabled', False):
            logger.debug("plugin %s is disabled, skipping", plug)
            return None
        # then a dynamic check hook, if the plugin provides one
        f = getattr(plug, '_plugin_disabled_check', None)
        if f is not None and f():
            logger.debug("plugin %s is disabled, skipping", plug)
            return None
    return plug
def update_mtime(path, timestamp=None):
    """Set the atime/mtime of *path*, bumping the vdb timestamp.

    Failures are logged, not raised (best-effort update).

    :param path: filesystem path to touch
    :param timestamp: seconds since the epoch; defaults to the current time
    """
    if timestamp is None:
        timestamp = time.time()
    logger.debug(f"updating vdb timestamp for {path!r}")
    try:
        os.utime(path, (timestamp, timestamp))
    except OSError as e:
        # OSError replaces the legacy EnvironmentError alias; message fixed
        # from "failed updated" to "failed updating"
        logger.error(f"failed updating vdb timestamp for {path!r}: {e}")
def is_empty(self):
    """Return boolean related to if the repo has files in it."""
    try:
        entries = listdir(self.location)
    except FileNotFoundError:
        # a missing repo directory counts as empty
        return True
    # any files existing means it's not empty
    if entries:
        return False
    logger.debug(f"repo is empty: {self.location!r}")
    return True
def walk_command_complex(buff, pos, endchar, interpret_level):
    """Scan *buff* from *pos* to the end of a command region.

    Walks forward honoring bash-style quoting, escapes, expansions, heredocs
    and comments, delegating to the specialized walkers for each construct.

    :param buff: the bash source being scanned
    :param pos: index to start scanning at
    :param endchar: character terminating this region (e.g. ``}``, ``)``)
    :param interpret_level: COMMAND_PARSING or SPACE_PARSING; controls which
        characters terminate the walk
    :return: index of the terminating position
    """
    start = pos
    # bind for speed in the hot loop
    isspace = str.isspace
    end = len(buff)
    while pos < end:
        ch = buff[pos]
        if ch == endchar:
            if endchar != '}':
                return pos
            # for '}', only terminate at region start or after ';'/newline
            if start == pos:
                return pos
            if buff[pos - 1] in ";\n":
                return pos
        elif (interpret_level == COMMAND_PARSING and ch in ';\n') or \
                (interpret_level == SPACE_PARSING and isspace(ch)):
            # statement/word boundary for the current parsing mode
            return pos
        elif ch == '\\':
            # skip the escaped character
            pos += 1
        elif ch == '<':
            if (pos < end - 1 and buff[pos + 1] == '<' and
                    interpret_level == COMMAND_PARSING):
                # '<<' heredoc operator
                pos = walk_here_statement(buff, pos + 1)
                # we continue immediately; walk_here deposits us at the end
                # of the here op, not consuming the final delimiting char
                # since it may be an endchar
                continue
            else:
                logger.debug(f'noticed <, interpret_level={interpret_level}')
        elif ch == '#':
            # only a comment if at start or preceded by whitespace/';'
            if start == pos or isspace(buff[pos - 1]) or buff[pos - 1] == ';':
                pos = walk_statement_pound(buff, pos)
                continue
        elif ch == '$':
            pos = walk_dollar_expansion(buff, pos + 1, end, endchar)
            continue
        elif ch == '{':
            pos = walk_command_escaped_parsing(buff, pos + 1, '}')
        elif ch == '(' and interpret_level == COMMAND_PARSING:
            pos = walk_command_escaped_parsing(buff, pos + 1, ')')
        elif ch in '`"':
            # backtick/double-quote regions allow nested expansion
            pos = walk_command_escaped_parsing(buff, pos + 1, ch)
        elif ch == "'" and endchar != '"':
            # single quotes are literal (no expansion) outside double quotes
            pos = walk_statement_no_parsing(buff, pos + 1, "'")
        pos += 1
    return pos
def walk_here_statement(buff, pos):
    """Scan past a bash here-document starting at the ``<<`` operator.

    Extracts the heredoc delimiter word (with optional quoting and leading
    ``-``), then searches forward for the matching terminator line.

    :param buff: the bash source being scanned
    :param pos: index of the second ``<`` of the ``<<`` operator
    :return: index just past the heredoc terminator, or the buffer end if
        the terminator is never found
    """
    pos += 1
    logger.debug(
        'starting here processing for COMMAND for level 2 at p == %.10s',
        pos)
    if buff[pos] == '<':
        # '<<<' here-string, not a heredoc; let command parsing handle it
        logger.debug(
            "correction, it's a third level here. Handing back to command parsing")
        return pos + 1
    isspace = str.isspace
    end = len(buff)
    # skip whitespace and the optional '-' (tab-stripping heredoc form)
    while pos < end and (isspace(buff[pos]) or buff[pos] == '-'):
        pos += 1
    if buff[pos] in "'\"":
        # quoted delimiter word
        end_here = walk_statement_no_parsing(buff, pos + 1, buff[pos])
        pos += 1
    else:
        # unquoted delimiter word, ends at whitespace
        end_here = walk_command_complex(buff, pos, ' ', SPACE_PARSING)
    here_word = buff[pos:end_here]
    logger.debug(f'matched len({len(here_word)})/{here_word!r} for a here word')
    # XXX watch this. Potential for horkage. Need to do the quote
    # removal thing. This sucks.
    end_here += 1
    if end_here >= end:
        return end_here
    here_len = len(here_word)
    # hunt for the terminator: the delimiter word alone on its own line
    end_here = buff.find(here_word, end_here)
    while end_here != -1:
        i = here_len + end_here
        if buff[i] in ';\n\r})':
            # candidate match; verify only tabs/spaces precede it on the line
            i = end_here - 1
            while i >= 0 and buff[i] in '\t ':
                i -= 1
            if i >= 0 and buff[i] == '\n':
                break
        end_here = buff.find(here_word, end_here + here_len)
    if end_here == -1:
        # unterminated heredoc; consume the rest of the buffer
        return end
    return end_here + len(here_word)
def main(options, out, err):
    """Run the env filter, streaming output or printing matched vars/funcs."""
    if options.debug:
        if options.funcs is None:
            logger.debug('=== Funcs: None')
        else:
            logger.debug('=== Funcs:')
            for func in options.funcs:
                logger.debug(repr(func))
        if options.vars is None:
            logger.debug('=== Vars: None')
        else:
            logger.debug('=== Vars:')
            for var in options.vars:
                logger.debug(repr(var))
        logger.debug('var_match: %r, func_match: %r',
                     options.var_match, options.func_match)

    stream = out.stream
    var_callback = None
    func_callback = None
    var_matches = []
    func_matches = []
    if options.print_vars:
        # collect matched var names instead of streaming filtered output
        stream = None
        var_callback = var_matches.append
    if options.print_funcs:
        # collect matched function bodies instead of streaming
        stream = None
        func_callback = lambda level, name, body: func_matches.append(
            (level, name, body))

    # Hack: write to the stream directly.
    filter_env.main_run(
        stream, options.input.read(), options.vars, options.funcs,
        options.var_match, options.func_match,
        global_envvar_callback=var_callback,
        func_callback=func_callback)

    if options.print_vars:
        for var in sorted(var_matches):
            out.write(var.strip())
    if options.print_funcs:
        for level, name, block in func_matches:
            # only emit top-level functions
            if level == 0:
                out.write(block)
def _sync(self, verbosity, output_fd, force=False, **kwargs):
    """Fetch the remote file, skipping the download when it is unchanged.

    Uses cached ETag/Last-Modified values for conditional requests, and
    double-checks the response headers since some servers ignore
    If-None-Match / If-Modified-Since.

    :param verbosity: accepted for sync API compatibility (unused here)
    :param output_fd: accepted for sync API compatibility (unused here)
    :param force: if True, bypass the cached-header checks
    :return: True on success (including "content unchanged")
    :raises base.SyncError: on fetch failure or unwritable repo dir
    """
    dest = self._pre_download()
    if self.uri.lower().startswith('https://'):
        # default to using system ssl certs
        # BUG FIX: Purpose.SERVER_AUTH is the correct purpose for a client
        # verifying the server's certificate; CLIENT_AUTH is for server-side
        # sockets and leaves server-cert verification disabled.
        context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
    else:
        context = None

    headers = {}
    etag_path = pjoin(self.basedir, '.etag')
    modified_path = pjoin(self.basedir, '.modified')
    # explicit init so the later force-guarded comparisons are clearly safe
    previous_etag = previous_modified = None
    if not force:
        # use cached ETag to check if updates exist
        previous_etag = readfile_ascii(etag_path, none_on_missing=True)
        if previous_etag:
            headers['If-None-Match'] = previous_etag
        # use cached modification timestamp to check if updates exist
        previous_modified = readfile_ascii(modified_path, none_on_missing=True)
        if previous_modified:
            headers['If-Modified-Since'] = previous_modified

    req = urllib.request.Request(self.uri, headers=headers, method='GET')

    # TODO: add customizable timeout
    try:
        resp = urllib.request.urlopen(req, context=context)
    except urllib.error.HTTPError as e:
        # BUG FIX: only HTTPError carries a status code; calling getcode()
        # on a plain URLError raised AttributeError on connection failures.
        if e.code == 304:  # Not Modified
            logger.debug("content is unchanged")
            return True
        raise base.SyncError(
            f'failed fetching {self.uri!r}: {e.reason}') from e
    except urllib.error.URLError as e:
        raise base.SyncError(
            f'failed fetching {self.uri!r}: {e.reason}') from e

    # Manually check cached values ourselves since some servers appear to
    # ignore If-None-Match or If-Modified-Since headers.
    convert = lambda x: x.strip() if x else None
    etag = resp.getheader('ETag')
    modified = resp.getheader('Last-Modified')
    if not force:
        if etag is not None and convert(etag) == convert(previous_etag):
            logger.debug(f"etag {etag} is equal, no update available")
            return True
        if modified is not None and convert(modified) == convert(previous_modified):
            logger.debug(f"header mtime is unmodified: {modified}")
            return True

    try:
        os.makedirs(self.basedir, exist_ok=True)
    except OSError as e:
        raise base.SyncError(
            f'failed creating repo dir {self.basedir!r}: {e.strerror}') from e

    # choose a chunk size yielding ~100 progress updates when length is known
    length = resp.getheader('content-length')
    if length:
        length = int(length)
        blocksize = max(4096, length // 100)
    else:
        blocksize = 1000000

    try:
        self._download = AtomicWriteFile(dest, binary=True, perms=0o644)
    except OSError as e:
        raise base.PathError(self.basedir, e.strerror) from e

    # retrieve the file while providing simple progress output
    size = 0
    while True:
        buf = resp.read(blocksize)
        if not buf:
            if length:
                sys.stdout.write('\n')
            break
        self._download.write(buf)
        size += len(buf)
        if length:
            sys.stdout.write('\r')
            progress = '=' * int(size / length * 50)
            percent = int(size / length * 100)
            sys.stdout.write("[%-50s] %d%%" % (progress, percent))

    self._post_download(dest)

    # TODO: store this in pkgcore cache dir instead?
    # update cached ETag/Last-Modified values
    if etag:
        with open(etag_path, 'w') as f:
            f.write(etag)
    if modified:
        with open(modified_path, 'w') as f:
            f.write(modified)
    return True
def main(options, out, err):
    """Filter an env dump, either streaming it or printing matched items."""
    if options.debug:
        if options.funcs is None:
            logger.debug('=== Funcs: None')
        else:
            logger.debug('=== Funcs:')
            for item in options.funcs:
                logger.debug(repr(item))
        if options.vars is None:
            logger.debug('=== Vars: None')
        else:
            logger.debug('=== Vars:')
            for item in options.vars:
                logger.debug(repr(item))
        logger.debug('var_match: %r, func_match: %r',
                     options.var_match, options.func_match)

    collect_vars = options.print_vars
    collect_funcs = options.print_funcs
    # printing modes suppress direct streaming
    stream = None if (collect_vars or collect_funcs) else out.stream
    var_matches = []
    func_matches = []
    var_callback = var_matches.append if collect_vars else None
    if collect_funcs:
        def func_callback(level, name, body):
            func_matches.append((level, name, body))
    else:
        func_callback = None

    # Hack: write to the stream directly.
    filter_env.main_run(stream, options.input.read(),
                        options.vars, options.funcs,
                        options.var_match, options.func_match,
                        global_envvar_callback=var_callback,
                        func_callback=func_callback)

    if collect_vars:
        for name in sorted(var_matches):
            out.write(name.strip())
    if collect_funcs:
        for level, _name, body in func_matches:
            if level == 0:
                out.write(body)
def process_scope(out, buff, pos, var_match, func_match, endchar,
                  envvar_callback=None, func_callback=None, func_level=0):
    """Walk one bash scope, copying through text that isn't filtered out.

    Text between ``window_start`` and ``window_end`` is flushed to *out*;
    setting ``window_end`` to the start of a matched construct drops that
    construct from the output (that is how filtering works).

    :param out: writable binary stream for the filtered output, or None to
        scan without emitting
    :param buff: the bash source text
    :param pos: index to start scanning at
    :param var_match: predicate for var names to filter out, or None
    :param func_match: predicate for func names to filter out, or None
    :param endchar: character terminating this scope
    :param envvar_callback: optional callable invoked with each var name seen
    :param func_callback: optional callable invoked with
        (func_level, name, body) for each function seen
    :param func_level: nesting depth, incremented for nested scopes
    :return: index of the scope terminator
    """
    window_start = pos
    window_end = None
    isspace = str.isspace
    end = len(buff)
    while pos < end and buff[pos] != endchar:
        # Wander forward to the next non space.
        if window_end is not None:
            # a filtered construct ended; flush everything before it
            if out is not None:
                out.write(buff[window_start:window_end].encode('utf-8'))
            window_start = pos
            window_end = None
        com_start = pos
        ch = buff[pos]
        if isspace(ch):
            pos += 1
            continue
        # Ignore comments.
        if ch == '#':
            pos = walk_statement_pound(buff, pos, endchar)
            continue
        new_start, new_end, new_p = is_function(buff, pos)
        if new_p is not None:
            # function definition: recurse into its body scope
            func_name = buff[new_start:new_end]
            logger.debug(f'matched func name {func_name!r}')
            new_p = process_scope(None, buff, new_p, None, None, '}',
                                  func_callback=func_callback,
                                  func_level=func_level + 1)
            logger.debug(f'ended processing {func_name!r}')
            if func_callback is not None:
                func_callback(func_level, func_name, buff[new_start:new_p])
            if func_match is not None and func_match(func_name):
                # drop the whole function definition from the output
                logger.debug(f'filtering func {func_name!r}')
                window_end = com_start
            pos = new_p
            pos += 1
            continue
        # Check for env assignment.
        new_start, new_end, new_p = is_envvar(buff, pos)
        if new_p is None:
            # Non env assignment.
            pos = walk_command_complex(buff, pos, endchar, COMMAND_PARSING)
            # icky icky icky icky
            if pos < end and buff[pos] != endchar:
                pos += 1
        else:
            # Env assignment.
            var_name = buff[new_start:new_end]
            pos = new_p
            if envvar_callback:
                envvar_callback(var_name)
            logger.debug(f'matched env assign {var_name!r}')
            if var_match is not None and var_match(var_name):
                # This would be filtered.
                logger.info(f"filtering var {var_name!r}")
                window_end = com_start
            if pos >= end:
                return pos
            # consume the assignment's value, honoring quoting/expansions
            while (pos < end and not isspace(buff[pos])
                    and buff[pos] != ';'):
                if buff[pos] == "'":
                    pos = walk_statement_no_parsing(buff, pos + 1, "'") + 1
                elif buff[pos] in '"`':
                    pos = walk_command_escaped_parsing(
                        buff, pos + 1, buff[pos]) + 1
                elif buff[pos] == '(':
                    pos = walk_command_escaped_parsing(
                        buff, pos + 1, ')') + 1
                elif buff[pos] == '$':
                    pos += 1
                    if pos >= end:
                        continue
                    pos = walk_dollar_expansion(buff, pos, end, endchar)
                    continue
                else:
                    # blah=cah ; single word
                    pos = walk_command_complex(buff, pos, ' ', SPACE_PARSING)
    if out is not None:
        # flush whatever remains of the current window
        if window_end is None:
            window_end = pos
        if window_end > end:
            window_end = end
        out.write(buff[window_start:window_end].encode('utf-8'))
    return pos
def _disabled_if_frozen(self, command):
    """Return True when *command* may run, i.e. the repo is not frozen.

    Logs a debug message whenever the command is being disabled.
    """
    frozen = self.repo.frozen
    if frozen:
        logger.debug(
            "disabling repo(%r) command(%r) due to repo being frozen",
            self.repo, command)
    return not frozen
def initialize_cache(package, force=False, cache_dir=None):
    """Determine available plugins in a package.

    Writes cache files if they are stale and writing is possible.

    :param package: the plugin package (module object) to scan
    :param force: if True, ignore existing caches and rebuild from scratch
    :param cache_dir: explicit cache location; if None, one is picked based
        on existing caches, the running uid, and whether we run from a repo
    :return: immutable mapping of plugin key -> sorted plugin records
    """
    modpath = os.path.dirname(package.__file__)
    pkgpath = os.path.dirname(os.path.dirname(modpath))
    # defaults: leave ownership alone (-1 means "don't change" for chown)
    uid = gid = -1
    mode = 0o755

    if cache_dir is None:
        if not force:
            # use user-generated caches if they exist, fallback to module cache
            if os.path.exists(pjoin(const.USER_CACHE_PATH, CACHE_FILENAME)):
                cache_dir = const.USER_CACHE_PATH
            elif os.path.exists(pjoin(const.SYSTEM_CACHE_PATH, CACHE_FILENAME)):
                cache_dir = const.SYSTEM_CACHE_PATH
                uid = os_data.portage_uid
                gid = os_data.portage_gid
                mode = 0o775
            else:
                cache_dir = modpath
        else:
            # generate module cache when running from git repo, otherwise create system/user cache
            if pkgpath == sys.path[0]:
                cache_dir = modpath
            elif os_data.uid in (os_data.root_uid, os_data.portage_uid):
                cache_dir = const.SYSTEM_CACHE_PATH
                uid = os_data.portage_uid
                gid = os_data.portage_gid
                mode = 0o775
            else:
                cache_dir = const.USER_CACHE_PATH

    # put pkgcore consumer plugins (e.g. pkgcheck) inside pkgcore cache dir
    if cache_dir in (const.SYSTEM_CACHE_PATH, const.USER_CACHE_PATH):
        chunks = package.__name__.split('.', 1)
        if chunks[0] != os.path.basename(cache_dir):
            cache_dir = pjoin(cache_dir, chunks[0])

    # package plugin cache, see above.
    package_cache = defaultdict(set)
    stored_cache_name = pjoin(cache_dir, CACHE_FILENAME)
    stored_cache = _read_cache_file(package, stored_cache_name)

    if force:
        _clean_old_caches(cache_dir)

    # Directory cache, mapping modulename to
    # (mtime, set([keys]))
    modlist = listdir_files(modpath)
    modlist = set(x for x in modlist if os.path.splitext(x)[1] == '.py'
                  and x != '__init__.py')

    cache_stale = False
    # Hunt for modules.
    actual_cache = defaultdict(set)
    # memoize mtime lookups, truncated to whole seconds
    mtime_cache = mappings.defaultdictkey(lambda x: int(os.path.getmtime(x)))
    for modfullname in sorted(modlist):
        modname = os.path.splitext(modfullname)[0]
        # It is an actual module. Check if its cache entry is valid.
        mtime = mtime_cache[pjoin(modpath, modfullname)]
        vals = stored_cache.get((modname, mtime))
        if vals is None or force:
            # Cache entry is stale.
            logger.debug(
                'stale because of %s: actual %s != stored %s',
                modname, mtime, stored_cache.get(modname, (0, ()))[0])
            cache_stale = True
            entries = []
            qualname = '.'.join((package.__name__, modname))
            module = import_module(qualname)
            registry = getattr(module, PLUGIN_ATTR, {})
            vals = set()
            for key, plugs in registry.items():
                for idx, plug_name in enumerate(plugs):
                    if isinstance(plug_name, str):
                        # string entry: resolve it via the plugin loader
                        plug = _process_plugin(
                            package, _plugin_data(key, 0, qualname, plug_name))
                    else:
                        plug = plug_name
                    if plug is None:
                        # import failure, ignore it, error already logged
                        continue
                    priority = getattr(plug, 'priority', 0)
                    if not isinstance(priority, int):
                        logger.error(
                            "ignoring plugin %s: has a non integer priority: %s",
                            plug, priority)
                        continue
                    if plug_name is plug:
                        # this means it's an object, rather than a string; store
                        # the offset.
                        plug_name = idx
                    data = _plugin_data(key, priority, qualname, plug_name)
                    vals.add(data)
        actual_cache[(modname, mtime)] = vals
        for data in vals:
            package_cache[data.key].add(data)
    if force or set(stored_cache) != set(actual_cache):
        logger.debug('updating cache %r for new plugins', stored_cache_name)
        ensure_dirs(cache_dir, uid=uid, gid=gid, mode=mode)
        _write_cache_file(stored_cache_name, actual_cache, uid=uid, gid=gid)

    return mappings.ImmutableDict(
        (k, sort_plugs(v)) for k, v in package_cache.items())