def format_qa_output(formatter, stats, fails, dofull, dofail, options, qawarnings):
	"""Helper function that formats output properly

	Args:
		formatter - a subclass of Formatter
		stats - a dict of qa status items
		fails - a dict of qa status failures
		dofull - boolean to print full results or a summary
		dofail - boolean to decide if failure was hard or soft
		options - parsed options; options.mode == 'full' selects full output
		qawarnings - collection of category names treated as warnings

	Returns:
		None (modifies formatter)
	"""
	full = options.mode == 'full'
	# we only want key value pairs where value > 0
	for category, number in \
		filter(lambda myitem: myitem[1] > 0, iter(stats.items())):
		formatter.add_literal_data(_unicode_decode(" " + category.ljust(30)))
		# warning categories are styled WARN, hard failures BAD
		if category in qawarnings:
			formatter.push_style("WARN")
		else:
			formatter.push_style("BAD")
		formatter.add_literal_data(_unicode_decode(str(number)))
		formatter.pop_style()
		formatter.add_line_break()
		if not dofull:
			if not full and dofail and category in qawarnings:
				# warnings are considered noise when there are failures
				continue
			fails_list = fails[category]
			if not full and len(fails_list) > 12:
				# truncate the per-category list in summary mode
				fails_list = fails_list[:12]
			for failure in fails_list:
				formatter.add_literal_data(_unicode_decode(" " + failure))
				formatter.add_line_break()
def multiBuilder(self, options, settings, trees):
	"""Build a dict of StaticFileSet objects, one per file found under
	the configured "directory" option (default: <config root>/sets).

	options["name_pattern"] must contain a $name or ${name}
	placeholder, replaced by each file's path relative to the base
	directory to form the set name.

	@raises SetConfigError: for a missing placeholder, an unknown
		repository referenced in a repo-path variable, or a directory
		path that cannot be represented in the filesystem encoding.
	"""
	rValue = {}
	directory = options.get("directory",
		os.path.join(settings["PORTAGE_CONFIGROOT"], USER_CONFIG_PATH, "sets"))
	name_pattern = options.get("name_pattern", "${name}")
	if not "$name" in name_pattern and not "${name}" in name_pattern:
		raise SetConfigError(_("name_pattern doesn't include ${name} placeholder"))
	greedy = get_boolean(options, "greedy", False)
	# look for repository path variables
	match = self._repopath_match.match(directory)
	if match:
		try:
			directory = self._repopath_sub.sub(
				trees["porttree"].dbapi.treemap[match.groupdict()["reponame"]],
				directory)
		except KeyError:
			raise SetConfigError(_("Could not find repository '%s'")
				% match.groupdict()["reponame"])
	try:
		directory = _unicode_decode(directory,
			encoding=_encodings['fs'], errors='strict')
		# Now verify that we can also encode it.
		_unicode_encode(directory,
			encoding=_encodings['fs'], errors='strict')
	except UnicodeError:
		directory = _unicode_decode(directory,
			encoding=_encodings['fs'], errors='replace')
		raise SetConfigError(
			_("Directory path contains invalid character(s) for encoding '%s': '%s'") \
			% (_encodings['fs'], directory))
	if os.path.isdir(directory):
		directory = normalize_path(directory)
		for parent, dirs, files in os.walk(directory):
			try:
				parent = _unicode_decode(parent,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeDecodeError:
				continue
			# prune hidden directories (iterate a copy since we
			# mutate dirs in place to stop os.walk descending)
			for d in dirs[:]:
				if d[:1] == '.':
					dirs.remove(d)
			for filename in files:
				try:
					filename = _unicode_decode(filename,
						encoding=_encodings['fs'], errors='strict')
				except UnicodeDecodeError:
					continue
				if filename[:1] == '.':
					continue
				if filename.endswith(".metadata"):
					continue
				# set name is the path relative to the base directory
				filename = os.path.join(parent,
					filename)[1 + len(directory):]
				myname = name_pattern.replace("$name", filename)
				myname = myname.replace("${name}", filename)
				rValue[myname] = StaticFileSet(
					os.path.join(directory, filename),
					greedy=greedy, dbapi=trees["vartree"].dbapi)
	return rValue
def _copyxattr(src, dest, exclude=None):
	"""Copy extended attributes from src to dest via the os.*xattr API.

	Attribute names matched by *exclude* (see _get_xattr_excluder) are
	skipped. Raises OperationNotSupported when the destination
	filesystem rejects an attribute.
	"""
	try:
		attrs = _os.listxattr(src)
	except OSError as e:
		# a filesystem without xattr support is treated as having
		# no attributes at all
		if e.errno != OperationNotSupported.errno:
			raise
		attrs = ()
	if attrs:
		if exclude is not None and isinstance(attrs[0], bytes):
			exclude = exclude.encode(_encodings["fs"])
		exclude = _get_xattr_excluder(exclude)
		for attr in attrs:
			if exclude(attr):
				continue
			try:
				_os.setxattr(dest, attr, _os.getxattr(src, attr))
				raise_exception = False
			except OSError:
				raise_exception = True
			if raise_exception:
				# raise outside the except block so the original
				# OSError is not chained into this one
				raise OperationNotSupported(
					_("Filesystem containing file '%s' "
					"does not support extended attribute '%s'") %
					(_unicode_decode(dest), _unicode_decode(attr)))
def _metadata_exception(self, k, e):
	"""Record invalid-metadata messages for metadata key *k* after
	exception *e* was raised while processing it.

	For installed packages the path of the file containing the bad
	metadata is included so the user can fix it by hand.
	"""
	# For unicode safety with python-2.x we need to avoid
	# using the string format operator with a non-unicode
	# format string, since that will result in the
	# PortageException.__str__() method being invoked,
	# followed by unsafe decoding that may result in a
	# UnicodeDecodeError. Therefore, use _unicode_decode()
	# to ensure that format strings are unicode, so that
	# PortageException.__unicode__() is used when necessary
	# in python-2.x.
	if not self.installed:
		categorized_error = False
		if e.errors:
			for error in e.errors:
				if getattr(error, 'category', None) is None:
					continue
				categorized_error = True
				self._invalid_metadata(error.category,
					_unicode_decode("%s: %s") % (k, error))
		if not categorized_error:
			# no error carried a category; file it under <key>.syntax
			self._invalid_metadata(k + ".syntax",
				_unicode_decode("%s: %s") % (k, e))
	else:
		# For installed packages, show the path of the file
		# containing the invalid metadata, since the user may
		# want to fix the deps by hand.
		vardb = self.root_config.trees['vartree'].dbapi
		path = vardb.getpath(self.cpv, filename=k)
		self._invalid_metadata(k + ".syntax",
			_unicode_decode("%s: %s in '%s'") % (k, e, path))
def testCompileModules(self):
	"""Compile-check every Python module under PORTAGE_BIN_PATH and
	PORTAGE_PYM_PATH, including scripts with a python shebang."""
	for parent, dirs, files in itertools.chain(
		os.walk(PORTAGE_BIN_PATH),
		os.walk(PORTAGE_PYM_PATH)):
		parent = _unicode_decode(parent,
			encoding=_encodings['fs'], errors='strict')
		for x in files:
			x = _unicode_decode(x,
				encoding=_encodings['fs'], errors='strict')
			# skip already-compiled artifacts
			if x[-4:] in ('.pyc', '.pyo'):
				continue
			x = os.path.join(parent, x)
			st = os.lstat(x)
			if not stat.S_ISREG(st.st_mode):
				continue
			do_compile = False
			if x[-3:] == '.py':
				do_compile = True
			else:
				# Check for python shebang
				with open(_unicode_encode(x,
					encoding=_encodings['fs'], errors='strict'), 'rb') as f:
					line = _unicode_decode(f.readline(),
						encoding=_encodings['content'], errors='replace')
				if line[:2] == '#!' and 'python' in line:
					do_compile = True
			if do_compile:
				with open(_unicode_encode(x,
					encoding=_encodings['fs'], errors='strict'), 'rb') as f:
					# compile() raises SyntaxError, failing the test
					compile(f.read(), x, 'exec')
def _init(config_root='/'):
	"""
	Load color.map from the given config_root. This is called
	automatically on first access of the codes or _styles attributes
	(unless it has already been called for some other reason).
	"""
	global _color_map_loaded, codes, _styles
	if _color_map_loaded:
		return
	_color_map_loaded = True
	# Unwrap the lazy proxies into their underlying dicts.
	codes = object.__getattribute__(codes, '_attr')
	_styles = object.__getattribute__(_styles, '_attr')
	# Force all values to unicode for python-2.x safety.
	for k, v in codes.items():
		codes[k] = _unicode_decode(v)
	for k, v in _styles.items():
		_styles[k] = _unicode_decode(v)
	try:
		_parse_color_map(config_root=config_root,
			onerror=lambda e: writemsg("%s\n" % str(e), noiselevel=-1))
	except FileNotFound:
		# a missing color.map is not an error; defaults remain
		pass
	except PermissionDenied as e:
		writemsg(_("Permission denied: '%s'\n") % str(e), noiselevel=-1)
		del e
	except PortageException as e:
		writemsg("%s\n" % str(e), noiselevel=-1)
		del e
def addtolist(mylist, curdir):
	"""(list, dir) --- Takes an array(list)
	and appends all files from dir down the directory tree.
	Returns nothing. list is modified.

	Directory entries whose names cannot be decoded with the
	filesystem encoding are pruned from the walk; undecodable file
	names are skipped.
	"""
	curdir = normalize_path(_unicode_decode(curdir,
		encoding=_encodings['fs'], errors='strict'))
	for parent, dirs, files in os.walk(curdir):
		parent = _unicode_decode(parent,
			encoding=_encodings['fs'], errors='strict')
		if parent != curdir:
			mylist.append(parent[len(curdir) + 1:] + os.sep)
		# Iterate over a copy: removing from the list that is being
		# iterated makes the loop skip the element that follows each
		# removal, leaving some undecodable directories unpruned.
		for x in dirs[:]:
			try:
				_unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeDecodeError:
				# prune in place so os.walk does not descend into it
				dirs.remove(x)
		for x in files:
			try:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeDecodeError:
				continue
			mylist.append(os.path.join(parent, x)[len(curdir) + 1:])
def testBashSyntax(self):
	"""Run 'bash -n' (syntax check only) on every file under
	PORTAGE_BIN_PATH that carries a bash shebang."""
	for parent, dirs, files in os.walk(PORTAGE_BIN_PATH):
		parent = _unicode_decode(parent,
			encoding=_encodings['fs'], errors='strict')
		for x in files:
			x = _unicode_decode(x,
				encoding=_encodings['fs'], errors='strict')
			# x.split('.')[-1] contains no leading dot, so the old
			# comparison against ('.py', '.pyc', '.pyo') never
			# matched; compare bare extensions instead.
			ext = x.split('.')[-1]
			if ext in ('py', 'pyc', 'pyo'):
				continue
			x = os.path.join(parent, x)
			st = os.lstat(x)
			if not stat.S_ISREG(st.st_mode):
				continue
			# Check for bash shebang
			with open(_unicode_encode(x,
				encoding=_encodings['fs'], errors='strict'), 'rb') as f:
				line = _unicode_decode(f.readline(),
					encoding=_encodings['content'], errors='replace')
			if line[:2] == '#!' and \
				'bash' in line:
				cmd = "%s -n %s" % (_shell_quote(BASH_BINARY), _shell_quote(x))
				status, output = subprocess_getstatusoutput(cmd)
				self.assertEqual(os.WIFEXITED(status) and \
					os.WEXITSTATUS(status) == os.EX_OK, True, msg=output)
def _copyxattr(src, dest, exclude=None):
	"""Copy the extended attributes from |src| to |dest|"""
	try:
		attrs = xattr.list(src)
	except (OSError, IOError) as e:
		# a filesystem without xattr support is treated as having
		# no attributes at all
		if e.errno != OperationNotSupported.errno:
			raise
		attrs = ()
	if attrs:
		if exclude is not None and isinstance(attrs[0], bytes):
			exclude = exclude.encode(_encodings['fs'])
		exclude = _get_xattr_excluder(exclude)
		for attr in attrs:
			if exclude(attr):
				continue
			try:
				xattr.set(dest, attr, xattr.get(src, attr))
				raise_exception = False
			except (OSError, IOError):
				raise_exception = True
			if raise_exception:
				# raise outside the except block so the original
				# error is not chained into this one
				raise OperationNotSupported(_("Filesystem containing file '%s' "
					"does not support extended attribute '%s'") %
					(_unicode_decode(dest), _unicode_decode(attr)))
def emergelog(xterm_titles, mystr, short_msg=None):
	"""Append a timestamped entry to emerge.log, optionally setting the
	xterm title to *short_msg* first. Failures are reported via
	writemsg rather than raised."""
	if _disable:
		return
	mystr = _unicode_decode(mystr)
	if short_msg is not None:
		short_msg = _unicode_decode(short_msg)
	if xterm_titles and short_msg:
		if "HOSTNAME" in os.environ:
			short_msg = os.environ["HOSTNAME"]+": "+short_msg
		xtermTitle(short_msg)
	try:
		file_path = os.path.join(_emerge_log_dir, 'emerge.log')
		existing_log = os.path.exists(file_path)
		mylogfile = io.open(_unicode_encode(file_path,
			encoding=_encodings['fs'], errors='strict'),
			mode='a', encoding=_encodings['content'],
			errors='backslashreplace')
		if not existing_log:
			# only adjust ownership/mode on a freshly created log
			portage.util.apply_secpass_permissions(file_path,
				uid=portage.portage_uid, gid=portage.portage_gid,
				mode=0o660)
		# serialize writers with a lock on the log file
		mylock = portage.locks.lockfile(file_path)
		try:
			mylogfile.write("%.0f: %s\n" % (time.time(), mystr))
			mylogfile.close()
		finally:
			portage.locks.unlockfile(mylock)
	except (IOError,OSError,portage.exception.PortageException) as e:
		if secpass >= 1:
			portage.util.writemsg("emergelog(): %s\n" % (e,), noiselevel=-1)
def testCompileModules(self):
	"""Byte-compile every Python module/script under PORTAGE_BIN_PATH
	and PORTAGE_PYM_PATH with py_compile, raising on syntax errors."""
	for parent, dirs, files in itertools.chain(
		os.walk(PORTAGE_BIN_PATH),
		os.walk(PORTAGE_PYM_PATH)):
		parent = _unicode_decode(parent,
			encoding=_encodings['fs'], errors='strict')
		for x in files:
			x = _unicode_decode(x,
				encoding=_encodings['fs'], errors='strict')
			# skip already-compiled artifacts
			if x[-4:] in ('.pyc', '.pyo'):
				continue
			x = os.path.join(parent, x)
			st = os.lstat(x)
			if not stat.S_ISREG(st.st_mode):
				continue
			do_compile = False
			cfile = x
			if x[-3:] == '.py':
				do_compile = True
			else:
				# Check for python shebang
				f = open(_unicode_encode(x,
					encoding=_encodings['fs'], errors='strict'), 'rb')
				line = _unicode_decode(f.readline(),
					encoding=_encodings['content'], errors='replace')
				f.close()
				if line[:2] == '#!' and \
					'python' in line:
					do_compile = True
					# scripts lack a .py suffix, so give the
					# compiled output one
					cfile += '.py'
			if do_compile:
				# .pyc in debug mode, .pyo when running optimized
				cfile += (__debug__ and 'c' or 'o')
				py_compile.compile(x, cfile=cfile, doraise=True)
def testUseFlagDisplay(self):
	"""Verify that UseFlagDisplay formats to the expected string type
	under both python2 (__unicode__/__str__) and python3 (__str__)
	for every combination of enabled/forced and sample strings."""
	self.assertEqual(_encodings['content'], 'utf_8')
	for enabled in (True, False):
		for forced in (True, False):
			for arg_bytes in self.unicode_strings:
				arg_unicode = _unicode_decode(arg_bytes,
					encoding=_encodings['content'])
				e = UseFlagDisplay(arg_unicode, enabled, forced)
				# Force unicode format string so that __unicode__() is
				# called in python2.
				formatted_str = _unicode_decode("%s") % (e,)
				self.assertEqual(isinstance(formatted_str, basestring), True)
				if STR_IS_UNICODE:
					# Test the __str__ method which returns unicode in python3
					formatted_str = "%s" % (e,)
					self.assertEqual(isinstance(formatted_str, str), True)
				else:
					# Test the __str__ method which returns encoded bytes in python2
					formatted_bytes = "%s" % (e,)
					self.assertEqual(isinstance(formatted_bytes, bytes), True)
def process(mysettings, key, logentries, fulltext):
	"""Write *fulltext* elog messages for package *key* to a
	per-package log file under $PORT_LOGDIR (or the default
	/var/log/portage), honoring the split-elog layout and copying
	group/uid bits from the elog directory for logrotate
	compatibility.

	@return: path of the log file that was written
	"""
	if mysettings.get("PORT_LOGDIR"):
		logdir = normalize_path(mysettings["PORT_LOGDIR"])
	else:
		logdir = os.path.join(os.sep, mysettings["EPREFIX"].lstrip(os.sep),
			"var", "log", "portage")
	if not os.path.isdir(logdir):
		# Only initialize group/mode if the directory doesn't
		# exist, so that we don't override permissions if they
		# were previously set by the administrator.
		# NOTE: These permissions should be compatible with our
		# default logrotate config as discussed in bug 374287.
		uid = -1
		if portage.data.secpass >= 2:
			uid = portage_uid
		ensure_dirs(logdir, uid=uid, gid=portage_gid, mode=0o2770)
	cat = mysettings['CATEGORY']
	pf = mysettings['PF']
	# timestamped filename, e.g. <PF>:20120101-120000.log
	elogfilename = pf + ":" + _unicode_decode(
		time.strftime("%Y%m%d-%H%M%S", time.gmtime(time.time())),
		encoding=_encodings['content'], errors='replace') + ".log"
	if "split-elog" in mysettings.features:
		# one subdirectory per category
		log_subdir = os.path.join(logdir, "elog", cat)
		elogfilename = os.path.join(log_subdir, elogfilename)
	else:
		# flat layout, category encoded into the filename
		log_subdir = os.path.join(logdir, "elog")
		elogfilename = os.path.join(log_subdir, cat + ':' + elogfilename)
	_ensure_log_subdirs(logdir, log_subdir)

	elogfile = io.open(_unicode_encode(elogfilename,
		encoding=_encodings['fs'], errors='strict'),
		mode='w', encoding=_encodings['content'],
		errors='backslashreplace')
	elogfile.write(_unicode_decode(fulltext))
	elogfile.close()

	# Copy group permission bits from parent directory.
	elogdir_st = os.stat(log_subdir)
	elogdir_gid = elogdir_st.st_gid
	elogdir_grp_mode = 0o060 & elogdir_st.st_mode

	# Copy the uid from the parent directory if we have privileges
	# to do so, for compatibility with our default logrotate
	# config (see bug 378451). With the "su portage portage"
	# directive and logrotate-3.8.0, logrotate's chown call during
	# the compression phase will only succeed if the log file's uid
	# is portage_uid.
	logfile_uid = -1
	if portage.data.secpass >= 2:
		logfile_uid = elogdir_st.st_uid
	apply_permissions(elogfilename, uid=logfile_uid, gid=elogdir_gid,
		mode=elogdir_grp_mode, mask=0)

	return elogfilename
def process(mysettings, key, logentries, fulltext):
	"""Append *fulltext* elog messages for package *key* to the shared
	$PORT_LOGDIR/elog/summary.log, copying group/uid bits from the
	elog directory for logrotate compatibility.

	@return: path of summary.log
	"""
	if mysettings.get("PORT_LOGDIR"):
		logdir = normalize_path(mysettings["PORT_LOGDIR"])
	else:
		logdir = os.path.join(os.sep, mysettings["EPREFIX"].lstrip(os.sep),
			"var", "log", "portage")
	if not os.path.isdir(logdir):
		# Only initialize group/mode if the directory doesn't
		# exist, so that we don't override permissions if they
		# were previously set by the administrator.
		# NOTE: These permissions should be compatible with our
		# default logrotate config as discussed in bug 374287.
		logdir_uid = -1
		if portage.data.secpass >= 2:
			logdir_uid = portage_uid
		ensure_dirs(logdir, uid=logdir_uid, gid=portage_gid, mode=0o2770)
	elogdir = os.path.join(logdir, "elog")
	_ensure_log_subdirs(logdir, elogdir)

	# TODO: Locking
	elogfilename = elogdir+"/summary.log"
	elogfile = io.open(_unicode_encode(elogfilename,
		encoding=_encodings['fs'], errors='strict'),
		mode='a', encoding=_encodings['content'],
		errors='backslashreplace')

	# Copy group permission bits from parent directory.
	elogdir_st = os.stat(elogdir)
	elogdir_gid = elogdir_st.st_gid
	elogdir_grp_mode = 0o060 & elogdir_st.st_mode

	# Copy the uid from the parent directory if we have privileges
	# to do so, for compatibility with our default logrotate
	# config (see bug 378451). With the "su portage portage"
	# directive and logrotate-3.8.0, logrotate's chown call during
	# the compression phase will only succeed if the log file's uid
	# is portage_uid.
	logfile_uid = -1
	if portage.data.secpass >= 2:
		logfile_uid = elogdir_st.st_uid
	apply_permissions(elogfilename, uid=logfile_uid, gid=elogdir_gid,
		mode=elogdir_grp_mode, mask=0)

	time_str = time.strftime("%Y-%m-%d %H:%M:%S %Z",
		time.localtime(time.time()))
	# Avoid potential UnicodeDecodeError later.
	time_str = _unicode_decode(time_str,
		encoding=_encodings['content'], errors='replace')
	elogfile.write(_unicode_decode(
		_(">>> Messages generated by process " +
		"%(pid)d on %(time)s for package %(pkg)s:\n\n") %
		{"pid": os.getpid(), "time": time_str, "pkg": key}))
	elogfile.write(_unicode_decode(fulltext))
	elogfile.write(_unicode_decode("\n"))
	elogfile.close()

	return elogfilename
def testCompileModules(self):
	"""Byte-compile every .py file found beneath PORTAGE_PYM_PATH,
	raising (and thus failing the test) on any syntax error."""
	for dirpath, _subdirs, filenames in os.walk(PORTAGE_PYM_PATH):
		dirpath = _unicode_decode(dirpath,
			encoding=_encodings['fs'], errors='strict')
		for fname in filenames:
			fname = _unicode_decode(fname,
				encoding=_encodings['fs'], errors='strict')
			if fname.endswith('.py'):
				py_compile.compile(os.path.join(dirpath, fname),
					doraise=True)
def parse_args():
	"""Parse sys.argv for dohtml.

	@return: tuple (options, args) where options is an OptionsClass
		instance and args is the list of positional arguments.
	Exits with status 1 on undecodable arguments, status 0 after -h
	or on a value-taking option with no value.
	"""
	argv = sys.argv[:]

	if sys.hexversion >= 0x3000000:
		# We can't trust that the filesystem encoding (locale dependent)
		# correctly matches the arguments, so use surrogateescape to
		# pass through the original argv bytes for Python 3.
		fs_encoding = sys.getfilesystemencoding()
		argv = [x.encode(fs_encoding, 'surrogateescape') for x in argv]

	for x, arg in enumerate(argv):
		try:
			argv[x] = _unicode_decode(arg, errors='strict')
		except UnicodeDecodeError:
			writemsg('dohtml: argument is not encoded as UTF-8: %s\n' %
				_unicode_decode(arg), noiselevel=-1)
			sys.exit(1)

	options = OptionsClass()
	args = []

	x = 1
	while x < len(argv):
		arg = argv[x]
		if arg in ["-h","-r","-V"]:
			if arg == "-h":
				print_help()
				sys.exit(0)
			elif arg == "-r":
				options.recurse = True
			elif arg == "-V":
				options.verbose = True
		elif argv[x] in ["-A","-a","-f","-x","-p"]:
			# these options consume the following argv element
			x += 1
			if x == len(argv):
				print_help()
				sys.exit(0)
			elif arg == "-p":
				options.doc_prefix = argv[x]
				if options.doc_prefix:
					options.doc_prefix = normalize_path(options.doc_prefix)
			else:
				# comma-separated value lists
				values = argv[x].split(",")
				if arg == "-A":
					options.allowed_exts.extend(values)
				elif arg == "-a":
					options.allowed_exts = values
				elif arg == "-f":
					options.allowed_files = values
				elif arg == "-x":
					options.disallowed_dirs = values
		else:
			args.append(argv[x])
		x += 1

	return (options, args)
def query(self, prompt, enter_invalid, responses=None, colours=None):
	"""Display a prompt and a set of responses, then waits for user input
	and check it against the responses. The first match is returned.

	An empty response will match the first value in the list of responses,
	unless enter_invalid is True. The input buffer is *not* cleared prior
	to the prompt!

	prompt: The String to display as a prompt.
	responses: a List of Strings with the acceptable responses.
	colours: a List of Functions taking and returning a String, used to
	process the responses for display. Typically these will be functions
	like red() but could be e.g. lambda x: "DisplayString".

	If responses is omitted, it defaults to ["Yes", "No"], [green, red].
	If only colours is omitted, it defaults to [bold, ...].

	Returns a member of the List responses. (If called without optional
	arguments, it returns "Yes" or "No".)

	KeyboardInterrupt is converted to SystemExit to avoid tracebacks
	being printed."""
	if responses is None:
		responses = ["Yes", "No"]
		colours = [create_color_func("PROMPT_CHOICE_DEFAULT"),
			create_color_func("PROMPT_CHOICE_OTHER")]
	elif colours is None:
		colours = [bold]
	# repeat the colour list so it is at least as long as responses,
	# then trim to the same length
	colours = (colours * len(responses))[: len(responses)]
	responses = [_unicode_decode(x) for x in responses]
	if "--alert" in self.myopts:
		# ring the terminal bell before prompting
		prompt = "\a" + prompt
	print(bold(prompt), end=" ")
	try:
		while True:
			if sys.hexversion >= 0x3000000:
				try:
					response = input("[%s] " %
						"/".join([colours[i](responses[i])
						for i in range(len(responses))]))
				except UnicodeDecodeError as e:
					# salvage whatever bytes were readable
					response = _unicode_decode(e.object).rstrip("\n")
			else:
				response = raw_input("[" +
					"/".join([colours[i](responses[i])
					for i in range(len(responses))]) + "] ")
				response = _unicode_decode(response)
			if response or not enter_invalid:
				for key in responses:
					# An empty response will match the
					# first value in responses.
					if response.upper() == key[: len(response)].upper():
						return key
			print("Sorry, response '%s' not understood." % response,
				end=" ")
	except (EOFError, KeyboardInterrupt):
		print("Interrupted.")
		sys.exit(128 + signal.SIGINT)
def testCompileModules(self):
	"""Compile-check all modules listed in PORTAGE_PYM_PACKAGES and
	all scripts under PORTAGE_BIN_PATH, skipping files whose metadata
	declares a required python newer than the running interpreter."""
	iters = [os.walk(os.path.join(PORTAGE_PYM_PATH, x))
		for x in PORTAGE_PYM_PACKAGES]
	iters.append(os.walk(PORTAGE_BIN_PATH))

	for parent, _dirs, files in itertools.chain(*iters):
		parent = _unicode_decode(parent,
			encoding=_encodings['fs'], errors='strict')
		for x in files:
			x = _unicode_decode(x,
				encoding=_encodings['fs'], errors='strict')
			# skip already-compiled artifacts
			if x[-4:] in ('.pyc', '.pyo'):
				continue
			x = os.path.join(parent, x)
			st = os.lstat(x)
			if not stat.S_ISREG(st.st_mode):
				continue

			bin_path = os.path.relpath(x, PORTAGE_BIN_PATH)
			mod_path = os.path.relpath(x, PORTAGE_PYM_PATH)

			meta = module_metadata.get(mod_path) or script_metadata.get(bin_path)
			if meta:
				# honor a required_python declaration like '3.6'
				req_py = tuple(int(x) for x
					in meta.get('required_python', '0.0').split('.'))
				if sys.version_info < req_py:
					continue

			do_compile = False
			if x[-3:] == '.py':
				do_compile = True
			else:
				# Check for python shebang.
				try:
					with open(_unicode_encode(x,
						encoding=_encodings['fs'],
						errors='strict'), 'rb') as f:
						line = _unicode_decode(f.readline(),
							encoding=_encodings['content'],
							errors='replace')
				except IOError as e:
					# Some tests create files that are unreadable by the
					# user (by design), so ignore EACCES issues.
					if e.errno != errno.EACCES:
						raise
					continue
				if line[:2] == '#!' and 'python' in line:
					do_compile = True
			if do_compile:
				with open(_unicode_encode(x,
					encoding=_encodings['fs'],
					errors='strict'), 'rb') as f:
					# compile() raises SyntaxError, failing the test
					compile(f.read(), x, 'exec')
def aux_get(self, mycpv, wants, myrepo=None):
	"""Return the metadata values named by *wants* for binary package
	*mycpv*, reading either the local tbz2's xpak data or the remote
	package index. Results for cacheable keys are memoized in
	self._aux_cache.

	@param mycpv: package cpv string
	@param wants: iterable of metadata key names
	@param myrepo: accepted for dbapi interface compatibility; unused
	@return: list of values (empty string for missing keys), in the
		order of *wants*
	@raises KeyError: if a local package file does not exist
	"""
	if self.bintree and not self.bintree.populated:
		self.bintree.populate()
	cache_me = False
	if not self._known_keys.intersection(
		wants).difference(self._aux_cache_keys):
		# every requested key is cacheable; try the cache first
		aux_cache = self._aux_cache.get(mycpv)
		if aux_cache is not None:
			return [aux_cache.get(x, "") for x in wants]
		cache_me = True
	if not self.bintree._remotepkgs or \
		not self.bintree.isremote(mycpv):
		# local package: read metadata out of the tbz2's xpak segment
		tbz2_path = self.bintree.getname(mycpv)
		if not os.path.exists(tbz2_path):
			raise KeyError(mycpv)
		metadata_bytes = portage.xpak.tbz2(tbz2_path).get_data()
		def getitem(k):
			v = metadata_bytes.get(_unicode_encode(k,
				encoding=_encodings['repo.content'],
				errors='backslashreplace'))
			if v is not None:
				v = _unicode_decode(v,
					encoding=_encodings['repo.content'],
					errors='replace')
			return v
	else:
		getitem = self.bintree._remotepkgs[mycpv].get
	mydata = {}
	mykeys = wants
	if cache_me:
		mykeys = self._aux_cache_keys.union(wants)
	for x in mykeys:
		myval = getitem(x)
		# myval is None if the key doesn't exist
		# or the tbz2 is corrupt.
		if myval:
			# normalize internal whitespace to single spaces
			mydata[x] = " ".join(myval.split())
	# default EAPI to '0' when missing or empty
	if not mydata.setdefault('EAPI', _unicode_decode('0')):
		mydata['EAPI'] = _unicode_decode('0')
	if cache_me:
		aux_cache = self._aux_cache_slot_dict()
		for x in self._aux_cache_keys:
			aux_cache[x] = mydata.get(x, _unicode_decode(''))
		self._aux_cache[mycpv] = aux_cache
	return [mydata.get(x, _unicode_decode('')) for x in wants]
def make_metadata_dict(data):
	"""Decode xpak metadata from *data* (an (index, body) pair) into a
	dict of unicode strings, keeping only recognized metadata keys
	plus CATEGORY. Deprecated."""
	warnings.warn("portage.getbinpkg.make_metadata_dict() is deprecated",
		DeprecationWarning, stacklevel=2)
	xpak_id, _myglob = data
	result = {}
	for raw_key in portage.xpak.getindex_mem(xpak_id):
		key = _unicode_decode(raw_key,
			encoding=_encodings["repo.content"], errors="replace")
		if key == "CATEGORY" or key in _all_metadata_keys:
			result[key] = _unicode_decode(
				portage.xpak.getitem(data, raw_key),
				encoding=_encodings["repo.content"], errors="replace")
	return result
def make_metadata_dict(data):
	"""Build a dict of unicode metadata values from the xpak index and
	body in *data*, restricted to known metadata keys and CATEGORY."""
	xpak_id, _glob = data
	metadata = {}
	for raw_key in portage.xpak.getindex_mem(xpak_id):
		key = _unicode_decode(raw_key,
			encoding=_encodings['repo.content'], errors='replace')
		if key != "CATEGORY" and key not in _all_metadata_keys:
			continue
		metadata[key] = _unicode_decode(
			portage.xpak.getitem(data, raw_key),
			encoding=_encodings['repo.content'], errors='replace')
	return metadata
def debug_print(self):
	"""Dump every node of the graph, followed by its children and
	their highest-priority edge, via writemsg."""
	# _unicode_decode() forces unicode format strings for python-2.x
	# safety, ensuring node.__unicode__() is used when necessary.
	for node, node_data in self.nodes.items():
		writemsg(_unicode_decode("%s ") % (node,), noiselevel=-1)
		children = node_data[0]
		if children:
			writemsg("depends on\n", noiselevel=-1)
		else:
			writemsg("(no children)\n", noiselevel=-1)
		for child, priorities in children.items():
			writemsg(_unicode_decode(" %s (%s)\n") %
				(child, priorities[-1],), noiselevel=-1)
def grabfile(myfilename, compat_level=0, recursive=0):
	"""This function grabs the lines in a file, normalizes whitespace and returns lines in a list; if a line
	begins with a #, it is ignored, as are empty lines"""

	mylines=grablines(myfilename, recursive)
	newlines=[]

	for x in mylines:
		#the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
		#into single spaces.
		myline = _unicode_decode(' ').join(x.split())
		if not len(myline):
			continue
		if myline[0]=="#":
			# Check if we have a compat-level string. BC-integration data.
			# '##COMPAT==>N<==' 'some string attached to it'
			mylinetest = myline.split("<==",1)
			if len(mylinetest) == 2:
				myline_potential = mylinetest[1]
				mylinetest = mylinetest[0].split("##COMPAT==>")
				if len(mylinetest) == 2:
					if compat_level >= int(mylinetest[1]):
						# It's a compat line, and the key matches.
						newlines.append(myline_potential)
				# comment line consumed either way
				continue
			else:
				continue
		newlines.append(myline)
	return newlines
def main():
	"""Discover and run all test suites below this file's directory.

	A directory is included when it contains a '__test__' marker
	file; '.svn' directories are pruned from the walk.

	@return: the unittest result object from TextTestRunner.run()
	"""
	TEST_FILE = _unicode_encode('__test__',
		encoding=_encodings['fs'], errors='strict')
	svn_dirname = _unicode_encode('.svn',
		encoding=_encodings['fs'], errors='strict')
	suite = unittest.TestSuite()
	basedir = os.path.dirname(os.path.realpath(__file__))
	testDirs = []

	# the os.walk help mentions relative paths as being quirky
	# I was tired of adding dirs to the list, so now we add __test__
	# to each dir we want tested.
	for root, dirs, files in os.walk(basedir):
		if svn_dirname in dirs:
			dirs.remove(svn_dirname)
		try:
			root = _unicode_decode(root,
				encoding=_encodings['fs'], errors='strict')
		except UnicodeDecodeError:
			continue

		if TEST_FILE in files:
			testDirs.append(root)

	for mydir in testDirs:
		suite.addTests(getTests(os.path.join(basedir, mydir), basedir))

	return TextTestRunner(verbosity=2).run(suite)
def __init__(self, myid, myconfig, vardbapi, portdbapi):
	"""
	Simple constructor to set the ID, store the config and gets the
	XML data by calling C{self.read()}.

	@type myid: String
	@param myid: String describing the id for the GLSA object (standard
		GLSAs have an ID of the form YYYYMM-nn) or an existing
		filename containing a GLSA.
	@type myconfig: portage.config
	@param myconfig: the config that should be used for this object.
	@type vardbapi: portage.dbapi.vartree.vardbapi
	@param vardbapi: installed package repository
	@type portdbapi: portage.dbapi.porttree.portdbapi
	@param portdbapi: ebuild repository
	"""
	myid = _unicode_decode(myid,
		encoding=_encodings['content'], errors='strict')
	# six digits, dash, two digits => a GLSA ID; otherwise it must
	# name an existing file
	if re.match(r'\d{6}-\d{2}', myid):
		self.type = "id"
	elif os.path.exists(myid):
		self.type = "file"
	else:
		raise GlsaArgumentException(_("Given ID %s isn't a valid GLSA ID or filename.") % myid)
	self.nr = myid
	self.config = myconfig
	self.vardbapi = vardbapi
	self.portdbapi = portdbapi
	self.read()
def _elog_output_handler(self, fd, event):
	"""Scheduler IO callback: read elog pipe data from *fd*, split it
	into newline-terminated records, and dispatch each complete
	record to the matching portage.elog.messages reporter.

	Each record is 'funcname phase key msg' (msg is the remainder
	after three splits). Partial records are buffered in self._buf.

	@return: True to keep this handler registered, False on hangup.
	"""
	output = None
	if event & self.scheduler.IO_IN:
		try:
			output = os.read(fd, self._bufsize)
		except OSError as e:
			if e.errno not in (errno.EAGAIN, errno.EINTR):
				raise
	if output:
		lines = _unicode_decode(output).split('\n')
		if len(lines) == 1:
			# no newline in this chunk; keep accumulating
			self._buf += lines[0]
		else:
			lines[0] = self._buf + lines[0]
			# the last element is an incomplete record (possibly '')
			self._buf = lines.pop()
			out = io.StringIO()
			for line in lines:
				funcname, phase, key, msg = line.split(' ', 3)
				self._elog_keys.add(key)
				reporter = getattr(portage.elog.messages, funcname)
				reporter(msg, phase=phase, key=key, out=out)
	if event & self.scheduler.IO_HUP:
		# writer closed its end: unregister and clean up the fd
		self.scheduler.source_remove(self._elog_reg_id)
		self._elog_reg_id = None
		os.close(self._elog_reader_fd)
		self._elog_reader_fd = None
		return False
	return True
def format_date(datestr):
	"""
	Takes a date (announced, revised) date from a GLSA and formats
	it as readable text (i.e. "January 1, 2008").

	@type datestr: String
	@param datestr: the date string to reformat
	@rtype: String
	@return: a reformatted string, or the original string
		if it cannot be reformatted.
	"""
	from datetime import date
	parts = datestr.split("-", 2)
	if len(parts) != 3:
		return datestr
	try:
		# int() raises ValueError on non-numeric fields and date()
		# raises it for out-of-range values; both cases fall back to
		# returning the input unchanged.
		parsed = date(int(parts[0]), int(parts[1]), int(parts[2]))
	except ValueError:
		return datestr
	# TODO We could format to local date format '%x' here?
	return _unicode_decode(parsed.strftime("%B %d, %Y"),
		encoding=_encodings['content'], errors='replace')
def RecursiveFileLoader(filename):
	"""
	If filename is of type file, return a generate that yields filename
	else if filename is of type directory, return a generator that fields
	files in that directory.

	Ignore files beginning with . or ending in ~.
	Prune CVS directories.

	@param filename: name of a file/directory to traverse
	@rtype: list
	@returns: List of files to process
	"""
	try:
		st = os.stat(filename)
	except OSError:
		return
	if not stat.S_ISDIR(st.st_mode):
		# plain file: yield it as-is
		yield filename
		return
	for root, dirs, files in os.walk(filename):
		# prune hidden and CVS directories in place
		dirs[:] = [d for d in dirs if d[:1] != "." and d != "CVS"]
		for f in files:
			try:
				f = _unicode_decode(f,
					encoding=_encodings["fs"], errors="strict")
			except UnicodeDecodeError:
				continue
			if f[:1] == "." or f[-1:] == "~":
				continue
			yield os.path.join(root, f)
def testArrayFromfileEof(self):
	"""Regression test for array.fromfile() behavior at EOF."""
	# This tests if the following python issue is fixed
	# in the currently running version of python:
	#   http://bugs.python.org/issue5334
	input_data = "an arbitrary string"
	input_bytes = _unicode_encode(input_data,
		encoding='utf_8', errors='strict')
	f = tempfile.TemporaryFile()
	f.write(input_bytes)

	f.seek(0)
	data = []
	eof = False
	while not eof:
		a = array.array('B')
		try:
			# request one byte more than available to force EOF
			a.fromfile(f, len(input_bytes) + 1)
		except (EOFError, IOError):
			# python-3.0 lost data here
			eof = True

		if not a:
			eof = True
		else:
			data.append(_unicode_decode(a.tostring(),
				encoding='utf_8', errors='strict'))

	f.close()

	# all bytes must have survived the partial read at EOF
	self.assertEqual(input_data, ''.join(data))
def __init__(self, cpv, metadata=None, settings=None, eapi=None,
	repo=None, slot=None, build_time=None, build_id=None,
	file_size=None, mtime=None):
	"""Initialize from a cpv string plus optional metadata dict and
	keyword overrides; values present in *metadata* take precedence
	over the corresponding keyword arguments.

	@raises InvalidData: when cpv fails catpkgsplit()
	"""
	# NOTE(review): attributes are written through self.__dict__ —
	# presumably to bypass a restrictive __setattr__ on this
	# (immutable, _unicode-derived) class; confirm against the class
	# definition.
	if not isinstance(cpv, _unicode):
		# Avoid TypeError from _unicode.__init__ with PyPy.
		cpv = _unicode_decode(cpv)

	_unicode.__init__(cpv)

	if metadata is not None:
		self.__dict__['_metadata'] = metadata
		slot = metadata.get('SLOT', slot)
		repo = metadata.get('repository', repo)
		eapi = metadata.get('EAPI', eapi)
		build_time = metadata.get('BUILD_TIME', build_time)
		file_size = metadata.get('SIZE', file_size)
		build_id = metadata.get('BUILD_ID', build_id)
		mtime = metadata.get('_mtime_', mtime)

	if settings is not None:
		self.__dict__['_settings'] = settings

	if eapi is not None:
		self.__dict__['eapi'] = eapi

	self.__dict__['build_time'] = self._long(build_time, 0)
	self.__dict__['file_size'] = self._long(file_size, None)
	self.__dict__['build_id'] = self._long(build_id, None)
	self.__dict__['mtime'] = self._long(mtime, None)
	self.__dict__['cpv_split'] = catpkgsplit(cpv, eapi=eapi)
	if self.cpv_split is None:
		raise InvalidData(cpv)
	self.__dict__['cp'] = self.cpv_split[0] + '/' + self.cpv_split[1]
	# drop an implicit "-r0" revision unless it was spelled out in cpv
	if self.cpv_split[-1] == "r0" and cpv[-3:] != "-r0":
		self.__dict__['version'] = "-".join(self.cpv_split[2:-1])
	else:
		self.__dict__['version'] = "-".join(self.cpv_split[2:])
	# for match_from_list introspection
	self.__dict__['cpv'] = self
	if slot is not None:
		eapi_attrs = _get_eapi_attrs(eapi)
		slot_match = _get_slot_re(eapi_attrs).match(slot)
		if slot_match is None:
			# Avoid an InvalidAtom exception when creating SLOT atoms
			self.__dict__['slot'] = '0'
			self.__dict__['sub_slot'] = '0'
			self.__dict__['slot_invalid'] = slot
		else:
			if eapi_attrs.slot_operator:
				# "slot/sub_slot" syntax; sub_slot defaults to slot
				slot_split = slot.split("/")
				self.__dict__['slot'] = slot_split[0]
				if len(slot_split) > 1:
					self.__dict__['sub_slot'] = slot_split[1]
				else:
					self.__dict__['sub_slot'] = slot_split[0]
			else:
				self.__dict__['slot'] = slot
				self.__dict__['sub_slot'] = slot
	if repo is not None:
		repo = _gen_valid_repo(repo)
		if not repo:
			repo = _unknown_repo
		self.__dict__['repo'] = repo
def setexec(ctx="\n"):
	"""Set the SELinux exec() context to *ctx*; raise OSError when in
	enforcing mode, otherwise just print a warning on failure."""
	ctx = _native_string(ctx, encoding=_encodings['content'], errors='strict')
	if selinux.setexeccon(ctx) >= 0:
		return
	if sys.hexversion < 0x3000000:
		# decode for safe interpolation into a unicode message on py2
		ctx = _unicode_decode(ctx,
			encoding=_encodings['content'], errors='replace')
	msg = _("Failed setting exec() context \"%s\".") % ctx
	if selinux.security_getenforce() == 1:
		raise OSError(msg)
	portage.writemsg("!!! " + msg, noiselevel=-1)
def update_dbentries(update_iter, mydata):
	"""Performs update commands and returns a
	dict containing only the updated items."""
	updated_items = {}
	for k, mycontent in mydata.items():
		k_unicode = _unicode_decode(k,
			encoding=_encodings['repo.content'], errors='replace')
		if k_unicode not in ignored_dbentries:
			orig_content = mycontent
			mycontent = _unicode_decode(mycontent,
				encoding=_encodings['repo.content'], errors='replace')
			# if decoding produced a new object, the input was bytes
			# and the result must be re-encoded before returning
			is_encoded = mycontent is not orig_content
			orig_content = mycontent
			for update_cmd in update_iter:
				mycontent = update_dbentry(update_cmd, mycontent)
			if mycontent != orig_content:
				if is_encoded:
					mycontent = _unicode_encode(mycontent,
						encoding=_encodings['repo.content'],
						errors='backslashreplace')
				updated_items[k] = mycontent
	return updated_items
def make_metadata_dict(data):
	"""Deprecated: extract known metadata keys from an xpak segment
	into a dict of unicode strings."""
	warnings.warn("portage.getbinpkg.make_metadata_dict() is deprecated",
		DeprecationWarning, stacklevel=2)
	xpak_id, _myglob = data
	result = {}
	for key_bytes in portage.xpak.getindex_mem(xpak_id):
		key = _unicode_decode(key_bytes,
			encoding=_encodings['repo.content'], errors='replace')
		# Only keep recognized metadata keys (CATEGORY included).
		if key != "CATEGORY" and key not in _all_metadata_keys:
			continue
		result[key] = _unicode_decode(portage.xpak.getitem(data, key_bytes),
			encoding=_encodings['repo.content'], errors='replace')
	return result
def rename(src, dest):
	"""Rename src to dest, creating dest with src's SELinux
	security context."""
	src_bytes = _unicode_encode(src, encoding=_encodings['fs'], errors='strict')
	dest_bytes = _unicode_encode(dest, encoding=_encodings['fs'], errors='strict')
	(rc, ctx) = selinux.lgetfilecon(src_bytes)
	if rc < 0:
		raise OSError(_("rename: Failed getting context of \"%s\".") %
			_unicode_decode(src_bytes,
				encoding=_encodings['fs'], errors='replace'))
	setfscreate(ctx)
	try:
		os.rename(src_bytes, dest_bytes)
	finally:
		# Always reset the file-creation context.
		setfscreate()
def dir_get_list(baseurl, conn=None):
	"""Deprecated.  Fetch a directory listing from baseurl
	(<proto>://<site>[:port]<path>), optionally reusing a persistent
	connection instance; returns the listing as a list of names."""
	warnings.warn("portage.getbinpkg.dir_get_list() is deprecated",
		DeprecationWarning, stacklevel=2)
	keepconnection = 1 if conn else 0
	conn, protocol, address, params, headers = create_conn(baseurl, conn)
	listing = None
	if protocol in ("http", "https"):
		if not address.endswith("/"):
			# http servers can return a 400 error here
			# if the address doesn't end with a slash.
			address += "/"
		page, rc, msg = make_http_request(conn, address, params, headers)
		if not page:
			import portage.exception
			raise portage.exception.PortageException(
				_("Unable to get listing: %s %s") % (rc, msg))
		parser = ParseLinks()
		parser.feed(_unicode_decode(page))
		del page
		listing = parser.get_anchors()
	elif protocol == "ftp":
		if address[-1] == '/':
			# Change into the directory to list it, then restore cwd.
			olddir = conn.pwd()
			conn.cwd(address)
			listing = conn.nlst()
			conn.cwd(olddir)
		else:
			listing = conn.nlst(address)
	elif protocol == "sftp":
		listing = conn.listdir(address)
	else:
		raise TypeError(_("Unknown protocol. '%s'") % protocol)
	if not keepconnection:
		conn.close()
	return listing
def _set_returncode(self, wait_retval): SubProcess._set_returncode(self, wait_retval) # self._raw_metadata is None when _start returns # early due to an unsupported EAPI if self.returncode == os.EX_OK and \ self._raw_metadata is not None: metadata_lines = _unicode_decode(b''.join(self._raw_metadata), encoding=_encodings['repo.content'], errors='replace').splitlines() metadata_valid = True if len(portage.auxdbkeys) != len(metadata_lines): # Don't trust bash's returncode if the # number of lines is incorrect. metadata_valid = False else: metadata = dict(zip(portage.auxdbkeys, metadata_lines)) parsed_eapi = self._eapi if parsed_eapi is None: parsed_eapi = "0" self.eapi_supported = \ portage.eapi_is_supported(metadata["EAPI"]) if (not metadata["EAPI"] or self.eapi_supported) and \ metadata["EAPI"] != parsed_eapi: self._eapi_invalid(metadata) metadata_valid = False if metadata_valid: # Since we're supposed to be able to efficiently obtain the # EAPI from _parse_eapi_ebuild_head, we don't write cache # entries for unsupported EAPIs. if self.eapi_supported: if metadata.get("INHERITED", False): metadata["_eclasses_"] = \ self.portdb.repositories.get_repo_for_location( self.repo_path).eclass_db.get_eclass_data( metadata["INHERITED"].split()) else: metadata["_eclasses_"] = {} metadata.pop("INHERITED", None) # If called by egencache, this cache write is # undesirable when metadata-transfer is disabled. if self.write_auxdb is not False: self.portdb._write_cache(self.cpv, self.repo_path, metadata, self.ebuild_hash) else: metadata = {"EAPI": metadata["EAPI"]} self.metadata = metadata else: self.returncode = 1
def _parse(paths, prepos, ignored_map, ignored_location_map):
	"""Parse files in paths to load config

	Populates prepos (name -> RepoConfig), recording shadowed
	duplicate-repo locations in ignored_map / ignored_location_map.
	"""
	parser = SafeConfigParser()
	# use read_file/readfp in order to control decoding of unicode
	try:
		# Python >=3.2
		read_file = parser.read_file
	except AttributeError:
		read_file = parser.readfp
	for p in paths:
		f = None
		try:
			f = io.open(_unicode_encode(p,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['repo.content'],
				errors='replace')
		except EnvironmentError:
			# Missing or unreadable config files are silently skipped.
			pass
		else:
			try:
				read_file(f)
			except ParsingError as e:
				writemsg(_unicode_decode(
					_("!!! Error while reading repo config file: %s\n")
					) % e, noiselevel=-1)
		finally:
			# f is None when io.open failed above.
			if f is not None:
				f.close()
	prepos['DEFAULT'] = RepoConfig("DEFAULT", parser.defaults())
	for sname in parser.sections():
		optdict = {}
		for oname in parser.options(sname):
			optdict[oname] = parser.get(sname, oname)
		repo = RepoConfig(sname, optdict)
		if repo.location and not os.path.exists(repo.location):
			writemsg(_("!!! Invalid repos.conf entry '%s'"
				" (not a dir): '%s'\n") % (sname, repo.location), noiselevel=-1)
			continue
		if repo.name in prepos:
			# Duplicate repo name: remember the old location as
			# shadowed before the new config overrides it.
			old_location = prepos[repo.name].location
			if old_location is not None and repo.location is not None and old_location != repo.location:
				ignored_map.setdefault(repo.name, []).append(old_location)
				ignored_location_map[old_location] = repo.name
			prepos[repo.name].update(repo)
		else:
			prepos[repo.name] = repo
def testBashSyntax(self):
	"""Run ``bash -n`` over every bash script found under the portage
	bin and misc directories, asserting that each parses cleanly."""
	locations = [PORTAGE_BIN_PATH]
	misc_dir = os.path.join(PORTAGE_BASE_PATH, "misc")
	if os.path.isdir(misc_dir):
		locations.append(misc_dir)
	for parent, dirs, files in \
		chain.from_iterable(os.walk(x) for x in locations):
		parent = _unicode_decode(parent,
			encoding=_encodings['fs'], errors='strict')
		for x in files:
			x = _unicode_decode(x,
				encoding=_encodings['fs'], errors='strict')
			# BUG FIX: str.split('.')[-1] yields the extension without a
			# leading dot, so the old comparison against
			# ('.py', '.pyc', '.pyo') never matched and python files
			# were needlessly opened and scanned for a bash shebang.
			ext = x.split('.')[-1]
			if ext in ('py', 'pyc', 'pyo'):
				continue
			x = os.path.join(parent, x)
			st = os.lstat(x)
			if not stat.S_ISREG(st.st_mode):
				continue

			# Check for bash shebang
			with open(_unicode_encode(x,
				encoding=_encodings['fs'], errors='strict'), 'rb') as f:
				line = _unicode_decode(f.readline(),
					encoding=_encodings['content'], errors='replace')
			if line[:2] == '#!' and \
				'bash' in line:
				# Use a distinct loop variable so the script path x
				# is not clobbered by the encode comprehension.
				cmd = [_unicode_encode(arg,
					encoding=_encodings['fs'], errors='strict')
					for arg in (BASH_BINARY, "-n", x)]
				proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
					stderr=subprocess.STDOUT)
				output = _unicode_decode(proc.communicate()[0],
					encoding=_encodings['fs'])
				status = proc.wait()
				self.assertEqual(os.WIFEXITED(status) and \
					os.WEXITSTATUS(status) == os.EX_OK, True, msg=output)
def emergelog(xterm_titles, mystr, short_msg=None):
	# Append a timestamped entry to emerge.log under a file lock,
	# optionally pushing short_msg into the xterm title bar.
	if _disable:
		return
	mystr = portage._unicode_decode(mystr)
	if short_msg is not None:
		short_msg = portage._unicode_decode(short_msg)
	if xterm_titles and short_msg:
		if "HOSTNAME" in os.environ:
			short_msg = os.environ["HOSTNAME"]+": "+short_msg
		xtermTitle(short_msg)
	try:
		file_path = os.path.join(_emerge_log_dir, 'emerge.log')
		mylogfile = codecs.open(_unicode_encode(file_path,
			encoding=_encodings['fs'], errors='strict'),
			mode='a', encoding=_encodings['content'],
			errors='backslashreplace')
		# Ensure the log stays readable/writable by the portage group.
		portage.util.apply_secpass_permissions(file_path,
			uid=portage.portage_uid, gid=portage.portage_gid,
			mode=0o660)
		mylock = None
		try:
			mylock = portage.locks.lockfile(mylogfile)
			# seek because we may have gotten held up by the lock.
			# if so, we may not be positioned at the end of the file.
			mylogfile.seek(0, 2)
			# [:10] keeps only whole seconds of the epoch timestamp.
			mylogfile.write(str(time.time())[:10]+": "+mystr+"\n")
			mylogfile.flush()
		finally:
			if mylock:
				portage.locks.unlockfile(mylock)
			mylogfile.close()
	except (IOError,OSError,portage.exception.PortageException) as e:
		# Logging is best-effort; only complain when we have privileges.
		if secpass >= 1:
			print("emergelog():",e, file=sys.stderr)
def __call__(self, argv):
	"""
	Service a has_version/best_version IPC request from an ebuild.

	@param argv: (cmd, root, atom_str) command triple
	@return: tuple of (stdout, stderr, returncode)
	"""
	cmd, root, atom_str = argv
	eapi = self.settings.get('EAPI')
	allow_repo = eapi_has_repo_deps(eapi)
	# First parse under permissive rules; a failure here is a hard error.
	try:
		atom = Atom(atom_str, allow_repo=allow_repo)
	except InvalidAtom:
		return ('', 'invalid atom: %s\n' % atom_str, 2)

	warnings = []
	# Re-parse under the ebuild's EAPI: a failure here only produces a
	# QA warning, and the permissive atom from above is kept.
	try:
		atom = Atom(atom_str, allow_repo=allow_repo, eapi=eapi)
	except InvalidAtom as e:
		warnings.append(_unicode_decode("QA Notice: %s: %s") % (cmd, e))

	# Evaluate USE conditionals against the built USE when available.
	use = self.settings.get('PORTAGE_BUILT_USE')
	if use is None:
		use = self.settings['PORTAGE_USE']

	use = frozenset(use.split())
	atom = atom.evaluate_conditionals(use)

	db = self.get_db()

	warnings_str = ''
	if warnings:
		warnings_str = self._elog('eqawarn', warnings)

	# Normalize root to a trailing-slash form for the db lookup.
	root = normalize_path(root).rstrip(os.path.sep) + os.path.sep
	if root not in db:
		return ('', 'invalid ROOT: %s\n' % root, 2)

	vardb = db[root]["vartree"].dbapi

	if cmd == 'has_version':
		if vardb.match(atom):
			returncode = 0
		else:
			returncode = 1
		return ('', warnings_str, returncode)
	elif cmd == 'best_version':
		m = best(vardb.match(atom))
		return ('%s\n' % m, warnings_str, 0)
	else:
		return ('', 'invalid command: %s\n' % cmd, 2)
def copyfile(src, dest):
	"""Copy src to dest, creating dest with src's SELinux
	security context."""
	src = _native_string(src, encoding=_encodings['fs'], errors='strict')
	dest = _native_string(dest, encoding=_encodings['fs'], errors='strict')
	(rc, ctx) = selinux.lgetfilecon(src)
	if rc < 0:
		err_path = src
		if sys.hexversion < 0x3000000:
			# python2: decode for safe interpolation into the message
			err_path = _unicode_decode(src,
				encoding=_encodings['fs'], errors='replace')
		raise OSError(
			_("copyfile: Failed getting context of \"%s\".") % err_path)
	setfscreate(ctx)
	try:
		shutil.copyfile(src, dest)
	finally:
		# Always reset the file-creation context.
		setfscreate()
def _get_target(self):
	# Lazily compute the VERSION string: derive it from `git describe`
	# when running from a git checkout, otherwise fall back to 'HEAD'.
	# The result is cached in the module-global VERSION.
	global VERSION
	if VERSION is not self:
		return VERSION
	if os.path.isdir(os.path.join(PORTAGE_BASE_PATH, '.git')):
		encoding = _encodings['fs']
		# One shell invocation: describe the latest repoman-* tag, then
		# report whether the work tree is modified plus HEAD's commit time.
		cmd = [BASH_BINARY, "-c", ("cd %s ; git describe --match 'repoman-*' || exit $? ; " + \
			"if [ -n \"`git diff-index --name-only --diff-filter=M HEAD`\" ] ; " + \
			"then echo modified ; git rev-list --format=%%ct -n 1 HEAD ; fi ; " + \
			"exit 0") % _shell_quote(PORTAGE_BASE_PATH)]
		cmd = [_unicode_encode(x, encoding=encoding, errors='strict')
			for x in cmd]
		proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
			stderr=subprocess.STDOUT)
		output = _unicode_decode(proc.communicate()[0], encoding=encoding)
		status = proc.wait()
		if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
			output_lines = output.splitlines()
			if output_lines:
				# Tag looks like "repoman-<version>[-<patchlevel>...]".
				version_split = output_lines[0].split('-')
				if len(version_split) > 1:
					VERSION = version_split[1]
					patchlevel = False
					if len(version_split) > 2:
						patchlevel = True
						VERSION = "%s_p%s" % (VERSION, version_split[2])
					if len(output_lines ) > 1 and output_lines[1] == 'modified':
						# Modified checkout: append seconds since the HEAD
						# commit as an extra _p suffix.
						head_timestamp = None
						if len(output_lines) > 3:
							try:
								head_timestamp = long(output_lines[3])
							except ValueError:
								pass
						timestamp = long(time.time())
						if head_timestamp is not None and timestamp > head_timestamp:
							timestamp = timestamp - head_timestamp
						if not patchlevel:
							VERSION = "%s_p0" % (VERSION, )
						VERSION = "%s_p%d" % (VERSION, timestamp)
					return VERSION
			else:
				print("NO output lines :(")
	VERSION = 'HEAD'
	return VERSION
def localized_size(num_bytes):
	"""
	Return pretty localized size string for num_bytes size
	(given in bytes). The output will be in kibibytes.
	"""
	# Always round up, so that small files don't end up as '0 KiB'.
	kib = math.ceil(num_bytes / 1024)
	try:
		grouped = locale.format_string('%d', kib, grouping=True)
	except UnicodeDecodeError:
		# Locale data could not be decoded; fall back to plain digits.
		grouped = str(kib)
	return _unicode_decode(grouped, encoding=_encodings['stdio']) + ' KiB'
def _open_file(filename):
	"""Open filename for binary reading, translating common errno
	values into the corresponding portage exception types."""
	try:
		return open(_unicode_encode(filename,
			encoding=_encodings['fs'], errors='strict'), 'rb')
	except IOError as e:
		func_call = "open('%s')" % _unicode_decode(filename)
		if e.errno == errno.EPERM:
			raise portage.exception.OperationNotPermitted(func_call)
		if e.errno == errno.EACCES:
			raise portage.exception.PermissionDenied(func_call)
		if e.errno == errno.ENOENT:
			raise portage.exception.FileNotFound(filename)
		# Unrecognized errno: propagate the original IOError.
		raise
def prevent_collision(self, cpv):
	"""Make sure that the file location ${PKGDIR}/All/${PF}.tbz2 is safe to
	use for a given cpv.  If a collision will occur with an existing
	package from another category, the existing package will be bumped to
	${PKGDIR}/${CATEGORY}/${PF}.tbz2 so that both can coexist."""
	if not self._all_directory:
		return

	# Copy group permissions for new directories that
	# may have been created.
	for path in ("All", catsplit(cpv)[0]):
		path = os.path.join(self.pkgdir, path)
		self._ensure_dir(path)
		if not os.access(path, os.W_OK):
			raise PermissionDenied("access('%s', W_OK)" % path)

	full_path = self.getname(cpv)
	if "All" == full_path.split(os.path.sep)[-2]:
		return

	# Move a colliding package if it exists.  Code below this point
	# only executes in rare cases.  (Fixed: this note was previously a
	# bare string literal, i.e. an executed expression statement rather
	# than a comment.)
	mycat, mypkg = catsplit(cpv)
	myfile = mypkg + ".tbz2"
	mypath = os.path.join("All", myfile)
	dest_path = os.path.join(self.pkgdir, mypath)

	try:
		st = os.lstat(dest_path)
	except OSError:
		st = None
	else:
		if stat.S_ISLNK(st.st_mode):
			# A symlink is not a real collision; just remove it.
			st = None
			try:
				os.unlink(dest_path)
			except OSError:
				if os.path.exists(dest_path):
					raise

	if st is not None:
		# For invalid packages, other_cat could be None.
		other_cat = portage.xpak.tbz2(dest_path).getfile(b"CATEGORY")
		if other_cat:
			other_cat = _unicode_decode(other_cat,
				encoding=_encodings['repo.content'], errors='replace')
			other_cat = other_cat.strip()
			other_cpv = other_cat + "/" + mypkg
			# Relocate the existing package into its category dir.
			self._move_from_all(other_cpv)
			self.inject(other_cpv)
	self._move_to_all(cpv)
def perform_post_sync_hook(self, reponame, dosyncuri='', repolocation=''):
	"""Run the postsync.d hooks (repo-specific when reponame is set),
	returning os.EX_OK or the exit status of the last failing hook."""
	if reponame:
		_hooks = self.hooks["repo.postsync.d"]
	else:
		_hooks = self.hooks["postsync.d"]
	succeeded = os.EX_OK
	for filepath in _hooks:
		writemsg_level("Spawning post_sync hook: %s\n"
			% (_unicode_decode(_hooks[filepath])),
			level=logging.ERROR, noiselevel=4)
		# Repo-specific hooks receive the repo name, uri and location.
		if reponame:
			hook_args = [filepath, reponame, dosyncuri, repolocation]
		else:
			hook_args = [filepath]
		retval = portage.process.spawn(hook_args,
			env=self.settings.environ())
		if retval != os.EX_OK:
			writemsg_level(" %s Spawn failed for: %s, %s\n" % (bad("*"),
				_unicode_decode(_hooks[filepath]), filepath),
				level=logging.ERROR, noiselevel=-1)
			succeeded = retval
	return succeeded
def retrieve_head(self, **kwargs):
	'''Get information about the head commit'''
	if kwargs:
		self._kwargs(kwargs)
	rev_cmd = [self.bin_command, "rev-list", "--max-count=1", "HEAD"]
	try:
		output = subprocess.check_output(rev_cmd,
			cwd=portage._unicode_encode(self.repo.location))
	except subprocess.CalledProcessError:
		# git exited non-zero; report failure with no commit id.
		return (1, False)
	return (os.EX_OK, portage._unicode_decode(output))
def load(self):
	"""
	Reload the registry data from file
	"""
	self._data = None
	f = None
	content = None
	try:
		f = open(_unicode_encode(self._filename,
			encoding=_encodings['fs'], errors='strict'), 'rb')
		content = f.read()
	except EnvironmentError as e:
		# A missing file simply means an empty registry; permission
		# problems are re-raised as PermissionDenied.
		if not hasattr(e, 'errno'):
			raise
		elif e.errno == errno.ENOENT:
			pass
		elif e.errno == PermissionDenied.errno:
			raise PermissionDenied(self._filename)
		else:
			raise
	finally:
		if f is not None:
			f.close()

	# content is empty if it's an empty lock file
	if content:
		try:
			self._data = json.loads(_unicode_decode(content,
				encoding=_encodings['repo.content'], errors='strict'))
		except SystemExit:
			raise
		except Exception as e:
			# Fall back to the legacy pickle format; if that fails
			# too, report the original JSON error.
			try:
				self._data = pickle.loads(content)
			except SystemExit:
				raise
			except Exception:
				writemsg_level(_("!!! Error loading '%s': %s\n") %
					(self._filename, e), level=logging.ERROR,
					noiselevel=-1)

	if self._data is None:
		self._data = {}
	else:
		for k, v in self._data.items():
			if isinstance(v, (list, tuple)) and len(v) == 3 and \
				isinstance(v[2], set):
				# convert set to list, for write with JSONEncoder
				self._data[k] = (v[0], v[1], list(v[2]))

	# Keep a snapshot so store() can detect modifications.
	self._data_orig = self._data.copy()
	self.pruneNonExisting()
def emergelog(xterm_titles, mystr, short_msg=None):
	"""Append a timestamped message to emerge.log under a file lock,
	optionally pushing short_msg into the xterm title bar."""
	if _disable:
		return

	mystr = _unicode_decode(mystr)
	if short_msg is not None:
		short_msg = _unicode_decode(short_msg)
	if xterm_titles and short_msg:
		if "HOSTNAME" in os.environ:
			short_msg = os.environ["HOSTNAME"] + ": " + short_msg
		xtermTitle(short_msg)
	try:
		log_path = os.path.join(_emerge_log_dir, 'emerge.log')
		preexisting = os.path.exists(log_path)
		log_file = io.open(_unicode_encode(log_path,
			encoding=_encodings['fs'], errors='strict'),
			mode='a', encoding=_encodings['content'],
			errors='backslashreplace')
		if not preexisting:
			# Only fix ownership/permissions for a freshly created log.
			portage.util.apply_secpass_permissions(log_path,
				uid=portage.portage_uid, gid=portage.portage_gid,
				mode=0o660)
		lock = portage.locks.lockfile(log_path)
		try:
			log_file.write("%.0f: %s\n" % (time.time(), mystr))
			log_file.close()
		finally:
			portage.locks.unlockfile(lock)
	except (IOError, OSError, portage.exception.PortageException) as e:
		# Logging is best-effort; only complain when we have privileges.
		if secpass >= 1:
			portage.util.writemsg("emergelog(): %s\n" % (e,),
				noiselevel=-1)
def testCompileModules(self):
	"""Byte-compile every python file under the bin and pym trees to
	catch syntax errors."""
	for parent, _dirs, files in itertools.chain(
		os.walk(PORTAGE_BIN_PATH),
		os.walk(PORTAGE_PYM_PATH)):
		parent = _unicode_decode(parent,
			encoding=_encodings['fs'], errors='strict')
		for name in files:
			name = _unicode_decode(name,
				encoding=_encodings['fs'], errors='strict')
			if name[-4:] in ('.pyc', '.pyo'):
				continue
			path = os.path.join(parent, name)
			if not stat.S_ISREG(os.lstat(path).st_mode):
				continue
			do_compile = path[-3:] == '.py'
			if not do_compile:
				# No .py suffix; look for a python shebang instead.
				try:
					with open(_unicode_encode(path,
						encoding=_encodings['fs'],
						errors='strict'), 'rb') as f:
						line = _unicode_decode(f.readline(),
							encoding=_encodings['content'],
							errors='replace')
				except IOError as e:
					# Some tests create files that are unreadable by the
					# user (by design), so ignore EACCES issues.
					if e.errno != errno.EACCES:
						raise
					continue
				do_compile = line[:2] == '#!' and 'python' in line
			if do_compile:
				with open(_unicode_encode(path,
					encoding=_encodings['fs'],
					errors='strict'), 'rb') as f:
					compile(f.read(), path, 'exec')
def __init__(self, paths, settings, trees):
	# Build the package-set configuration parser, seeding interpolation
	# defaults from the portage settings.
	self._parser = SafeConfigParser(
		defaults={
			"EPREFIX": settings["EPREFIX"],
			"EROOT": settings["EROOT"],
			"PORTAGE_CONFIGROOT": settings["PORTAGE_CONFIGROOT"],
			"ROOT": settings["ROOT"],
		})

	if _ENABLE_SET_CONFIG:
		# use read_file/readfp in order to control decoding of unicode
		try:
			# Python >=3.2
			read_file = self._parser.read_file
		except AttributeError:
			read_file = self._parser.readfp

		for p in paths:
			f = None
			try:
				f = io.open(_unicode_encode(p,
					encoding=_encodings['fs'], errors='strict'),
					mode='r', encoding=_encodings['repo.content'],
					errors='replace')
			except EnvironmentError:
				# Missing or unreadable files are skipped silently.
				pass
			else:
				try:
					read_file(f)
				except ParsingError as e:
					writemsg_level(_unicode_decode(
						_("!!! Error while reading sets config file: %s\n")
						) % e, level=logging.ERROR, noiselevel=-1)
			finally:
				# f is None when io.open failed above.
				if f is not None:
					f.close()
	else:
		self._create_default_config()

	self.errors = []
	self.psets = {}
	self.trees = trees
	self.settings = settings
	self._parsed = False
	self.active = []
def __str__(self):
	"""Render the unsupported-EAPI message for this exception."""
	eapi = self.eapi
	if not isinstance(eapi, str):
		eapi = str(eapi)
	# Strip any leading "-" from the EAPI value before display.
	eapi = eapi.lstrip("-")
	msg = _(
		"Unable to do any operations on '%(cpv)s', since "
		"its EAPI is higher than this portage version's. Please upgrade"
		" to a portage version that supports EAPI '%(eapi)s'.") % {
		"cpv": self.cpv,
		"eapi": eapi,
	}
	return _unicode_decode(msg,
		encoding=_encodings["content"], errors="replace")
def __iter__(self):
	"""Yield "category/package/ebuild-name" identifiers for every
	ebuild under self.portdir that has a cache entry."""
	for root, dirs, files in os.walk(self.portdir):
		# NOTE: renamed loop variable from "file" (shadowed the
		# builtin) and reuse pn_pv instead of re-slicing in the yield
		# (it was previously assigned but never used).
		for filename in files:
			try:
				filename = _unicode_decode(filename,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeDecodeError:
				continue
			if filename[-7:] != '.ebuild':
				continue
			cat = os.path.basename(os.path.dirname(root))
			pn_pv = filename[:-7]
			path = os.path.join(root, filename)
			if self.__has_cache(path):
				yield "%s/%s/%s" % (cat, os.path.basename(root), pn_pv)
def testPortageException(self):
	"""PortageException must format a unicode argument identically
	through both the unicode and str formatting paths."""
	self.assertEqual(_encodings['content'], 'utf_8')
	for arg_bytes in self.unicode_strings:
		arg_unicode = _unicode_decode(arg_bytes,
			encoding=_encodings['content'])
		e = PortageException(arg_unicode)
		# Force unicode format string so that __unicode__() is
		# called in python2.
		self.assertEqual(_unicode_decode("%s") % (e,), arg_unicode)
		if STR_IS_UNICODE:
			# __str__ returns unicode in python3
			self.assertEqual("%s" % (e,), arg_unicode)
		else:
			# __str__ returns encoded bytes in python2
			self.assertEqual("%s" % (e,), arg_bytes)
def _update_thin_pkgdir(self, cat, pn, pkgdir):
	"""Return the list of cpvs found as package files directly inside
	pkgdir (thin layout) for the given category/package name."""
	# Take only the first os.walk() level (no recursion).  next() with a
	# default fixes two defects of the old walk/break idiom: it no
	# longer shadows the pkgdir parameter with the loop variable, and it
	# avoids a NameError on the file list when pkgdir is missing or
	# unreadable (os.walk then yields nothing).
	_walked_dir, _subdirs, pkgdir_files = next(
		os.walk(pkgdir), (pkgdir, [], []))
	cpvlist = []
	for f in pkgdir_files:
		try:
			f = _unicode_decode(f,
				encoding=_encodings["fs"], errors="strict")
		except UnicodeDecodeError:
			continue
		# Skip hidden files.
		if f[:1] == ".":
			continue
		pf = self._is_cpv(cat, pn, f)
		if pf is not None:
			cpvlist.append(pf)
	return cpvlist
def main(args):
	"""Acquire a lock on args[0], signal readiness with a NUL byte on
	stdout, then hold the lock until one byte arrives on stdin."""
	if args and isinstance(args[0], bytes):
		# Decode in place so the caller's list is updated too.
		for i, arg in enumerate(args):
			args[i] = portage._unicode_decode(arg, errors="strict")

	# Make locks quiet since unintended locking messages displayed on
	# stdout would corrupt the intended output of this program.
	portage.locks._quiet = True
	lock = portage.locks.lockfile(args[0], wantnewlockfile=True)
	sys.stdout.write("\0")
	sys.stdout.flush()
	sys.stdin.read(1)
	portage.locks.unlockfile(lock)
	return portage.os.EX_OK
def addtolist(mylist, curdir):
	"""(list, dir) --- Takes an array(list) and appends all files from dir
	down the directory tree. Returns nothing. list is modified."""
	curdir = normalize_path(
		_unicode_decode(curdir, encoding=_encodings["fs"], errors="strict")
	)
	for parent, dirs, files in os.walk(curdir):
		parent = _unicode_decode(parent,
			encoding=_encodings["fs"], errors="strict")
		if parent != curdir:
			mylist.append(parent[len(curdir) + 1 :] + os.sep)
		# BUG FIX: iterate over a copy (dirs[:]).  Removing from the
		# list being iterated skips the element after each removal, so
		# undecodable names could survive pruning; parent is decoded
		# with errors="strict" above, so walking into such a directory
		# would raise an uncaught UnicodeDecodeError.  (The sibling
		# multiBuilder() already uses the dirs[:] idiom.)
		for x in dirs[:]:
			try:
				_unicode_decode(x,
					encoding=_encodings["fs"], errors="strict")
			except UnicodeDecodeError:
				dirs.remove(x)
		for x in files:
			try:
				x = _unicode_decode(x,
					encoding=_encodings["fs"], errors="strict")
			except UnicodeDecodeError:
				continue
			mylist.append(os.path.join(parent, x)[len(curdir) + 1 :])
def old_tree_timestamp_warn(portdir, settings):
	"""Warn when the tree's metadata/timestamp.x is older than
	PORTAGE_SYNC_STALE days; return True if a warning was emitted."""
	now = time.time()
	default_warnsync = 30

	# Read and parse the last-sync timestamp; bail out quietly on any
	# missing or malformed data.
	timestamp_file = os.path.join(portdir, "metadata/timestamp.x")
	try:
		lastsync = grabfile(timestamp_file)
	except PortageException:
		return False
	if not lastsync:
		return False
	lastsync = lastsync[0].split()
	if not lastsync:
		return False
	try:
		lastsync = int(lastsync[0])
	except ValueError:
		return False

	var_name = "PORTAGE_SYNC_STALE"
	try:
		warnsync = float(settings.get(var_name, default_warnsync))
	except ValueError:
		writemsg_level(
			"!!! %s contains non-numeric value: %s\n"
			% (var_name, settings[var_name]),
			level=logging.ERROR,
			noiselevel=-1,
		)
		return False
	if warnsync <= 0:
		return False

	if (now - 86400 * warnsync) > lastsync:
		out = EOutput()
		if have_english_locale():
			out.ewarn("Last emerge --sync was %s ago."
				% whenago(now - lastsync))
		else:
			out.ewarn(
				_("Last emerge --sync was %s.")
				% _unicode_decode(time.strftime(
					"%c", time.localtime(lastsync))))
		return True
	return False
def retrieve_head(self, **kwargs):
	"""Get information about the head commit"""
	if kwargs:
		self._kwargs(kwargs)
	rev_cmd = [self.bin_command, "id", "--id", "--rev", "tip"]
	try:
		output = subprocess.check_output(rev_cmd,
			cwd=portage._unicode_encode(self.repo.location))
	except subprocess.CalledProcessError:
		# hg exited non-zero; report failure with no revision id.
		return (1, False)
	return (os.EX_OK, portage._unicode_decode(output))