def readline(self, size=-1): """Read a line from the remote host. If size is specified, read to newline or size, whichever is smaller. We force the return value to be str here since the caller expect str.""" # Caller shouldn't hold lock when calling this method assert not self.__lock._is_owned() if size < 0: curdata = self.__buf newline = curdata.find(b"\n") if newline >= 0: newline += 1 self.__buf = curdata[newline:] return force_str(curdata[:newline]) while self.__fill_buffer(): newline = self.__buf.find(b"\n") if newline >= 0: break curdata = self.__buf newline = curdata.find(b"\n") if newline >= 0: newline += 1 self.__buf = curdata[newline:] return force_str(curdata[:newline]) self.__buf = b"" return force_str(curdata) else: curdata = self.__buf newline = curdata.find(b"\n", 0, size) datalen = len(curdata) if newline >= 0: newline += 1 self.__buf = curdata[newline:] return force_str(curdata[:newline]) if datalen >= size: self.__buf = curdata[size:] return force_str(curdata[:size]) while self.__fill_buffer(): newline = self.__buf.find(b"\n", 0, size) datalen = len(self.__buf) if newline >= 0: break if datalen >= size: break curdata = self.__buf newline = curdata.find(b"\n", 0, size) datalen = len(curdata) if newline >= 0: newline += 1 self.__buf = curdata[newline:] return force_str(curdata[:newline]) if datalen >= size: self.__buf = curdata[size:] return force_str(curdata[:size]) self.__buf = b"" return force_str(curdata)
def get_text(self, img, pfmri, alt_pub=None):
    """Retrieves and returns the payload of the license (which
    should be text).  This may require remote retrieval of
    resources and so this could raise a TransportError or other
    ApiException.

    'alt_pub' is an optional alternate Publisher to use for any
    required transport operations.
    """

    path = self.get_local_path(img, pfmri)
    hash_attr, hash_attr_val, hash_func = \
        digest.get_least_preferred_hash(self)
    try:
        with open(path, "rb") as fh:
            # fstat the open handle rather than re-stat'ing the
            # path so the length always describes the file being
            # hashed (avoids an open/stat race with concurrent
            # cache updates).
            length = os.fstat(fh.fileno()).st_size
            chash, txt = misc.get_data_digest(fh, length=length,
                return_content=True, hash_func=hash_func)
            if chash == hash_attr_val:
                return misc.force_str(txt)
    except EnvironmentError as e:
        if e.errno != errno.ENOENT:
            raise

    # If we get here, either the license file wasn't on disk, or the
    # hash didn't match.  In either case, go retrieve it from the
    # publisher.
    try:
        if not alt_pub:
            alt_pub = img.get_publisher(pfmri.publisher)
        assert pfmri.publisher == alt_pub.prefix
        return img.transport.get_content(alt_pub, hash_attr_val,
            fmri=pfmri, hash_func=hash_func)
    finally:
        # Always clean up partial downloads, even on failure.
        img.cleanup_downloads()
def __call(args, zone=None):
    """Invoke the given SMF command, optionally inside 'zone', and
    return its decoded output lines.

    Raises RuntimeError if the command cannot be executed and
    NonzeroExitException if it exits non-zero.
    """
    # a way to invoke a separate executable for testing
    cmds_dir = DebugValues.get_value("smf_cmds_dir")
    # returned values will be in the user's locale
    # so we need to ensure that the force_str uses
    # their locale.
    encoding = locale.getpreferredencoding(do_setlocale=False)
    if cmds_dir:
        args = (
            os.path.join(cmds_dir,
            args[0].lstrip("/")),) + args[1:]
    if zone:
        cmd = DebugValues.get_value("bin_zlogin")
        if cmd is None:
            cmd = zlogin_path
        args = (cmd, zone) + args
    try:
        proc = subprocess.Popen(args, stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT)
        # Close the pipe once output is consumed so the file
        # descriptor is not leaked (Popen does not close it for
        # us when we read it directly).
        with proc.stdout:
            buf = [misc.force_str(l, encoding=encoding)
                for l in proc.stdout.readlines()]
        ret = proc.wait()
    except OSError as e:
        raise RuntimeError("cannot execute {0}: {1}".format(args, e))
    if ret != 0:
        raise NonzeroExitException(args, ret, buf)
    return buf
def write(self, msg):
    """Write a string to the pipe.

    The message is staged in an anonymous temporary file whose
    descriptor is then handed across the pipe via sendfd().
    """
    # JSON object must be str to be used in jsonrpclib
    # Use a context manager so the temporary file is closed even
    # if sendfd() raises (the original leaked it on that path).
    with tempfile.TemporaryFile(mode="w+") as mf:
        mf.write(force_str(msg))
        mf.flush()
        self.sendfd(mf.fileno())
def reopen(self, rstore, trans_dir):
    """The reopen() method is invoked by the repository as needed to
    load Transaction data.

    'trans_dir' is a directory whose basename encodes the open
    time and the escaped package name as "<time>_<name>"; raises
    TransactionUnknownIDError when it cannot be parsed or the
    transaction directory is missing.
    """
    self.rstore = rstore
    try:
        open_time_str, self.esc_pkg_name = \
            os.path.basename(trans_dir).split("_", 1)
    except ValueError:
        raise TransactionUnknownIDError(os.path.basename(
            trans_dir))
    self.open_time = \
        datetime.datetime.utcfromtimestamp(int(open_time_str))
    self.pkg_name = unquote(self.esc_pkg_name)

    # This conversion should always work, because we encoded the
    # client release on the initial open of the transaction.
    self.fmri = fmri.PkgFmri(self.pkg_name, None)

    self.dir = os.path.join(rstore.trans_root, self.get_basename())

    if not os.path.exists(self.dir):
        raise TransactionUnknownIDError(self.get_basename())

    tmode = "rb"
    if not rstore.read_only:
        # The mode is important especially when dealing with
        # NFS because of problems with opening a file as
        # read/write or readonly multiple times.
        tmode += "+"

    # Find out if the package is renamed or obsolete.
    try:
        tfpath = os.path.join(self.dir, "manifest")
        tfile = open(tfpath, tmode)
    except IOError as e:
        # A missing manifest just means nothing was published
        # yet for this transaction.
        if e.errno == errno.ENOENT:
            return
        raise
    m = pkg.manifest.Manifest()
    # If tfile is a StreamingFileObj obj, its read()
    # methods will return bytes.  We need str for
    # manifest and here's the earliest point that
    # we can convert it to str.
    m.set_content(content=misc.force_str(tfile.read()))
    tfile.close()
    if os.path.exists(os.path.join(self.dir, "append")):
        self.append_trans = True
    self.obsolete = m.getbool("pkg.obsolete", "false")
    self.renamed = m.getbool("pkg.renamed", "false")
    # Remember which action types and require-dependencies were
    # seen so later adds can be sanity-checked against them.
    self.types_found = set((
        action.name for action in m.gen_actions()
    ))
    self.has_reqdeps = any(
        a.attrs["type"] == "require"
        for a in m.gen_actions_by_type("depend")
    )
def __header_callback(self, data):
    """Callback handed to the transport engine.  Parses raw HTTP
    header data, recording the response's status message and every
    'name: value' header it can split apart."""

    if data.startswith(b"HTTP/"):
        # Status line; keep just the reason phrase, if any.
        parts = data.split(None, 2)
        try:
            self.__httpmsg = parts[2]
        except IndexError:
            pass
    elif data.find(b":") > -1:
        name, _sep, value = data.partition(b":")
        if value:
            # Decode to str as early as possible; header names
            # are case-insensitive, so normalize to lower case.
            self.__headers[force_str(name.lower())] = \
                force_str(value.strip())
def putp(self, string):
    """Loose emulation of python's curses.putp() that writes to
    whatever our output file is instead of being hard-wired to
    stdout."""

    assert self.__ttymode
    # Hardware terminals are essentially extinct, so termcap delay
    # sequences (of the form $<[0-9]+>) are stripped rather than
    # honored.
    text = force_str(string)
    stripped = self.__putp_re.sub("", text)
    self._out_file.write(stripped)
def _check(self, dep_action, which):
    """Performs the subprocess invocation and returns
    (status, outputbuffer, args) to the caller.

    NOTE(review): the firmware-dependency-bypass path returns a
    two-tuple (True, None) while the normal path returns a
    three-tuple -- confirm callers handle both shapes.
    """
    # leverage smf test infrastructure
    cmds_dir = DebugValues["smf_cmds_dir"]
    if DebugValues["firmware-dependency-bypass"]:
        return (True, None)
    if cmds_dir:
        # we're testing;
        firmware_dir = cmds_dir
    else:
        firmware_dir = "/usr/lib/fwenum"
    args = [os.path.join(firmware_dir, which)]
    args.extend([
        "{0}={1}".format(k, quote_attr_value(v))
        for k, v in sorted(six.iteritems(dep_action.attrs))
    ])

    # Set up the default return values
    ret = 0
    buf = ""

    # use a cache since each check may be expensive and each
    # pkg version may have the same dependency.
    # BUG FIX: 'key' was referenced below without ever being
    # defined, which raised NameError on non-Solaris systems; the
    # cache key is the stringified argument vector, and is now
    # used consistently for both the insert and the lookup.
    key = str(args)
    # ignore non-solaris systems here
    if portable.osname != "sunos" and key not in self._cache:
        self._cache[key] = (True, None)
    if key not in self._cache:
        try:
            proc = subprocess.Popen(
                args, stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT)
            # output from proc is bytes
            buf = [misc.force_str(l)
                for l in proc.stdout.readlines()]
            ret = proc.wait()
        except OSError as e:
            # we have no enumerator installed. This can
            # occur if this driver is being installed for
            # the first time or, more likely, we just added
            # enumerators and a firmware dependency for the
            # first time. For now, drive on and ignore this
            # to permit the addition of such dependencies
            # concurrently with their enumerarators.
            buf = (_("Firmware dependency error:"
                " Cannot exec {0}: {1}").format(
                " ".join(args), str(e)))
            ret = -1
    return (ret, buf, args)
def process_script_deps(action, pkg_vars, **kwargs):
    """Given an action, if the file starts with #! a list containing a
    ScriptDependency is returned. Further, if the file is of a known
    type, it is further analyzed and any dependencies found are added
    to the list returned.

    Returns a (deps, errors, pkg_attrs) triple.
    """
    # Only file actions can deliver scripts.
    if action.name != "file":
        return [], [], {}

    f = action.data()
    # Only the first line is needed to detect a #! interpreter.
    l = force_str(f.readline())
    f.close()

    deps = []
    elist = []
    pkg_attrs = {}
    script_path = None
    run_paths = kwargs.get("run_paths", [])
    # add #! dependency
    if l.startswith("#!"):
        # Determine whether the file will be delivered executable.
        ex_bit = int(action.attrs.get("mode", "0"), 8) & \
            (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
        if ex_bit:
            p = (l[2:].split()[0])
            # first part of string is path (removes options)
            # we don't handle dependencies through links, so fix up
            # the common one
            p = p.strip()
            if not os.path.isabs(p):
                elist.append(ScriptNonAbsPath(
                    action.attrs[PD_LOCAL_PATH], p))
            else:
                if p.startswith("/bin"):
                    # Use p[1:] to strip off the leading /.
                    p = os.path.join("/usr", p[1:])
                deps.append(ScriptDependency(action, p,
                    pkg_vars, action.attrs[PD_PROTO_DIR]))
                script_path = l
        if "python" in l:
            # Hand off to the python-specific analyzer; merge
            # any attributes it reports into ours.
            ds, errs, py_attrs = python.process_python_dependencies(
                action, pkg_vars, script_path, run_paths)
            elist.extend(errs)
            deps.extend(ds)
            for key in py_attrs:
                if key in pkg_attrs:
                    pkg_attrs[key].extend(py_attrs[key])
                else:
                    pkg_attrs[key] = py_attrs[key]
    # Ensure that the reported dependencies are
    # always in the same order.
    deps.sort()
    return deps, elist, pkg_attrs
def test_14_history_unicode_locale(self):
    """Verify we can get history when unicode locale is set"""

    # pkg history used to fail whenever one of these locales was
    # active.
    unicode_locales = ["fr_FR.UTF-8", "zh_TW.UTF-8", "zh_CN.UTF-8",
        "ko_KR.UTF-8", "ja_JP.UTF-8"]
    proc = subprocess.Popen(["/usr/bin/locale", "-a"],
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    # subprocess return bytes and we need str
    installed = [force_str(entry.rstrip())
        for entry in proc.stdout.readlines()]
    unicode_list = list(set(installed) & set(unicode_locales))
    self.assertTrue(unicode_list, "You must have one of the "
        " following locales installed for this test to succeed: " +
        ", ".join(unicode_locales))
    env = {"LC_ALL": unicode_list[0]}
    self.pkg("history", env_arg=env)
def get_text(self, img, pfmri, alt_pub=None):
    """Retrieve and return the payload of the license (which should
    be text).  This may require remote retrieval of resources and so
    this could raise a TransportError or other ApiException.

    If there are UTF-8 encoding errors in the text replace them so
    that we still have a license to show rather than failing the
    entire operation.  The copy saved on disk is left as is.

    'alt_pub' is an optional alternate Publisher to use for any
    required transport operations.
    """
    path = self.get_local_path(img, pfmri)
    hash_attr, hash_attr_val, hash_func = \
        digest.get_least_preferred_hash(self)

    try:
        with open(path, "rb") as fh:
            size = os.stat(path).st_size
            chash, txt = misc.get_data_digest(
                fh, length=size, return_content=True,
                hash_func=hash_func)
            if chash == hash_attr_val:
                return misc.force_str(txt, errors='replace')
    except EnvironmentError as e:
        # A missing cached copy simply means we must download;
        # anything else is a genuine error.
        if e.errno != errno.ENOENT:
            raise

    # Cached copy absent or hash mismatch: fetch the license from
    # the publisher instead.
    try:
        if not alt_pub:
            alt_pub = img.get_publisher(pfmri.publisher)
        assert pfmri.publisher == alt_pub.prefix
        return img.transport.get_content(alt_pub, hash_attr_val,
            fmri=pfmri, hash_func=hash_func, errors="replace")
    finally:
        img.cleanup_downloads()
def __call(args, zone=None):
    """Invoke the given SMF command, optionally inside 'zone', and
    return its output lines decoded to str.

    Raises RuntimeError if the command cannot be executed and
    NonzeroExitException if it exits non-zero.
    """
    # a way to invoke a separate executable for testing
    cmds_dir = DebugValues.get_value("smf_cmds_dir")
    if cmds_dir:
        args = (os.path.join(cmds_dir,
            args[0].lstrip("/")), ) + args[1:]
    if zone:
        cmd = DebugValues.get_value("bin_zlogin")
        if cmd is None:
            cmd = zlogin_path
        args = (cmd, zone) + args
    try:
        proc = subprocess.Popen(args, stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT)
        # Close the pipe once output is consumed so its file
        # descriptor is not leaked.
        with proc.stdout:
            buf = [misc.force_str(l)
                for l in proc.stdout.readlines()]
        ret = proc.wait()
    except OSError as e:
        raise RuntimeError("cannot execute {0}: {1}".format(args, e))
    if ret != 0:
        raise NonzeroExitException(args, ret, buf)
    return buf
def process_python_dependencies(action, pkg_vars, script_path, run_paths):
    """Analyze the file delivered by the action for any python
    dependencies.

    The 'action' parameter contains the action which delivers the
    file.

    The 'pkg_vars' parameter contains the variants against which
    the action's package was published.

    The 'script_path' parameter is None if the file is not
    executable, or is the path for the binary which is used to
    execute the file.

    The 'run_paths' parameter is a list of paths that should be
    searched for modules.

    Returns a (deps, errors, attrs) triple.
    """

    # There are three conditions which determine whether python
    # dependency analysis is performed on a file with python in its
    # #! line.
    # 1) Is the file executable. (Represented in the table below by X)
    # 2) Is the file installed into a directory which provides
    #    information about what version of python should be used for
    #    it. (Represented by D)
    # 3) Does the first line of the file include a specific version
    #    of python. (Represented by F)
    #
    # Conditions || Perform Analysis
    #  X  D  F   || Y, if F and D disagree, display a warning in the
    #            ||    output and use D to analyze the file.
    #  X  D !F   || Y
    #  X !D  F   || Y
    #  X !D !F   || N, and display a warning in the output.
    # !X  D  F   || Y
    # !X  D !F   || Y
    # !X !D  F   || N
    # !X !D !F   || N

    local_file = action.attrs[PD_LOCAL_PATH]
    deps = []
    errs = []
    path_version = None
    dir_major = None
    dir_minor = None
    file_major = None
    file_minor = None
    cur_major = None
    cur_minor = None
    executable = bool(script_path)

    # Version of python to use to do the analysis.
    analysis_major = None
    analysis_minor = None

    cur_major, cur_minor = sys.version_info[0:2]

    # Version implied by the install directory (condition D).
    install_match = py_lib_re.match(action.attrs["path"])
    if install_match:
        dir_major = install_match.group("major")
        dir_minor = install_match.group("minor")

    # Version named in the #! interpreter line (condition F).
    script_match = None
    if script_path:
        script_match = py_bin_re.match(script_path)
        if script_match:
            file_major = script_match.group("major")
            file_minor = script_match.group("minor")

    if executable:
        # Check whether the version of python declared in the #! line
        # of the file and the version of python implied by the
        # directory the file is delivered into match.
        if install_match and script_match and \
            (file_major != dir_major or
            file_minor != dir_minor):
            errs.append(
                PythonMismatchedVersion(
                    "{0}.{1}".format(dir_major, dir_minor),
                    "{0}.{1}".format(file_major, file_minor),
                    local_file, action.attrs["path"]))
        if install_match:
            analysis_major = dir_major
            analysis_minor = dir_minor
        elif script_match:
            analysis_major = file_major
            analysis_minor = file_minor
        else:
            # An example of this case is an executable file in
            # /usr/bin with #!/usr/bin/python as its first line.
            errs.append(
                PythonUnspecifiedVersion(
                    local_file, action.attrs["path"]))
    elif install_match:
        analysis_major = dir_major
        analysis_minor = dir_minor

    if analysis_major is None or analysis_minor is None:
        return deps, errs, {}

    analysis_major = int(analysis_major)
    analysis_minor = int(analysis_minor)

    # If the version implied by the directory hierarchy matches the
    # version of python running, use the default analyzer and don't
    # fork and exec.
    if cur_major == analysis_major and cur_minor == analysis_minor:
        mf = modulefinder.DepthLimitedModuleFinder(os.path.dirname(
            action.attrs["path"]), run_paths=run_paths)
        try:
            loaded_modules = mf.run_script(local_file)
            for names, dirs in set([
                (tuple(m.get_file_names()), tuple(m.dirs))
                for m in loaded_modules
            ]):
                # Add the directory the python file will be
                # installed in to the paths used to find modules
                # for import.  This allows relative imports to
                # work correctly.
                deps.append(
                    PythonDependency(action, names, dirs,
                    pkg_vars, action.attrs[PD_PROTO_DIR]))
            missing, maybe = mf.any_missing_maybe()
            for name in missing:
                errs.append(
                    PythonModuleMissingPath(name,
                    action.attrs[PD_LOCAL_PATH]))
        except SyntaxError as e:
            errs.append(
                PythonSyntaxError(action.attrs["path"],
                local_file, s_err=e))
        except Exception as e:
            errs.append(e)
        return deps, errs, {}

    # If the version implied by the directory hierarchy does not
    # match the version of python running, it's necessary to fork and
    # run the appropriate version of python.
    root_dir = os.path.dirname(__file__)
    exec_file = os.path.join(root_dir, "depthlimitedmf.py")
    cmd = [
        "python{0}.{1}".format(analysis_major, analysis_minor),
        exec_file,
        os.path.dirname(action.attrs["path"]),
        local_file
    ]
    newenv = os.environ.copy()
    # Tell Python to not create .pyc, .pyo, etc. cache files for any
    # Python modules our script imports.
    newenv["PYTHONDONTWRITEBYTECODE"] = "1"
    if run_paths:
        cmd.extend(run_paths)
    try:
        sp = subprocess.Popen(cmd, env=newenv,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
            encoding="utf-8")
    except Exception as e:
        return [], [PythonSubprocessError(None, " ".join(cmd),
            str(e))], {}
    out, err = sp.communicate()
    out = force_str(out)
    if sp.returncode:
        errs.append(PythonSubprocessError(sp.returncode,
            " ".join(cmd), err))
    bad_lines = []
    # The helper emits one record per line: "DEP ...",
    # "ERR MISSING ...", "ERR SYNTAX ..." or a generic "ERR ...";
    # anything else counts as malformed output.
    for l in out.splitlines():
        l = l.strip()
        if l.startswith("DEP "):
            try:
                names, dirs = eval(l[4:])
            except Exception:
                bad_lines.append(l)
            else:
                deps.append(
                    PythonDependency(action, names, dirs,
                    pkg_vars, action.attrs[PD_PROTO_DIR]))
        elif l.startswith("ERR MISSING "):
            errs.append(
                PythonModuleMissingPath(
                l[len("ERR MISSING "):],
                action.attrs[PD_LOCAL_PATH]))
        elif l.startswith("ERR SYNTAX "):
            errs.append(
                PythonSyntaxError(action.attrs["path"],
                local_file, msg=l))
        elif l.startswith("ERR "):
            # Generic error which is assigned as a missing path
            errs.append(
                PythonModuleMissingPath(l[4:],
                action.attrs[PD_LOCAL_PATH]))
        else:
            bad_lines.append(l)
    if bad_lines:
        errs.append(PythonSubprocessBadLine(" ".join(cmd),
            bad_lines))
    return deps, errs, {}
def add_manifest(self, f):
    """Adds the manifest to the Transaction.

    'f' may be a file path or an open binary file object; the
    content may be gzip-compressed, in which case it is
    decompressed as it is stored.  Raises
    TransactionOperationError on obsoletion/renaming violations.
    """
    if isinstance(f, six.string_types):
        f = open(f, "rb")
    # Store the manifest file.
    fpath = os.path.join(self.dir, "manifest")
    with open(fpath, "ab+") as wf:
        try:
            misc.gunzip_from_stream(f, wf, ignore_hash=True)
            wf.seek(0)
            content = wf.read()
        except zlib.error:
            # No need to decompress it if it's not a gzipped
            # file.
            f.seek(0)
            content = f.read()
            wf.write(content)

    # Do some sanity checking on packages marked or being marked
    # obsolete or renamed.
    m = pkg.manifest.Manifest()
    m.set_content(misc.force_str(content))
    for action in m.gen_actions():
        if action.name == "set" and \
            action.attrs["name"] == "pkg.obsolete" and \
            action.attrs["value"] == "true":
            self.obsolete = True
            # Obsolete packages may carry only 'set' and
            # 'signature' actions.
            if self.types_found.difference(
                set(("set", "signature"))):
                raise TransactionOperationError(_("An obsolete "
                    "package cannot contain actions other than "
                    "'set' and 'signature'."))
        elif action.name == "set" and \
            action.attrs["name"] == "pkg.renamed" and \
            action.attrs["value"] == "true":
            self.renamed = True
            # Renamed packages may also carry 'depend' actions.
            if self.types_found.difference(
                set(("depend", "set", "signature"))):
                raise TransactionOperationError(_("A renamed "
                    "package cannot contain actions other than "
                    "'set', 'depend', and 'signature'."))

        if not self.has_reqdeps and action.name == "depend" and \
            action.attrs["type"] == "require":
            self.has_reqdeps = True

        if self.obsolete and self.renamed:
            # Reset either obsolete or renamed, depending on which
            # action this was.
            if action.attrs["name"] == "pkg.obsolete":
                self.obsolete = False
            else:
                self.renamed = False
            raise TransactionOperationError(_("A package may not "
                " be marked for both obsoletion and renaming."))
        elif self.obsolete and action.name not in ("set", "signature"):
            raise TransactionOperationError(_("A '{type}' action "
                "cannot be present in an obsolete package: "
                "{action}").format(
                type=action.name, action=action))
        elif self.renamed and action.name not in \
            ("depend", "set", "signature"):
            raise TransactionOperationError(_("A '{type}' action "
                "cannot be present in a renamed package: "
                "{action}").format(
                type=action.name, action=action))

        self.types_found.add(action.name)
def check_firmware(self, dep_action, firmware_name):
    """Check firmware dependency.
    returns ((true, false, none (internal error)), error text)"""

    firmware_dir = "/usr/lib/fwenum"
    # leverage smf test infrastructure
    cmds_dir = DebugValues["smf_cmds_dir"]
    if DebugValues["firmware-dependency-bypass"]:
        return (True, None)
    if cmds_dir:
        # we're testing;
        firmware_dir = cmds_dir

    args = [os.path.join(firmware_dir,
        firmware_name[len("feature/firmware/"):])]
    args.extend([
        "{0}={1}".format(k, quote_attr_value(v))
        for k, v in sorted(six.iteritems(dep_action.attrs))
        if k not in ["type", "root-image", "fmri"]
    ])
    key = str(args)

    # use a cache since each check may be expensive and each
    # pkg version may have the same dependency.
    # ignore non-solaris systems here
    # BUG FIX: the cache lives in self.__firmware; the old code
    # consulted a non-existent 'self.firmware' attribute here,
    # raising AttributeError on non-Solaris systems.
    if portable.osname != "sunos" and key not in self.__firmware:
        self.__firmware[key] = (True, None)

    if key not in self.__firmware:
        try:
            proc = subprocess.Popen(args,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT)
            # output from proc is bytes
            buf = [misc.force_str(l)
                for l in proc.stdout.readlines()]
            ret = proc.wait()
            # if there was output, something went wrong.
            # Since generic errors are often exit(1),
            # map this to an internal error.
            if ret == 1 and len(buf) > 0:
                ret = 255
            if ret == 0:
                ans = (True, None)
            elif 0 < ret <= 239:
                ans = (False, (_("There are {0} instances"
                    " of downrev firmware for the '{1}' "
                    " devices present on this system. "
                    "Update each to version {2} or better."
                    ).format(ret, args[1],
                    dep_action.attrs.get("minimum-version",
                    _("UNSPECIFIED")))))
            elif ret == 240:
                ans = (False, (_("There are 240 or more "
                    "instances of downrev firmware for the"
                    "'{0}' devices present on this system. "
                    "Update each to version {1} or better."
                    ).format(args[1],
                    dep_action.attrs.get("minimum-version",
                    _("UNSPECIFIED")))))
            elif ret < 0:
                ans = (None, (_("Firmware dependency error: {0} "
                    " exited due to signal {1}").format(
                    " ".join(args), misc.signame(-ret))))
            else:
                ans = (None, (_("Firmware dependency error: General "
                    "internal error {0} running '{1}': '{2}'"
                    ).format(str(ret), " ".join(args),
                    "\n".join(buf))))
        except OSError as e:
            # we have no enumerator installed. This can
            # occur if this driver is being installed
            # for the first time or, more likely, we
            # just added enumerators & a firmware dependency
            # for the first time. For now, drive on and
            # ignore this to permit the addition of such
            # dependencies concurrently with their
            # enumerarators.
            # ans = (None, (_("Firmware dependency error:"
            #     " Cannot exec {0}: {1}").format(" ".join(args)
            #     , str(e))))
            ans = (True, 0)
        self.__firmware[key] = ans
    return self.__firmware[key]